diff --git a/.common.mk b/.common.mk deleted file mode 100644 index 16052891902..00000000000 --- a/.common.mk +++ /dev/null @@ -1,63 +0,0 @@ -# This makefile is included from several other makefiles in the tree. - -# It enables configurably 'silent' rules, that do not -# print output unless V=1 is set. When writing a rule, you can do as -# follows (taken from src/Makefile.boot): -# -# ocaml-output/%.ml: -# $(call msg, "EXTRACT", $(notdir $@)) -# $(Q)$(BENCHMARK_PRE) $(FSTAR_C) $(SIL) $(notdir $(subst .checked.lax,,$<)) \ -# --codegen OCaml \ -# --extract_module $(basename $(notdir $(subst .checked.lax,,$<))) -# -# This unconditionally prints a message like '[EXTRACT FStar_Syntax_Subst.ml]' -# (`notdir` is used to omit the directory of the target) and then -# proceeds to execute the F* invocation silently (since $(Q) expands to -# "@"). However, calling the same rule with `make V=1` will still print -# the message and then print the F* invocation before executing. -# -# Besides that, when not using V=1, F* receives the --silent flag to -# reduce non-critical output. - -# It also defines some other utilities for resource monitoring and -# paths manipulation for cygwin - -Q?=@ -SIL?=--silent -RUNLIM= -ifneq ($(V),) - Q= - SIL= -endif - -define NO_RUNLIM_ERR -runlim not found: - To use RESOURCEMONITOR=1, the `runlim` tool must be installed and in your $$PATH. - It must also be a recent version supporting the `-p` option. - You can get it from: [https://github.com/arminbiere/runlim] -endef - -define msg = -@printf " %-8s %s\n" $(1) $(2) -endef - -# Passing RESOURCEMONITOR=1 will create .runlim files through the source tree with -# information about the time and space taken by each F* invocation. -ifneq ($(RESOURCEMONITOR),) - ifeq ($(shell which runlim),) - _ := $(error $(NO_RUNLIM_ERR))) - endif - ifneq ($(MONID),) - MONPREFIX=$(MONID). - endif - RUNLIM=runlim -p -o $@.$(MONPREFIX)runlim -endif - -# Can be called as $(call maybe_cygwin_path,...) -# where ... is the argument - -maybe_cygwin_path=$(if $(findstring $(OS),Windows_NT),$(shell cygpath -m $(1)),$(1)) - -# Ensure that any failing rule will not create its target file. -# In other words, make `make` less insane. -.DELETE_ON_ERROR: diff --git a/.docker/dev-base.Dockerfile b/.docker/dev-base.Dockerfile new file mode 100644 index 00000000000..1c59081eabc --- /dev/null +++ b/.docker/dev-base.Dockerfile @@ -0,0 +1,58 @@ +FROM ubuntu:24.04 + +RUN apt-get update + +RUN apt-get install -y --no-install-recommends \ + git \ + sudo \ + python3 \ + python-is-python3 \ + opam \ + rustc \ + curl \ + ca-certificates \ + rsync \ + wget \ + && apt-get clean -y + +# Install the relevant Z3 versions. +COPY ./bin/get_fstar_z3.sh /usr/local/bin +RUN get_fstar_z3.sh /usr/local/bin + +RUN useradd -ms /bin/bash user +RUN echo 'user ALL=NOPASSWD: ALL' >> /etc/sudoers +USER user +WORKDIR /home/user + +# Install OCaml +ARG OCAML_VERSION=4.14.2 +RUN opam init --compiler=$OCAML_VERSION --disable-sandboxing +RUN opam env --set-switch | tee --append $HOME/.profile $HOME/.bashrc $HOME/.bash_profile +RUN opam option depext-run-installs=true +ENV OPAMYES=1 + +# F* dependencies. This is the only place where we read a file from +# the F* repo. +ADD fstar.opam ./fstar.opam +RUN opam install -j$(nproc) --confirm-level=unsafe-yes --deps-only ./fstar.opam && opam clean + +# Some karamel dependencies too. 
hex for everparse +RUN opam install -j$(nproc) --confirm-level=unsafe-yes fix fileutils visitors camlp4 wasm ulex uucp ctypes ctypes-foreign hex && opam clean + +RUN sudo apt install time + +# Sigh, install dotnet. The setup-dotnet action does not +# work on a container apparently. +ENV DOTNET_ROOT /dotnet +RUN wget -nv https://download.visualstudio.microsoft.com/download/pr/cd0d0a4d-2a6a-4d0d-b42e-dfd3b880e222/008a93f83aba6d1acf75ded3d2cfba24/dotnet-sdk-6.0.400-linux-x64.tar.gz && \ + sudo mkdir -p $DOTNET_ROOT && \ + sudo tar xf dotnet-sdk-6.0.400-linux-x64.tar.gz -C $DOTNET_ROOT && \ + rm -f dotnet-sdk*.tar.gz +RUN sudo ln -s $DOTNET_ROOT/dotnet /usr/local/bin/dotnet + +RUN rm fstar.opam # move up + +# install rust (move up and remove rustv) +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +RUN sudo apt-get update && sudo apt-get install --yes --no-install-recommends llvm-dev libclang-dev clang libgmp-dev pkg-config +RUN . "$HOME/.cargo/env" && rustup component add rustfmt && cargo install bindgen-cli diff --git a/.github/env.sh b/.github/env.sh deleted file mode 100755 index e0e7b9e1523..00000000000 --- a/.github/env.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash - -# Source this file to set up the CI environment. - -eval $(opam env) -source ~/.bash_profile # get z3 path diff --git a/.github/setup-macos.sh b/.github/setup-macos.sh deleted file mode 100755 index fd031b5fc5f..00000000000 --- a/.github/setup-macos.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash - -# Install OCaml and other GNU build tools -# coreutils: for the `install` command used in install-ulib.sh -export OPAMYES=1 -brew install opam bash gnu-getopt coreutils gnu-sed -opam init --compiler=4.14.0 -eval $(opam env) - -# Install Z3 and the opam package dependencies -# NOTE: on Mac OS, we cannot do `opam install --deps-only fstar.opam` -# because the z3 opam package is broken -# So, we rely on Everest instead. -# We assume an everest checkout in the same directory as this script. -# The GitHub Actions workflow should take care of cloning everest. -cwd=$(cd $(dirname $0); pwd -P) -cd $cwd/everest -./everest --yes z3 opam diff --git a/.github/workflows/build-all.yml b/.github/workflows/build-all.yml new file mode 100644 index 00000000000..09d68a3c41f --- /dev/null +++ b/.github/workflows/build-all.yml @@ -0,0 +1,14 @@ +name: Build F* binaries (all archs) + +on: + workflow_call: + workflow_dispatch: + +jobs: + build-linux: + # This job also builds an (architecture-indepenendent) source package + # artifact. + uses: ./.github/workflows/build-linux.yml + + build-macos: + uses: ./.github/workflows/build-macos.yml diff --git a/.github/workflows/build-ci.yml b/.github/workflows/build-ci.yml new file mode 100644 index 00000000000..2f2972d559f --- /dev/null +++ b/.github/workflows/build-ci.yml @@ -0,0 +1,69 @@ +name: Build F* (ci) + +# This builds F* for Linux for the purposes of CI. It runs on a +# recent Ubuntu-based container and also generates a source package +# and a repo snapshot. For the purposes of binary releases, see +# build-linux.yml. + +on: + workflow_call: + workflow_dispatch: + +defaults: + run: + shell: bash + +jobs: + build: + # Build an F* binary package: a fully-bootstrapped stage 2 compiler, + # with its plugins, a fully checked library (i.e. with .checked) + # files and compiled versions of fstar_lib and fstar_plugin_lib. + # We do not package a stage 1 compiler. 
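  # A minimal sketch (not part of this job; paths taken from the artifact
  # consumption steps in check-friends.yml later in this diff) of how downstream
  # jobs use the fstar.tar.gz produced by the `make package` invocation below:
  #
  #     tar -xzf fstar.tar.gz
  #     echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV
  #     ./fstar/bin/fstar.exe --version   # hypothetical smoke test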
+ # runs-on: [self-hosted, linux, X64] # self-hosted so we use fast runners + runs-on: ubuntu-latest + container: mtzguido/dev-base + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/checkout@master + with: + path: FStar + + - name: Produce all artifacts + run: make -skj$(nproc) package package-src FSTAR_TAG= + working-directory: FStar + + # Upload the archives. + - uses: actions/upload-artifact@v4 + with: + path: FStar/fstar.tar.gz + name: fstar.tar.gz + retention-days: 3 + - uses: actions/upload-artifact@v4 + with: + path: FStar/fstar-src.tar.gz + name: fstar-src.tar.gz + retention-days: 3 + + # Upload full repo too, for stage3 check and Pulse. Note: we + # explicitly run 'make setlink-2' at this point to generate the out/ + # directory, as the previous targets do not. Also, remove the + # previous archives so they don't blow up the size of this + # artifact. + - run: rm -f FStar/fstar*.tar.gz + - run: make setlink-2 + working-directory: FStar + + - uses: mtzguido/gci-upload@master + with: + name: fstar-repo + path: FStar + extra: --exclude=FStar/stage*/dune/_build + hometag: FSTAR + + # FIXME: Ideally, we could upload the artifacts as soon as each of + # them is created, and start the subsequent jobs at that instant too. + # Is that even doable...? diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml new file mode 100644 index 00000000000..70d95fd6382 --- /dev/null +++ b/.github/workflows/build-linux.yml @@ -0,0 +1,58 @@ +name: Build F* (Linux) + +# This builds F* for Linux for a binary package release. +# See build.yml for the build used in normal CI runs. + +on: + workflow_call: + workflow_dispatch: + +defaults: + run: + shell: bash + +jobs: + build: + runs-on: ubuntu-22.04 + # We prefer slightly older Ubuntu so we get binaries that work on + # all more recent versions. + steps: + - uses: actions/checkout@master + with: + path: FStar + + - uses: ocaml/setup-ocaml@v3 + with: + ocaml-compiler: 4.14.2 + + - name: Prepare + run: | + ./FStar/.scripts/get_fstar_z3.sh $HOME/bin + echo "PATH=$HOME/bin:$PATH" >> $GITHUB_ENV + opam install --deps-only FStar/fstar.opam + + - name: Set version + run: | + # Setting FSTAR_VERSION for nightly and release builds. If unset, + # we use $(version.txt)~dev. Setting it avoids the ~dev. 
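  # For example (hypothetical values): a nightly run ends up with
  # FSTAR_VERSION=nightly-2025-01-01 (from `date -I`), a release run uses the
  # contents of version.txt verbatim, and ordinary CI builds leave it unset,
  # falling back to "<version.txt contents>~dev".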
+ if [[ "${{github.workflow_ref}}" =~ "nightly.yml" ]]; then + echo FSTAR_VERSION="nightly-$(date -I)" >> $GITHUB_ENV + elif [[ "${{github.workflow_ref}}" =~ "release.yml" ]]; then + echo FSTAR_VERSION="$(cat FStar/version.txt)" >> $GITHUB_ENV + fi + + - name: Build packages + working-directory: FStar + run: | + eval $(opam env) + make -skj$(nproc) package FSTAR_TAG=-Linux-x86_64 + make -skj$(nproc) package-src FSTAR_TAG= + + - uses: actions/upload-artifact@v4 + with: + path: FStar/fstar-Linux-x86_64.tar.gz + name: fstar-Linux-x86_64.tar.gz + - uses: actions/upload-artifact@v4 + with: + path: FStar/fstar-src.tar.gz + name: fstar-src.tar.gz diff --git a/.github/workflows/build-macos.yml b/.github/workflows/build-macos.yml new file mode 100644 index 00000000000..47e259bfe47 --- /dev/null +++ b/.github/workflows/build-macos.yml @@ -0,0 +1,46 @@ +name: Build F* (macos) + +on: + workflow_dispatch: + workflow_call: + +jobs: + build: + runs-on: macos-latest + steps: + - uses: actions/checkout@master + with: + path: FStar + + - uses: ocaml/setup-ocaml@v3 + with: + ocaml-compiler: 4.14.2 + + - name: Prepare + run: | + brew install opam bash gnu-getopt coreutils gnu-sed make + ./FStar/.scripts/get_fstar_z3.sh $HOME/bin + echo "PATH=$HOME/bin:$PATH" >> $GITHUB_ENV + opam install --deps-only FStar/fstar.opam + + - name: Set version + run: | + # Setting FSTAR_VERSION for nightly and release builds. If unset, + # we use $(version.txt)~dev. Setting it avoids the ~dev. + if [[ "${{github.workflow_ref}}" =~ "nightly.yml" ]]; then + echo FSTAR_VERSION="nightly-$(date -I)" >> $GITHUB_ENV + elif [[ "${{github.workflow_ref}}" =~ "release.yml" ]]; then + echo FSTAR_VERSION="$(cat FStar/version.txt)" >> $GITHUB_ENV + fi + + # Note *g*make below! + - name: Build package + working-directory: FStar + run: | + eval $(opam env) + gmake -skj$(nproc) package FSTAR_TAG=-Darwin-x86_64 + + - uses: actions/upload-artifact@v4 + with: + path: FStar/fstar-Darwin-x86_64.tar.gz + name: fstar-Darwin-x86_64.tar.gz diff --git a/.github/workflows/check-friends.yml b/.github/workflows/check-friends.yml new file mode 100644 index 00000000000..68c887f5fc4 --- /dev/null +++ b/.github/workflows/check-friends.yml @@ -0,0 +1,562 @@ +name: Check F* friends + +# This workflow must be called ONLY after a run of build.yml +on: + workflow_call: + +defaults: + run: + shell: bash + +jobs: + build-krml: + runs-on: ubuntu-latest + container: mtzguido/dev-base + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - name: Checkout karamel + uses: actions/checkout@master + with: + path: karamel/ + repository: mtzguido/karamel + ref: dev + + - name: Build krml + run: make -C karamel -skj$(nproc) + + # krml is a symlink to _build/default/src/Karamel.exe, but we want to exclude _build. + # So, overwrite the link with the actual file. + - name: Fix for symlink + run: | + cp --remove-destination $(realpath karamel/krml) karamel/krml + + - uses: mtzguido/gci-upload@master + with: + name: karamel + extra: --exclude=karamel/_build + hometag: KRML + + test-krml: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + steps: + - name: Cleanup + run: sudo find . 
-delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + # krml test needs node + - uses: actions/setup-node@v4 + with: + node-version: 16 + + - uses: mtzguido/gci-download@master + with: + name: karamel + + # node is needed for the wasm tests, skip them for now + - name: Test + run: make -C karamel -skj$(nproc) test + + build-steel: + runs-on: ubuntu-latest + container: mtzguido/dev-base + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - name: Checkout steel + uses: actions/checkout@master + with: + path: steel/ + repository: mtzguido/steel + ref: dev + + - name: Build + run: make -C steel -skj$(nproc) + + - uses: mtzguido/gci-upload@master + with: + name: steel + hometag: STEEL + + test-steel: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-steel + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: steel + + - name: Test + run: make -C steel -skj$(nproc) test + + build-pulse: + runs-on: ubuntu-latest + container: mtzguido/dev-base + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - name: Checkout pulse + uses: actions/checkout@master + with: + path: pulse/ + repository: mtzguido/pulse + ref: dev + + # Since pulse needs F* internal build files, we can't use + # binary nor source packages. So we get the whole repo. + - uses: mtzguido/gci-download@master + with: + name: fstar-repo + + - name: Build (after setting up cargo env) + run: . $HOME/.cargo/env && make -C pulse -skj$(nproc) + + - uses: mtzguido/gci-upload@master + with: + name: pulse + hometag: PULSE + + test-pulse: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-pulse + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + # Since pulse needs F* internal build files, we can't use + # binary nor source packages. So we get the whole repo. + - uses: mtzguido/gci-download@master + with: + name: fstar-repo + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: pulse + + - name: Test + run: . $HOME/.cargo/env && make -C pulse -skj$(nproc) test + + build-hacl: + # runs-on: [self-hosted, linux, big] # using a faster runner + # NOTE: To use a self-hosted runner, we must make sure that + # the runner is executing as UID 1001 (which is the one the + # docker container uses) or it will be unable to write to its + # workspace. This is simply a terrible design by github actions. + # Somehow the cloud runners work regardless of the uid in + # the container. 
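  # A minimal sketch (not part of the workflow; the image name is taken from the
  # `container:` line below) of how to confirm the UID assumption before
  # re-enabling the self-hosted runner:
  #
  #     id -u                                    # UID of the runner host user
  #     docker run --rm mtzguido/dev-base id -u  # UID of the image's default user (expected: 1001)
  #
  # If the two differ, the job cannot write to its workspace, as noted above.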
+ runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - name: Checkout hacl-star + uses: actions/checkout@master + with: + path: hacl-star/ + repository: mtzguido/hacl-star + ref: dev + + - run: echo "HACL_HOME=$(pwd)/hacl-star" >> $GITHUB_ENV + + - name: Get Vale + run: ./hacl-star/tools/get_vale.sh + + - name: Build + run: | + NPROC=$(nproc) + if [ $NPROC -gt 16 ]; then NPROC=16; fi + make -C hacl-star -skj${NPROC} + + - uses: mtzguido/gci-upload@master + with: + name: hacl-star + hometag: HACL + + test-hacl: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-hacl + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: hacl-star + + - name: Get Vale (again) + run: ./hacl-star/tools/get_vale.sh + + - run: sudo apt-get install -y libssl-dev + + - name: Test + run: make -C hacl-star -skj$(nproc) test + + build-everparse: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + - run: opam install -y hex re ctypes sha sexplib + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - name: Checkout everparse + uses: actions/checkout@master + with: + path: everparse/ + repository: mtzguido/everparse + ref: dev + + - name: Build + run: | + NPROC=$(nproc) + if [ $NPROC -gt 16 ]; then NPROC=16; fi + make -C everparse -skj${NPROC} + + - uses: mtzguido/gci-upload@master + with: + name: everparse + hometag: EVERPARSE + + test-everparse: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-everparse + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + - run: opam install -y hex re ctypes sha sexplib + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: everparse + + - name: Test + run: make -C everparse -skj$(nproc) test + + build-merkle-tree: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-hacl + steps: + - name: Cleanup + run: sudo find . 
-delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: hacl-star + + - name: Checkout merkle-tree + uses: actions/checkout@master + with: + path: merkle-tree/ + repository: mtzguido/merkle-tree + ref: dev + + - name: Build + run: | + NPROC=$(nproc) + if [ $NPROC -gt 16 ]; then NPROC=16; fi + make -C merkle-tree -skj${NPROC} dist/libmerkletree.a + + - uses: mtzguido/gci-upload@master + with: + name: merkle-tree + + test-merkle-tree: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-hacl + - build-merkle-tree + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: hacl-star + + - uses: mtzguido/gci-download@master + with: + name: merkle-tree + + - name: Test + run: make -C merkle-tree -skj$(nproc) test + + build-mitls-fstar: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-hacl + - build-everparse + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: everparse + + - uses: mtzguido/gci-download@master + with: + name: hacl-star + + - name: Checkout mitls-fstar + uses: actions/checkout@master + with: + path: mitls-fstar/ + repository: mtzguido/mitls-fstar + ref: dev + + - name: Build + run: make -C mitls-fstar/src/tls -skj$(nproc) + + - uses: mtzguido/gci-upload@master + with: + name: mitls-fstar + hometag: MITLS + + test-mitls-fstar: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-hacl + - build-everparse + - build-mitls-fstar + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: everparse + + - uses: mtzguido/gci-download@master + with: + name: hacl-star + + - uses: mtzguido/gci-download@master + with: + name: mitls-fstar + + - name: Build + run: make -C mitls-fstar/src/tls -skj$(nproc) test + + + build-cbor: + runs-on: ubuntu-latest + container: mtzguido/dev-base + needs: + - build-krml + - build-pulse + steps: + - name: Cleanup + run: sudo find . 
-delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + # Install rust toolchain + - uses: dtolnay/rust-toolchain@stable + + - uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + - run: tar -xzf fstar.tar.gz + - run: echo "FSTAR_EXE=$(pwd)/fstar/bin/fstar.exe" >> $GITHUB_ENV + + - uses: mtzguido/gci-download@master + with: + name: karamel + + - uses: mtzguido/gci-download@master + with: + name: pulse + + - name: Checkout everparse (cbor branch) + uses: actions/checkout@master + with: + path: everparse/ + ref: taramana_cbor + repository: mtzguido/everparse + + - name: Build + run: | + make -C everparse -skj$(nproc) + + - name: Test + run: | + make -C everparse -skj$(nproc) cbor-det-rust-test + + - uses: mtzguido/gci-upload@master + with: + name: everparse-cbor + path: everparse diff --git a/.github/workflows/check-nix-friends.yml b/.github/workflows/check-nix-friends.yml new file mode 100644 index 00000000000..b2fa89eb7ff --- /dev/null +++ b/.github/workflows/check-nix-friends.yml @@ -0,0 +1,58 @@ +name: Check F* friends (Nix) + +# This workflow must be called ONLY after a run of nix.yml +on: + workflow_call: + +defaults: + run: + shell: bash + +jobs: + comparse: + runs-on: ubuntu-latest + steps: + - uses: DeterminateSystems/nix-installer-action@main + - uses: DeterminateSystems/magic-nix-cache-action@main + + - uses: actions/checkout@master + with: + repository: mtzguido/comparse + ref: dev + + - name: Update fstar flake and check + run: | + nix flake update --update-input fstar-flake --override-input fstar-flake "github:${{github.repository}}?rev=${{github.sha}}" + nix flake check + + dy-star: + runs-on: ubuntu-latest + steps: + - uses: DeterminateSystems/nix-installer-action@main + - uses: DeterminateSystems/magic-nix-cache-action@main + + - uses: actions/checkout@master + with: + repository: mtzguido/dolev-yao-star-extrinsic + ref: dev + + - name: Update fstar flake and check + run: | + nix flake update --update-input fstar-flake --override-input fstar-flake "github:${{github.repository}}?rev=${{github.sha}}" + nix flake check + + mls-star: + runs-on: ubuntu-latest + steps: + - uses: DeterminateSystems/nix-installer-action@main + - uses: DeterminateSystems/magic-nix-cache-action@main + + - uses: actions/checkout@master + with: + repository: mtzguido/mls-star + ref: dev + + - name: Update fstar flake and check + run: | + nix flake update --update-input fstar-flake --override-input fstar-flake "github:${{github.repository}}?rev=${{github.sha}}" + nix flake check diff --git a/.github/workflows/check-world.yml b/.github/workflows/check-world.yml index c236c2b29da..b5c8f8c82af 100644 --- a/.github/workflows/check-world.yml +++ b/.github/workflows/check-world.yml @@ -1,808 +1,27 @@ -name: Check world (build F* and all projects) - -# This workflow builds/checks F* and a selection of subprojects -# depending on it. -# -# It is meant to test if an F* patch impacts other projects heavily or -# not. Jobs can (and will) run in parallel if the dependencies allow, -# using (mostly) Github hosted runners which are small-ish 4 core VMs. 
-# -# The steps are containerized and running on a docker container -# 'mtzguido/fstar-base-testing` built from .docker/nu_base.Dockerfile -# (FIXME: it needs a big cleanup and should go into an FStarLang -# namespace) -# -# Self-hosted runners *can* be used, and it's desirable to do it for -# things like HACL* or everparse that take really long on Github VMs -# (build-hacl takes ~1h50m on github runners, ~20m on a new-ish 16-core -# 32-thread desktop). HOWEVER, Github actions seems to be incredibly -# stupid in bind mounting the workspace for the job from the current -# directory of the runner, which means the workspace has the UID of the -# user that started the runner, but the steps of the job run inside the -# runner with the UID of the docker user. If these UIDs differ, the job -# will quickly fail to do anything and break. I'm not sure what the -# canonical fix is here, I think this is insane. FIXME -# -# We also use some custom actions. They are all not very well documented -# and not robust, so think twice (or ask) before using elsewhere! -# - mtzguido/set-opam-env: -# this sets up the opam environment for the following steps -# (`eval $(opam env)` will not cut it, nor will `opam env >> $GITHUB_ENV`) -# -# - mtzguido/gci-upload: -# this uploads an artifact, much like github/upload-artifact, but first -# packages it up into a tarball to preserve permissions (like exec bits). -# Again it's insane that github does not do this by default. The tarball -# will anyways be zipped afterwards since all github artifacts are zipped, -# and will show up in the workflow run page. -# It takes an optional 'extra' for argments to tar, that we use to ignore -# some directories. It also ignores .git by default. -# The 'hometag' is a way to set home variables when downloading the artifacts. -# -# - mtzguido/gci-download: -# The companion to gci-upload. It will download the zipped tarball, unzip, -# and extract into a directory named like the artifact. If the gci-upload had -# a 'hometag: FOO', then the client downloading the artifact will get FOO_HOME -# set in the environment, equal to the directory where the artifact was extracted. -# This means most jobs do -# -# - uses: mtzguido/gci-download@master -# with: -# name: FStar -# -# And get a working F* with FSTAR_HOME set in the FStar directory. -# -# -# Adding new jobs should be relatively easy. Just state dependencies in -# the 'needs', and start the job by fetching the required dependencies -# with gci-download, and then build. The examples below should serve -# as a guide. If possible, separate the building from the testing, -# since future jobs may need the built resource, but all tests are -# independent. (However note that there is an overhead to start a job, -# it's ~40s alone to start a container, so don't overdo it by splitting -# up too much). +name: Check world (test F* + all subprojects) on: - # push: workflow_dispatch: - workflow_call: - -# TODO: -# Is there a way to set the default container? -# Move to the regular fstar-ci-base too defaults: run: shell: bash jobs: - build-fstar: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - name: Checkout - uses: actions/checkout@master - with: - path: FStar/ - - - name: Prep - run: | - # In case we edited fstar.opam, install new deps here - # This will most likely fail to like krml below, what's going on? 
- # opam install --confirm-level=unsafe-yes --deps-only ./FStar/fstar.opam - - name: Build - run: make -C FStar -skj$(nproc) - - - uses: mtzguido/gci-upload@master - with: - name: FStar - extra: --exclude=FStar/ocaml/_build - hometag: FSTAR - - test-fstar: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: build-fstar - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - name: Test - run: make -C FStar -skj$(nproc) ci-uregressions - - test-fstar-boot: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - # needs: build-fstar - # ^ This does not really depend on the previous job, but this can be - # enabled if we wanted to sequentialize them for whatever reason. - # We start from scratch since we need a git repo to check the - # diff, and that is not contained in the artifact. We could just - # take the ulib checked files from the artifact, if we really wanted - # to, but checking ulib with ADMIT is quite fast anyway. - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: actions/checkout@master - with: - path: FStar/ - - - name: Bootstrap - run: | - make -C FStar -skj$(nproc) 1 - make -C FStar -skj$(nproc) full-bootstrap ADMIT=1 - - - name: Check diff - run: | - cd FStar/ - ./.scripts/check-snapshot-diff.sh - - - uses: mtzguido/gci-upload@master - with: - name: FStar-boot - path: FStar - extra: --exclude=FStar/ocaml/_build - hometag: FSTAR - - build-krml: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: build-fstar - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - name: Checkout karamel - uses: actions/checkout@master - with: - path: karamel/ - repository: FStarLang/karamel - - - name: Prep - run: | - # Fails mysteriously: - # - # Error: Package conflict! - # * No agreement on the version of ocaml: - # - (invariant) -> ocaml-base-compiler = 4.14.2 -> ocaml = 4.14.2 - # No solution found, exiting - # - karamel -> fstar -> ocaml < 4.06.0 - # You can temporarily relax the switch invariant with `--update-invariant' - # * No agreement on the version of ocaml-base-compiler: - # - (invariant) -> ocaml-base-compiler = 4.14.2 - # - karamel -> fstar -> ocaml < 4.06.0 -> ocaml-base-compiler = 3.07+1 - # * Missing dependency: - # - karamel -> fstar -> z3 = 4.8.5 -> conf-python-2-7 - # depends on the unavailable system package 'python2.7'. Use `--no-depexts' to attempt installation anyway, or it is possible that a depext package name in the opam file is incorrect. - # * Missing dependency: - # - karamel -> fstar -> ocaml < 4.06.0 -> ocaml-variants >= 3.11.1 -> ocaml-beta - # unmet availability conditions: 'enable-ocaml-beta-repository' - # * Missing dependency: - # - karamel -> fstar -> ocaml < 4.06.0 -> ocaml-variants >= 3.11.1 -> system-msvc - # unmet availability conditions: 'os = "win32"' - # - # opam install --confirm-level=unsafe-yes --deps-only ./karamel/karamel.opam - - - name: Build krml - run: make -C karamel -skj$(nproc) - - # krml is a symlink to _build/default/src/Karamel.exe, but we want to exclude _build. - # So, overwrite the link with the actual file. 
- - name: Fix for symlink - run: | - cp --remove-destination $(realpath karamel/krml) karamel/krml - - - uses: mtzguido/gci-upload@master - with: - name: karamel - extra: --exclude=karamel/_build - hometag: KRML - - test-krml: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - name: Test - run: make -C karamel -skj$(nproc) test - - build-steel: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - name: Checkout steel - uses: actions/checkout@master - with: - path: steel/ - repository: FStarLang/steel - - - name: Build - run: make -C steel -skj$(nproc) - - - uses: mtzguido/gci-upload@master - with: - name: steel - hometag: STEEL - - test-steel: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-steel - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: steel - - - name: Test - run: make -C steel -skj$(nproc) test - - build-pulse: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - name: Checkout pulse - uses: actions/checkout@master - with: - path: pulse/ - repository: FStarLang/pulse - - - name: Build - run: make -C pulse -skj$(nproc) - - - uses: mtzguido/gci-upload@master - with: - name: pulse - hometag: PULSE - - test-pulse-boot: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - test-fstar-boot - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar-boot - - - name: Checkout pulse - uses: actions/checkout@master - with: - path: pulse/ - repository: FStarLang/pulse - - - name: Build - run: | - # This is similar for 'make full-boot', but does not - # check the library. - make -C pulse/src -skj$(nproc) clean-snapshot - make -C pulse/src -skj$(nproc) extract - make -C pulse/src -skj$(nproc) build-ocaml - - - name: Check diff - run: | - cd pulse/ - ./.scripts/check-snapshot-diff.sh - - test-pulse: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-pulse - steps: - - name: Cleanup - run: find . 
-delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: pulse - - - name: Test - run: make -C pulse -skj$(nproc) test - - build-hacl: - # runs-on: [self-hosted, linux, big] # using a faster runner - # NOTE: To use a self-hosted runner, we must make sure that - # the runner is executing as UID 1001 (which is the one the - # docker container uses) or it will be unable to write to its - # workspace. This is simply a terrible design by github actions. - # Somehow the cloud runners work regardless of the uid in - # the container. - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - name: Checkout hacl-star - uses: actions/checkout@master - with: - path: hacl-star/ - repository: hacl-star/hacl-star - - - run: echo "HACL_HOME=$(pwd)/hacl-star" >> $GITHUB_ENV - - - name: Get Vale - run: ./hacl-star/tools/get_vale.sh - - - name: Build - run: | - NPROC=$(nproc) - if [ $NPROC -gt 16 ]; then NPROC=16; fi - make -C hacl-star -skj${NPROC} - - - uses: mtzguido/gci-upload@master - with: - name: hacl-star - hometag: HACL - - test-hacl: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-hacl - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: hacl-star - - - name: Get Vale (again) - run: ./hacl-star/tools/get_vale.sh - - - name: Test - run: make -C hacl-star -skj$(nproc) test - - build-everparse: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - name: Checkout everparse - uses: actions/checkout@master - with: - path: everparse/ - repository: project-everest/everparse - - - name: Build - run: | - NPROC=$(nproc) - if [ $NPROC -gt 16 ]; then NPROC=16; fi - make -C everparse -skj${NPROC} - - - uses: mtzguido/gci-upload@master - with: - name: everparse - hometag: EVERPARSE - - test-everparse: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-everparse - steps: - - name: Cleanup - run: find . 
-delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: everparse - - - name: Test - run: make -C everparse -skj$(nproc) test - - build-merkle-tree: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-hacl - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: hacl-star - - - name: Checkout merkle-tree - uses: actions/checkout@master - with: - path: merkle-tree/ - repository: hacl-star/merkle-tree - - - name: Build - run: | - NPROC=$(nproc) - if [ $NPROC -gt 16 ]; then NPROC=16; fi - make -C merkle-tree -skj${NPROC} dist/libmerkletree.a - - - uses: mtzguido/gci-upload@master - with: - name: merkle-tree - - test-merkle-tree: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-hacl - - build-merkle-tree - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: hacl-star - - - uses: mtzguido/gci-download@master - with: - name: merkle-tree - - - name: Test - run: make -C merkle-tree -skj$(nproc) test - - build-mitls-fstar: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-hacl - - build-everparse - steps: - - name: Cleanup - run: find . -delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: everparse - - - uses: mtzguido/gci-download@master - with: - name: hacl-star - - - name: Checkout mitls-fstar - uses: actions/checkout@master - with: - path: mitls-fstar/ - repository: project-everest/mitls-fstar - - - name: Build - run: make -C mitls-fstar/src/tls -skj$(nproc) - - - uses: mtzguido/gci-upload@master - with: - name: mitls-fstar - hometag: MITLS - - test-mitls-fstar: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-hacl - - build-everparse - - build-mitls-fstar - steps: - - name: Cleanup - run: find . 
-delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: everparse - - - uses: mtzguido/gci-download@master - with: - name: hacl-star - - - uses: mtzguido/gci-download@master - with: - name: mitls-fstar - - - name: Build - run: make -C mitls-fstar/src/tls -skj$(nproc) test - - - ### Nix jobs, for some Inria projects - # - # NOTE: these jobs are not containerized - # 1- it should not be needed since Nix takes care of isolating the environment - # 2- it would actually fail to setup Nix due to permissions in the container, and I haven't - # found a clear reference on what the permissions/uids should be. - # - # The fstar-nix job is here to - # 1- Test the nix build in this workflow too - # 2- Reuse the built F* in the following projects, via the magic-nix-cache (note the 'needs') - - fstar-nix: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - - uses: DeterminateSystems/magic-nix-cache-action@main - - name: Build - run: nix build -L - - comparse: - needs: fstar-nix - runs-on: ubuntu-latest - steps: - - uses: DeterminateSystems/nix-installer-action@main - - uses: DeterminateSystems/magic-nix-cache-action@main - - - uses: actions/checkout@master - with: - repository: TWal/comparse - - - name: Update fstar flake and check - run: | - nix flake update --update-input fstar-flake --override-input fstar-flake "github:${{github.repository}}?rev=${{github.sha}}" - nix flake check - - dy-star: - needs: fstar-nix - runs-on: ubuntu-latest - steps: - - uses: DeterminateSystems/nix-installer-action@main - - uses: DeterminateSystems/magic-nix-cache-action@main - - - uses: actions/checkout@master - with: - repository: REPROSEC/dolev-yao-star-extrinsic - - - name: Update fstar flake and check - run: | - nix flake update --update-input fstar-flake --override-input fstar-flake "github:${{github.repository}}?rev=${{github.sha}}" - nix flake check - - mls-star: - needs: fstar-nix - runs-on: ubuntu-latest - steps: - - uses: DeterminateSystems/nix-installer-action@main - - uses: DeterminateSystems/magic-nix-cache-action@main - - - uses: actions/checkout@master - with: - repository: Inria-Prosecco/mls-star - - - name: Update fstar flake and check - run: | - nix flake update --update-input fstar-flake --override-input fstar-flake "github:${{github.repository}}?rev=${{github.sha}}" - nix flake check - - build-cbor: - runs-on: ubuntu-latest - container: mtzguido/fstar-base-testing - needs: - - build-fstar - - build-krml - - build-pulse - steps: - - name: Cleanup - run: find . 
-delete - - run: echo "HOME=/home/opam" >> $GITHUB_ENV - - uses: mtzguido/set-opam-env@master - - # Install rust toolchain - - uses: dtolnay/rust-toolchain@stable - - - uses: mtzguido/gci-download@master - with: - name: FStar - - - uses: mtzguido/gci-download@master - with: - name: karamel - - - uses: mtzguido/gci-download@master - with: - name: pulse - - - name: Checkout everparse (cbor branch) - uses: actions/checkout@master - with: - path: everparse/ - ref: taramana_cbor - repository: project-everest/everparse - - - name: Build - run: | - make -C everparse -skj$(nproc) - - - name: Test - run: | - make -C everparse -skj$(nproc) cbor-det-rust-test - - - uses: mtzguido/gci-upload@master - with: - name: everparse-cbor - path: everparse + build: + name: build + uses: ./.github/workflows/build-ci.yml + + check-friends: + needs: build + name: friends + uses: ./.github/workflows/check-friends.yml + + build-nix: + name: build (nix) + uses: ./.github/workflows/nix.yml + + check-nix-friends: + needs: build-nix + name: friends-nix + uses: ./.github/workflows/check-nix-friends.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000000..60440c439bc --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,24 @@ +name: F* CI + +on: + push: + pull_request: + workflow_dispatch: + +defaults: + run: + shell: bash + +jobs: + build: + name: build + uses: ./.github/workflows/build-ci.yml + + tests: + name: tests + needs: build + uses: ./.github/workflows/tests.yml + + nix-build: + name: nix + uses: ./.github/workflows/nix.yml diff --git a/.github/workflows/linux-x64-rebuild-base.yaml b/.github/workflows/linux-x64-rebuild-base.yaml deleted file mode 100644 index 6445ef96a25..00000000000 --- a/.github/workflows/linux-x64-rebuild-base.yaml +++ /dev/null @@ -1,145 +0,0 @@ -name: Rebuild base image -on: - schedule: - # 2AM UTC - - cron: '0 2 * * *' - workflow_dispatch: - inputs: - force: - description: Update the base image even if running F* CI fails, and even if this branch is not master - required: true - type: boolean -jobs: - build: - runs-on: [self-hosted, linux, X64] - defaults: - run: - # Setting the default shell to bash. This is not only more standard, - # but also makes sure that we run with -o pipefail, so we can safely - # pipe data (such as | tee LOG) without missing out on failures - # and getting false positives. If you want to change the default shell, - # keep in mind you need a way to handle this. - shell: bash - - steps: - - name: Check out repo - uses: actions/checkout@v4 - - - name: Rebuild base image from scratch - run: | - TEMP_IMAGE_NAME=fstar:update-base-$GITHUB_RUN_ID-$GITHUB_RUN_ATTEMPT - CI_IMAGEBUILD_INITIAL_TIMESTAMP=$(date '+%s') - docker build --pull --no-cache -f .docker/base.Dockerfile -t ${TEMP_IMAGE_NAME} . - CI_IMAGEBUILD_FINAL_TIMESTAMP=$(date '+%s') - echo "CI_IMAGEBUILD_INITIAL_TIMESTAMP=$CI_IMAGEBUILD_INITIAL_TIMESTAMP" >> $GITHUB_ENV - echo "CI_IMAGEBUILD_FINAL_TIMESTAMP=$CI_IMAGEBUILD_FINAL_TIMESTAMP" >> $GITHUB_ENV - echo "TEMP_IMAGE_NAME=$TEMP_IMAGE_NAME" >> $GITHUB_ENV - - - name: Check that F* CI passes - run: | - echo "CI_INITIAL_TIMESTAMP=$(date '+%s')" >> $GITHUB_ENV - ci_docker_image_tag=fstar:update-base-test-$GITHUB_RUN_ID-$GITHUB_RUN_ATTEMPT - echo "ci_docker_image_tag=$ci_docker_image_tag" >> $GITHUB_ENV - - docker build --no-cache -t $ci_docker_image_tag -f .docker/standalone.Dockerfile --build-arg FSTAR_CI_BASE=$TEMP_IMAGE_NAME --build-arg CI_THREADS=$(nproc) . 
|& tee BUILDLOG - ci_docker_status=$(docker run $ci_docker_image_tag /bin/bash -c 'cat $FSTAR_HOME/status.txt' || echo false) - $ci_docker_status - - - name: Tag base image - if: ${{ (success () && github.ref_name == 'master') || inputs.force }} - run: | - docker tag ${TEMP_IMAGE_NAME} fstar_ci_base - - - name: Compute elapsed time and status message - if: ${{ always() }} - run: | - CI_FINAL_TIMESTAMP=$(date '+%s') - CI_TIME_DIFF=$(( $CI_FINAL_TIMESTAMP - $CI_INITIAL_TIMESTAMP )) - echo "CI_TIME_DIFF_S=$(( $CI_TIME_DIFF % 60 ))" >> $GITHUB_ENV - echo "CI_TIME_DIFF_M=$(( ($CI_TIME_DIFF / 60) % 60 ))" >> $GITHUB_ENV - echo "CI_TIME_DIFF_H=$(( $CI_TIME_DIFF / 3600 ))" >> $GITHUB_ENV - case ${{ job.status }} in - (success) - if orange_contents="$(docker run $ci_docker_image_tag /bin/bash -c 'cat $FSTAR_HOME/orange_file.txt')" && [[ $orange_contents = '' ]] ; then - echo "CI_EMOJI=✅" >> $GITHUB_ENV - else - echo "CI_EMOJI=⚠" >> $GITHUB_ENV - fi - ;; - (cancelled) - echo "CI_EMOJI=⚠" >> $GITHUB_ENV - ;; - (*) - echo "CI_EMOJI=❌" >> $GITHUB_ENV - ;; - esac - echo "CI_COMMIT=$(echo ${{ github.sha }} | grep -o '^........')" >> $GITHUB_ENV - echo "CI_COMMIT_URL=https://github.com/FStarLang/FStar/commit/${{ github.sha }}" >> $GITHUB_ENV - if [[ '${{github.event_name}}' == 'schedule' ]]; then - CI_TRIGGER='schedule' - else - CI_TRIGGER='${{github.triggering_actor}}' - fi - echo "CI_TRIGGER=$CI_TRIGGER" >> $GITHUB_ENV - echo 'CI_STATUS='"$(docker run $ci_docker_image_tag /bin/bash -c 'cat $FSTAR_HOME/result.txt' || echo Failure)" >> $GITHUB_ENV - if [ -n "$CI_IMAGEBUILD_INITIAL_TIMESTAMP" ]; then - DIFF=$(( $CI_IMAGEBUILD_FINAL_TIMESTAMP - $CI_IMAGEBUILD_INITIAL_TIMESTAMP )) - SS=$(( $DIFF % 60 )) - MM=$(( ($DIFF / 60) % 60 )) - HH=$(( $DIFF / 3600 )) - CI_IMAGEBUILD_TIME="${HH}h ${MM}min ${SS}s" - echo "CI_IMAGEBUILD_TIME=$CI_IMAGEBUILD_TIME" >> $GITHUB_ENV - fi - - - name: Remove intermediate images - if: ${{ always() }} - run: | - docker rmi -f ${TEMP_IMAGE_NAME} || true - docker rmi -f ${ci_docker_image_tag} || true - - - name: Output build log error summary - if: ${{ failure() }} - run: | - # Just outputs to the github snippet. Could be part of slack message. 
- # This command never triggers a failure - grep -C10 -E ' \*\*\* |\(Error' BUILDLOG > BUILDLOG_ERRORS || true - ERRORS_URL=$(.scripts/sprang BUILDLOG_ERRORS) - ERRORS_MSG=" <$ERRORS_URL|(Error summary)>" - echo "ERRORS_MSG=$ERRORS_MSG" >> $GITHUB_ENV - - - name: Post to the Slack channel - if: ${{ always() }} - id: slack - continue-on-error: true - uses: slackapi/slack-github-action@v1.26.0 - with: - channel-id: ${{ env.CI_SLACK_CHANNEL }} - payload: | - { - "blocks" : [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "Update F* base CI image\n<${{ env.CI_COMMIT_URL }}|${{ env.CI_COMMIT }}> on (${{ github.ref_name }}) by ${{ env.CI_TRIGGER }}" - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "${{ env.CI_EMOJI }} ${{env.ERRORS_MSG}}" - } - }, - { - "type": "section", - "text": { - "type": "plain_text", - "text": "Duration (image build): ${{ env.CI_IMAGEBUILD_TIME }}\nDuration (FStar CI): ${{ env.CI_TIME_DIFF_H }}h ${{ env.CI_TIME_DIFF_M }}min ${{ env.CI_TIME_DIFF_S }}s" - } - } - ] - } - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} - SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK diff --git a/.github/workflows/linux-x64.yaml b/.github/workflows/linux-x64.yaml deleted file mode 100644 index 799c7502231..00000000000 --- a/.github/workflows/linux-x64.yaml +++ /dev/null @@ -1,227 +0,0 @@ -name: Build and test FStar -on: - push: - branches-ignore: - - _** - pull_request: - workflow_dispatch: - inputs: - ci_refresh_hints: - description: Refresh hints and advance version number - required: true - type: boolean - ci_no_karamel: - description: Disable Karamel extraction tests - required: true - type: boolean - ci_skip_image_tag: - description: Do not tag image - required: true - type: boolean -jobs: - build: - runs-on: [self-hosted, linux, X64] - defaults: - run: - # Setting the default shell to bash. This is not only more standard, - # but also makes sure that we run with -o pipefail, so we can safely - # pipe data (such as | tee LOG) without missing out on failures - # and getting false positives. If you want to change the default shell, - # keep in mind you need a way to handle this. - shell: bash - steps: - - name: Record initial timestamp - run: | - echo "CI_INITIAL_TIMESTAMP=$(date '+%s')" >> $GITHUB_ENV - - name: Check out repo - uses: actions/checkout@v4 - - name: Identify the notification channel - run: | - echo "CI_SLACK_CHANNEL=$(jq -c -r '.NotificationChannel' .docker/build/config.json)" >> $GITHUB_ENV - - name: Set the refresh hints flag - if: ${{ (github.event_name == 'workflow_dispatch') && inputs.ci_refresh_hints }} - run: | - # NOTE: this causes the build to record hints - echo "CI_RECORD_HINTS_ARG=--build-arg CI_RECORD_HINTS=1" >> $GITHUB_ENV - - name: Populate no karamel arg - if: ${{ (github.event_name == 'workflow_dispatch') && inputs.ci_no_karamel }} - run: | - echo "CI_DO_NO_KARAMEL=--build-arg CI_NO_KARAMEL=1" >> $GITHUB_ENV - - name: Populate skip image tag arg - if: ${{ (github.event_name == 'workflow_dispatch') && inputs.ci_skip_image_tag }} - run: | - echo "CI_SKIP_IMAGE_TAG=1" >> $GITHUB_ENV - - name: Enable resource monitoring - if: ${{ vars.FSTAR_CI_RESOURCEMONITOR == '1' }} - run: | - echo "RESOURCEMONITOR=1" >> $GITHUB_ENV - - - name: Make sure base image is present, or build it - run: | - if ! docker images | grep '^fstar_ci_base '; then - echo '*** REBUILDING fstar_ci_base image' - CI_IMAGEBUILD_INITIAL_TIMESTAMP=$(date '+%s') - docker build -f .docker/base.Dockerfile -t fstar_ci_base . 
- CI_IMAGEBUILD_FINAL_TIMESTAMP=$(date '+%s') - echo "CI_IMAGEBUILD_INITIAL_TIMESTAMP=$CI_IMAGEBUILD_INITIAL_TIMESTAMP" >> $GITHUB_ENV - echo "CI_IMAGEBUILD_FINAL_TIMESTAMP=$CI_IMAGEBUILD_FINAL_TIMESTAMP" >> $GITHUB_ENV - fi - - - name: Build FStar and its dependencies - run: | - ci_docker_image_tag=fstar:local-run-$GITHUB_RUN_ID-$GITHUB_RUN_ATTEMPT - echo "ci_docker_image_tag=$ci_docker_image_tag" >> $GITHUB_ENV - docker build -t $ci_docker_image_tag -f .docker/standalone.Dockerfile --build-arg CI_BRANCH=$GITHUB_REF_NAME --build-arg RESOURCEMONITOR=$RESOURCEMONITOR --build-arg CI_THREADS=$(nproc) $CI_RECORD_HINTS_ARG $CI_DO_NO_KARAMEL . |& tee BUILDLOG - ci_docker_status=$(docker run $ci_docker_image_tag /bin/bash -c 'cat $FSTAR_HOME/status.txt' || echo false) - if $ci_docker_status && [[ -z "$CI_SKIP_IMAGE_TAG" ]] ; then - if ! { echo $GITHUB_REF_NAME | grep '/' ; } ; then - docker tag $ci_docker_image_tag fstar:local-branch-$GITHUB_REF_NAME - fi - docker tag $ci_docker_image_tag fstar:local-commit-$GITHUB_SHA - fi - $ci_docker_status - - - name: Push the generated hints - if: ${{ (github.event_name == 'workflow_dispatch') && inputs.ci_refresh_hints }} - run: | - FSTAR_HOME=$(docker run $ci_docker_image_tag /bin/bash -c 'echo $FSTAR_HOME') - docker run $ci_docker_image_tag bash -c "env DZOMO_GITHUB_TOKEN=$DZOMO_GITHUB_TOKEN $FSTAR_HOME/.scripts/advance.sh refresh_fstar_hints" - env: - DZOMO_GITHUB_TOKEN: ${{ secrets.DZOMO_GITHUB_TOKEN }} - - - name: Collect resource monitoring files and summary - if: ${{ always () && vars.FSTAR_CI_RESOURCEMONITOR == '1' }} - continue-on-error: true - run: | - # docker cp needs absolute path, obtain FSTAR_HOME - FSTAR_HOME=$(docker run $ci_docker_image_tag /bin/bash -c 'echo $FSTAR_HOME') - # We briefly kick up a container from the generated image, so - # we can extract files from it. No need to start it though. - temp_container=$(docker create $ci_docker_image_tag) - docker cp $temp_container:${FSTAR_HOME}/rmon/ rmon - docker rm -f $temp_container - - # Also, read these bottom-line values into the environment so they - # can go into the Slack message. - FSTAR_CI_MEASURE_CPU=$(awk -F':' '/Total CPU/ { print $2 }' rmon/res_summary.txt) - FSTAR_CI_MEASURE_MEM=$(awk -F':' '/Total memory/ { print $2 }' rmon/res_summary.txt) - echo "FSTAR_CI_MEASURE_CPU=$FSTAR_CI_MEASURE_CPU" >> $GITHUB_ENV - echo "FSTAR_CI_MEASURE_MEM=$FSTAR_CI_MEASURE_MEM" >> $GITHUB_ENV - - # Final goodie: upload the summary to sprunge.us and add a link in - # the Slack message for a 1-click report. 
- RMON_URL=$(.scripts/sprang rmon/res_summary.txt) - echo "RMON_URL=$RMON_URL" >> $GITHUB_ENV - - - name: Save resource monitor summary as artifact - if: ${{ always () && vars.FSTAR_CI_RESOURCEMONITOR == '1' }} - continue-on-error: true - uses: actions/upload-artifact@v4 - with: - name: Resource usage information (summary) - path: | - rmon/res_summary.txt - - - name: Save resource monitor files as artifact - if: ${{ always () && vars.FSTAR_CI_RESOURCEMONITOR == '1' }} - continue-on-error: true - uses: actions/upload-artifact@v4 - with: - name: Resource usage information (individual) - path: | - rmon/rmon.tgz - - - name: Compute elapsed time and status message - if: ${{ always() }} - run: | - CI_FINAL_TIMESTAMP=$(date '+%s') - CI_TIME_DIFF=$(( $CI_FINAL_TIMESTAMP - $CI_INITIAL_TIMESTAMP )) - echo "CI_TIME_DIFF_S=$(( $CI_TIME_DIFF % 60 ))" >> $GITHUB_ENV - echo "CI_TIME_DIFF_M=$(( ($CI_TIME_DIFF / 60) % 60 ))" >> $GITHUB_ENV - echo "CI_TIME_DIFF_H=$(( $CI_TIME_DIFF / 3600 ))" >> $GITHUB_ENV - case ${{ job.status }} in - (success) - if orange_contents="$(docker run $ci_docker_image_tag /bin/bash -c 'cat $FSTAR_HOME/orange_file.txt')" && [[ $orange_contents = '' ]] ; then - echo "CI_EMOJI=✅" >> $GITHUB_ENV - else - echo "CI_EMOJI=⚠" >> $GITHUB_ENV - fi - ;; - (cancelled) - echo "CI_EMOJI=⚠" >> $GITHUB_ENV - ;; - (*) - echo "CI_EMOJI=❌" >> $GITHUB_ENV - ;; - esac - echo "CI_COMMIT=$(echo ${{ github.event.head_commit.id || github.event.pull_request.head.sha || github.head_commit.id }} | grep -o '^........')" >> $GITHUB_ENV - echo 'CI_STATUS='"$(docker run $ci_docker_image_tag /bin/bash -c 'cat $FSTAR_HOME/result.txt' || echo Failure)" >> $GITHUB_ENV - if [ -n "$CI_IMAGEBUILD_INITIAL_TIMESTAMP" ]; then - DIFF=$(( $CI_IMAGEBUILD_FINAL_TIMESTAMP - $CI_IMAGEBUILD_INITIAL_TIMESTAMP )) - SS=$(( $DIFF % 60 )) - MM=$(( ($DIFF / 60) % 60 )) - HH=$(( $DIFF / 3600 )) - CI_IMAGEBUILD_MSG=" (base image rebuilt in ${HH}h ${MM}m ${SS}s)" - echo "CI_IMAGEBUILD_MSG='$CI_IMAGEBUILD_MSG'" >> $GITHUB_ENV - fi - - - name: Output build log error summary - if: ${{ failure () }} - run: | - # Just outputs to the github snippet. Could be part of slack message. 
- # This command never triggers a failure - grep -C10 -E ' \*\*\* |\(Error' BUILDLOG > BUILDLOG_ERRORS || true - ERRORS_URL=$(.scripts/sprang BUILDLOG_ERRORS) - ERRORS_MSG=" <$ERRORS_URL|(Error summary)>" - echo "ERRORS_MSG=$ERRORS_MSG" >> $GITHUB_ENV - - - name: Post to the Slack channel - if: ${{ always() }} - id: slack - continue-on-error: true - uses: slackapi/slack-github-action@v1.26.0 - with: - channel-id: ${{ env.CI_SLACK_CHANNEL }} - payload: | - { - "blocks" : [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{ github.event.head_commit.url || github.event.pull_request.html_url }}|${{ env.CI_COMMIT }}> on (${{ github.ref_name }}) by ${{ github.event.head_commit.author.username || github.event.pull_request.user.login || github.head_commit.author.username }}" - } - }, - { - "type": "section", - "text": { - "type": "plain_text", - "text": ${{ toJSON(github.event.head_commit.message || github.event.pull_request.title || github.head_commit.message || '') }} - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "${{ env.CI_EMOJI }} ${{env.ERRORS_MSG}}" - } - }, - { - "type": "section", - "text": { - "type": "plain_text", - "text": "Duration: ${{ env.CI_TIME_DIFF_H }}h ${{ env.CI_TIME_DIFF_M }}min ${{ env.CI_TIME_DIFF_S }}s${{env.CI_IMAGEBUILD_MSG}}" - } - }, - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "<${{env.RMON_URL}}|Resource summary>\nTotal CPU usage: ${{ env.FSTAR_CI_MEASURE_CPU }}\nTotal memory usage: ${{ env.FSTAR_CI_MEASURE_MEM }}" - } - } - ] - } - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} - SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK diff --git a/.github/workflows/macos-build.yml b/.github/workflows/macos-build.yml deleted file mode 100644 index bf6f39421ed..00000000000 --- a/.github/workflows/macos-build.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Build FStar Binaries for MacOS - -on: - workflow_dispatch: - -jobs: - - build: - - runs-on: macos-latest - - steps: - - name: Checkout FStar - uses: actions/checkout@v4 - with: - path: FStar - - name: Checkout everest - uses: actions/checkout@v4 - with: - repository: project-everest/everest - path: FStar/.github/everest - - name: Install .NET SDK - uses: actions/setup-dotnet@v4 - with: - dotnet-version: '6.0.x' - - name: Setup dependencies - run: ./FStar/.github/setup-macos.sh - - name: Build FStar - run: | - source FStar/.github/env.sh - make -j -C FStar all - - name: Package FStar - run: | - source FStar/.github/env.sh - PACKAGE_DOCS=0 make -j -C FStar package - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: fstar-Darwin_x86_64.tar.gz - path: FStar/src/ocaml-output/fstar.tar.gz diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml new file mode 100644 index 00000000000..7cb42c1062f --- /dev/null +++ b/.github/workflows/nightly.yml @@ -0,0 +1,34 @@ +name: F* nightly build + +on: + schedule: + - cron: '0 0 * * *' + workflow_dispatch: + +jobs: + build-all: + uses: ./.github/workflows/build-all.yml + + publish: + runs-on: ubuntu-latest + needs: build-all + steps: + - name: Set up git + run: | + git config --global user.name "Dzomo, the Everest Yak" + git config --global user.email "24394600+dzomo@users.noreply.github.com" + + - uses: actions/download-artifact@v4 + with: + path: artifacts + merge-multiple: true + # ^ Download all artifacts into the same dir. + # Each of them is a single file, so no clashes happen. 
+ + - name: Publish artifacts in nightly tag + run: | + # try to create the release, then upload + gh release create -R ${{ github.repository }} nightly || true + gh release upload --clobber -R ${{ github.repository }} nightly artifacts/fstar-* + env: + GH_TOKEN: ${{ github.token }} diff --git a/.github/workflows/nix.yaml b/.github/workflows/nix.yml similarity index 59% rename from .github/workflows/nix.yaml rename to .github/workflows/nix.yml index e2c4adf1fd2..d51993fb0c1 100644 --- a/.github/workflows/nix.yaml +++ b/.github/workflows/nix.yml @@ -1,13 +1,15 @@ -# This workflow tests the Nix build of F*. We run it only for PRs (not -# on every push) and we use Github hosted runners. - -name: Nix Build +name: Build F* (Nix) on: - pull_request: + workflow_dispatch: + workflow_call: + +defaults: + run: + shell: bash jobs: - nix-build: + fstar-nix: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f2b92927bfc..1d2a7031ca2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,72 +1,81 @@ -name: FStar Binary Release +name: Create F* release and publish on: workflow_dispatch: + inputs: + dry_run: + description: 'Dry run: just build, do not publish' + default: false + type: boolean -jobs: - - build-linux: +env: + DRY_RUN: ${{ github.event.inputs.dry_run || false }} - runs-on: [self-hosted, linux, X64] +jobs: + # Bump version number beforehand? I don't think the action can push + # that to master (easily). Just remember to do so manually. + # At least check for it, so we don't run this whole thing and fail + # for an existing tag. + pre: + runs-on: ubuntu-latest steps: - - name: Check out repo - uses: actions/checkout@v4 - - name: Package and release FStar - run: | - ci_docker_image_tag=fstar-release:local-run-$GITHUB_RUN_ID-$GITHUB_RUN_ATTEMPT - ci_docker_builder=builder_fstar-release_${GITHUB_RUN_ID}_${GITHUB_RUN_ATTEMPT} - docker buildx create --name $ci_docker_builder --driver-opt env.BUILDKIT_STEP_LOG_MAX_SIZE=500000000 - docker buildx build --builder $ci_docker_builder --pull --load --secret id=DZOMO_GITHUB_TOKEN -t $ci_docker_image_tag -f .docker/release.Dockerfile . - env: - DZOMO_GITHUB_TOKEN: ${{ secrets.DZOMO_GITHUB_TOKEN }} - - name: Remove the builder if created - if: ${{ always() }} - run: | - docker buildx rm -f $ci_docker_builder || true + - uses: actions/checkout@v4 + - run: | + git fetch --tags + V="v$(cat version.txt)" + if git tag -l "$V" | grep -q .; then + echo "::error::Version $V already exists (as a tag). Bump the version number in version.txt before running this workflow." 
>&2 + false + fi - build-macos: - - needs: build-linux - - runs-on: macos-latest + build-all: + needs: pre + uses: ./.github/workflows/build-all.yml + publish: + runs-on: ubuntu-latest + needs: build-all steps: - - name: Checkout FStar - uses: actions/checkout@v4 - with: - path: FStar - - name: Checkout everest - uses: actions/checkout@v4 - with: - repository: project-everest/everest - path: FStar/.github/everest - - name: Install .NET SDK - uses: actions/setup-dotnet@v4 - with: - dotnet-version: '6.0.x' - - name: Setup dependencies - run: ./FStar/.github/setup-macos.sh - - name: Package and release FStar - run: | - source FStar/.github/env.sh - CI_THREADS=24 FSTAR_SKIP_PACKAGE_TEST=1 FStar/.scripts/release.sh - env: - GH_TOKEN: ${{ secrets.DZOMO_GITHUB_TOKEN }} + - name: Set up git + run: | + git config --global user.name "Dzomo, the Everest Yak" + git config --global user.email "24394600+dzomo@users.noreply.github.com" - build-windows: + - uses: actions/download-artifact@v4 + with: + path: artifacts + merge-multiple: true + # ^ Download all artifacts into the same dir. + # Each of them is a single file, so no clashes happen. - needs: build-linux + - uses: actions/checkout@v4 + with: + path: FStar + - name: Rename packages to have version number + run: | + V="v$(cat FStar/version.txt)" + for file in artifacts/fstar-*; do + mv "$file" "${file/fstar-/fstar-$V-}" + done - runs-on: [self-hosted, Windows, X64, opam-2-3] + - name: Publish release + if: env.DRY_RUN != 'true' + working-directory: FStar + env: + GH_TOKEN: ${{ github.token }} + run: | + V=$(cat version.txt) + # --target with a specific ShA makes sure that if master + # advanced while we were running, the release is still created + # at the commit where this workflow started. Note however + # that it seems this workflow seems to fail when trying + # to tag something other than the lates commit on master + # (probably some Github config should be changed).. but that's + # preferable to silently tagging something untested. - steps: - - name: Check out repo - uses: actions/checkout@v4 - - - name: Package and release FStar - shell: C:\cygwin64\bin\bash.exe --login '{0}' - run: | - eval $(opam env) && CC=x86_64-w64-mingw32-gcc.exe FSTAR_COMMIT=$GITHUB_SHA CI_THREADS=24 $GITHUB_WORKSPACE/.scripts/release.sh && echo "There is a CR at the end of this line" - env: - GH_TOKEN: ${{ secrets.DZOMO_GITHUB_TOKEN }} + gh release create --prerelease \ + --generate-notes \ + --target ${{ github.sha }} \ + -t "F* v$V" \ + "v$V" ../artifacts/fstar-* diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000000..64d636c40c5 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,176 @@ +name: F* tests + +# This workflow must be called ONLY after a run of build.yml +on: + workflow_call: + +defaults: + run: + shell: bash + + +jobs: + # This checks that the stage3 extracted fstarc matches exactly with + # stage2. Also, we build the F# library (it is not packaged, so not + # built by the job above) + check-stage3: + runs-on: ubuntu-latest + container: mtzguido/dev-base + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + # We download the full repo state from the step below and start a + # stage3 check. This allows the rest of the workflow to go ahead + # while we do this. 
+ - uses: mtzguido/gci-download@master + with: + name: fstar-repo + + - name: Check for a stage 3 diff and F# library build + run: make -skj$(nproc) stage3-diff lib-fsharp + working-directory: FStar + + # Download the stage2 binary package from the previous job and run the + # tests in the repo. This makes sure that the tests do not depend on + # some random internal state of the repo, but only on out/. + # + # We could be paranoid and rm -rf ulib too, it should not make a + # difference. + # + # We could also run all tests over the stage 1 compiler. They should + # all pass, just like for stage2. The compiler used to build fstar.exe + # itself should not matter. I am commenting this out just to save on + # CI usage, and getting a nicer display of the workflow run. Ideally + # there would be a toggle for calling this workflow to check the + # stage1, but it's really clunky to tweak a matrix in Github actions. + test-local: + # strategy: + # matrix: + # pak: + # - fstar.tar.gz + # - fstar-stage1.tar.gz + # runs-on: [self-hosted, linux, X64] # self-hosted so we use fast runners + runs-on: ubuntu-latest + container: mtzguido/dev-base + steps: + - name: Cleanup + run: sudo find . -delete + - run: echo "HOME=/home/user" >> $GITHUB_ENV + - uses: mtzguido/set-opam-env@master + + - name: Checkout + uses: actions/checkout@master + + - name: Get fstar package + uses: actions/download-artifact@v4 + with: + # name: ${{ matrix.pak }} + name: fstar.tar.gz + + - name: Set up package locally + run: tar xzf fstar.tar.gz && ln -s fstar out + + - name: Run tests, without forcing a build + run: make -skj$(nproc) _test + + binary-smoke: + strategy: + matrix: + pak: + - fstar.tar.gz + # - fstar-stage1.tar.gz + # ^ See note in test-local. + os: + # - ubuntu-20.04 + # - ubuntu-22.04 + - ubuntu-24.04 + # - ubuntu-latest + # FIXME: the container builds with a recent glibc, use an older + # base system to get a more portable executable. + runs-on: ${{ matrix.os }} + steps: + - uses: cda-tum/setup-z3@main + with: + version: 4.8.5 + + - name: Get fstar package + uses: actions/download-artifact@v4 + with: + name: ${{ matrix.pak }} + + - run: tar xzf ${{ matrix.pak }} + + - name: Smoke test + run: | + ./fstar/bin/fstar.exe fstar/lib/fstar/ulib/Prims.fst -f + echo -e "module A\nopen FStar.Mul\nlet _ = assert (forall x. 1 + x*x > 0)" > A.fst + ./fstar/bin/fstar.exe A.fst + + ocaml-smoke: + strategy: + matrix: + pak: + - fstar-src.tar.gz + # - fstar-stage1-src.tar.gz + # ^ See note in test-local. + os: + - ubuntu-20.04 + - ubuntu-22.04 + - ubuntu-24.04 + - ubuntu-latest + exclude: + - os: ubuntu-24.04 # setup-ocaml fails due to darcs https://github.com/ocaml/setup-ocaml/issues/872 + runs-on: ${{ matrix.os }} + steps: + # TODO: Install both 4.8.5 and 4.13.1 + - uses: cda-tum/setup-z3@main + with: + version: 4.8.5 + + - name: Get fstar package + uses: actions/download-artifact@v4 + with: + name: ${{ matrix.pak }} + - run: tar xzf ${{ matrix.pak }} + + - name: Set-up OCaml + uses: ocaml/setup-ocaml@v3 + with: + ocaml-compiler: 4.14.2 + + - run: opam install . --deps-only --with-test + working-directory: fstar + # Note: we admit queries here, like the OPAM build does. + - run: eval $(opam env) && make -kj$(nproc) ADMIT=1 + working-directory: fstar + + - name: Smoke test + run: | + ./out/bin/fstar.exe out/lib/fstar/ulib/Prims.fst -f + echo -e "module A\nopen FStar.Mul\nlet _ = assert (forall x. 
1 + x*x > 0)" > A.fst + ./out/bin/fstar.exe A.fst + working-directory: fstar + + perf-canaries: + runs-on: ubuntu-24.04 + steps: + - name: Checkout + uses: actions/checkout@master + + - uses: cda-tum/setup-z3@main + with: + version: 4.8.5 + + - name: Get fstar package + uses: actions/download-artifact@v4 + with: + name: fstar.tar.gz + + - run: tar xzf fstar.tar.gz + + - name: Run perf canaries + run: .scripts/perf_canaries.sh ./bin/fstar.exe + working-directory: fstar diff --git a/.gitignore b/.gitignore index e68737386ab..9286401efd7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,15 @@ +/stage1/out +/stage2/out +/stage3 +/out +/fstar*.tar.gz +/_pak* +/_srcpak* +_build +# ^ build output +/bin/fstar.exe +/bin/get_fstar_z3.sh + *~ .#* \#*\# @@ -6,55 +18,17 @@ *.suo *.smt2 *.sav -*.hints.fsval -*.hints.mlval -*.bench dump* cache/ -/VS/packages - -/src/parser/parse.fs -/src/parser/lex.fs - -/src/boot - -/bin/*.dll -/bin/*.pdb -/bin/*.mdb -/bin/*.xml -/bin/*.config -/bin/fstar.exe -/bin/fstar.ocaml -/bin/tests.exe -/bin/z3.exe -/bin/z3-x86.exe -/bin/z3-x64.exe -/src/*/obj /lib -examples/*/*.ml -examples/crypto/CntProtocol.exe -examples/wysteria/ocaml-output/* - -tests/*/*.ml tests/incl/*.neg/error.log -_build/ -/ulib/fs/obj -/ulib/fs/bin - -/release - -*.native +*.exe *.byte queries*.smt2 -transcript -dump - -# ignore .ml files on the root dir -/*.ml *.cmi *.cmo @@ -63,41 +37,19 @@ dump *.cmxs *.cmxa *.o -fstar.install -._fstar.install -.depend -.depend.rsp -._depend -*.a -*.cmxa -*.so -*.annot -/VS/ +.depend* tags *~ *.swp -*.exe *.checked *.checked.lax *.dump *.runlim -/.nubuild -/nucache/ -/nuobj/ -nubuild.log -nubuild.progress - -# Z3/F* nightly/weekly files -/nightly - -# Nuget packages -nuget/ - # devcontainer temp files /.devcontainer_build.log diff --git a/.ignore b/.ignore index 7f42beff97c..4a9485031fe 100644 --- a/.ignore +++ b/.ignore @@ -1,2 +1,70 @@ *.fst.hints -src/ocaml-output/* + +# Ignore stage0, we mostly don't care about it +stage0 + +# This is a list of all symlinks in the repo, generated by +# git ls-files | while read i; do [ -L "$i" ] && echo $i; done +# having this here simplifies search results significantly +mk/fstar-01.mk +stage1/Makefile +stage1/dune/fstar-guts/FStarC_Parser_Parse.mly +stage1/dune/fstar-guts/app +stage1/dune/fstar-guts/bare +stage1/dune/fstar-guts/fstarc.ml +stage1/dune/fstar-plugins/app +stage1/dune/fstar-plugins/full +stage1/dune/fstar-plugins/plugin +stage1/dune/fstar-plugins/plugins.ml +stage1/dune/fstarc-full/main.ml +stage1/dune/libapp/app +stage1/dune/libapp/app-extra +stage1/dune/libapp/ulib.ml +stage1/dune/libplugin/app +stage1/dune/libplugin/app-extra +stage1/dune/libplugin/full +stage1/dune/libplugin/plugin +stage1/dune/libplugin/ulib.pluginml +stage1/ulib +stage1/version.txt +stage2/Makefile +stage2/dune/fstar-guts/FStarC_Parser_Parse.mly +stage2/dune/fstar-guts/app +stage2/dune/fstar-guts/bare +stage2/dune/fstar-guts/fstarc.ml +stage2/dune/fstar-plugins/app +stage2/dune/fstar-plugins/full +stage2/dune/fstar-plugins/plugin +stage2/dune/fstar-plugins/plugins.ml +stage2/dune/fstarc-full/main.ml +stage2/dune/libapp/app +stage2/dune/libapp/app-extra +stage2/dune/libapp/ulib.ml +stage2/dune/libplugin/app +stage2/dune/libplugin/app-extra +stage2/dune/libplugin/full +stage2/dune/libplugin/plugin +stage2/dune/libplugin/ulib.pluginml +stage2/ulib +stage2/version.txt +tests/incl/field.pos/Makefile +tests/incl/noshadow.neg/Makefile +tests/incl/noshadow.pos/Makefile +tests/incl/open.neg/Makefile +tests/incl/open.pos/Makefile 
+tests/incl/plain.neg/Makefile +tests/incl/plain.pos/Makefile +tests/incl/private.neg/Makefile +tests/incl/private.pos/Makefile +tests/incl/shadow.neg/Makefile +tests/incl/shadow.pos/Makefile +tests/incl/trans.neg/Makefile +tests/incl/trans.pos/Makefile +tests/struct/array.pos/Makefile +tests/struct/disjoint.pos/Makefile +tests/struct/jsonparser.pos/Makefile +tests/struct/nested.pos/Makefile +tests/struct/point-with-nesting.pos/Makefile +tests/struct/point.pos/Makefile +tests/struct/taggedunion.pos/Makefile +tests/struct/union.pos/Makefile diff --git a/.nix/bootstrap.nix b/.nix/bootstrap.nix deleted file mode 100644 index 374dbc7c492..00000000000 --- a/.nix/bootstrap.nix +++ /dev/null @@ -1,25 +0,0 @@ -{ fstar, fstar-dune, fstar-ocaml-snapshot, fstar-ulib, stdenv }: - -let - ocaml-src = stdenv.mkDerivation { - name = "src"; - src = fstar-ocaml-snapshot; - dontBuild = true; - installPhase = '' - mkdir -p $out/ocaml - mv ./* $out/ocaml - cp ${../version.txt} $out/version.txt - ''; - }; - fstar-dune-bootstrap = fstar-dune.overrideAttrs (_: { - pname = "fstar-bootstrap-dune"; - src = ocaml-src; - }); - fstar-ulib-bootstrap = (fstar-ulib.override - (_: { fstar-dune = fstar-dune-bootstrap; })).overrideAttrs - (_: { pname = "fstar-bootstrap-ulib"; }); - -in (fstar.override (_: { - fstar-dune = fstar-dune-bootstrap; - fstar-ulib = fstar-ulib-bootstrap; -})).overrideAttrs (_: { pname = "fstar-bootstrap"; }) diff --git a/.nix/fstar.nix b/.nix/fstar.nix index 5ef0a34bd22..1d22a47f23e 100644 --- a/.nix/fstar.nix +++ b/.nix/fstar.nix @@ -1,31 +1,72 @@ -{ callPackage, fstar-dune, fstar-ulib, installShellFiles, lib, makeWrapper -, stdenv, version, z3 }: +{ callPackage, installShellFiles, lib, makeWrapper, buildDunePackage, version, z3, bash, + batteries, + menhir, + menhirLib, + pprint, + ppx_deriving, + ppx_deriving_yojson, + ppxlib, + process, + sedlex, + stdint, + yojson, + zarith, + memtrace, + mtime } : -stdenv.mkDerivation { +buildDunePackage { pname = "fstar"; inherit version; - buildInputs = [ installShellFiles makeWrapper ]; + duneVersion = "3"; + + nativeBuildInputs = [ installShellFiles makeWrapper menhir ]; + + buildInputs = [ + batteries + menhir + menhirLib + pprint + ppx_deriving + ppx_deriving_yojson + ppxlib + process + sedlex + stdint + yojson + zarith + memtrace + mtime + ]; + + enableParallelBuilding = true; + + prePatch = '' + patchShebangs .scripts/*.sh + patchShebangs ulib/ml/app/ints/mk_int_file.sh + ''; src = lib.sourceByRegex ./.. [ - ".common.mk" - "doc.*" - "examples.*" - "src(/ocaml-output(/Makefile)?)?" 
- "contrib.*" + "Makefile" + "src.*" "mk.*" + "stage..*" + "ulib.*" + "doc.*" + "version.txt" + ".scripts.*" # Mostly here for get_fstar_z3.sh + "LICENSE.*" + "README.md" + "INSTALL.md" ]; - dontBuild = true; + buildPhase = '' + export PATH="${z3}/bin:$PATH" + make -j$(nproc) + ''; installPhase = '' - mkdir $out - - CP="cp -r --no-preserve=mode" - $CP ${fstar-dune}/* $out - $CP ${fstar-ulib}/* $out - - PREFIX=$out make -C src/ocaml-output install-sides + PREFIX=$out make install for binary in $out/bin/* do diff --git a/.scripts/fstardoc/README.md b/.scripts/fstardoc/README.md index 800e8294326..acbb353d8de 100644 --- a/.scripts/fstardoc/README.md +++ b/.scripts/fstardoc/README.md @@ -19,3 +19,27 @@ make ``` make update-to-latest ``` + +## Makefile + +The snippet below used to be in ulib/Makefile + + DOC_FILES=Prims.fst FStar.Pervasives.Native.fst FStar.Pervasives.fst \ + FStar.Squash.fsti FStar.Classical.fsti FStar.BigOps.fsti \ + FStar.BitVector.fst FStar.BV.fsti \ + FStar.Char.fsti FStar.Date.fsti FStar.DependentMap.fsti \ + FStar.Dyn.fsti FStar.Exn.fst FStar.Fin.fst FStar.Float.fsti \ + FStar.FunctionalExtensionality.fsti FStar.Float.fsti \ + FStar.Ghost.fsti FStar.IFC.fsti FStar.IndefiniteDescription.fst \ + FStar.UInt8.fst FStar.UInt16.fst FStar.UInt32.fst FStar.UInt64.fst + + DOC_DIR=./doc + + fstardoc: $(DOC_DIR) $(addprefix $(DOC_DIR)/, $(addsuffix .md, $(DOC_FILES))) + + $(DOC_DIR): + mkdir -p $@ + + $(DOC_DIR)/%.md: % + ../bin/fstar --print_in_place $^ + python3 ../.scripts/fstardoc/fstardoc.py $^ > $@ diff --git a/bin/get_fstar_z3.sh b/.scripts/get_fstar_z3.sh similarity index 100% rename from bin/get_fstar_z3.sh rename to .scripts/get_fstar_z3.sh diff --git a/.scripts/make_fstar_version.sh b/.scripts/make_fstar_version.sh new file mode 100755 index 00000000000..d4ebe49b567 --- /dev/null +++ b/.scripts/make_fstar_version.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +if [[ -z "$FSTAR_VERSION" ]]; then + FSTAR_VERSION=$(head -n 1 version.txt)~dev +fi + +if [ "$OS" = "Windows_NT" ] +then + if [ "$PROCESSOR_ARCHITECTURE" = "AMD64" ] + then + PLATFORM="Windows_x64" + else + PLATFORM="Windows_x86" + fi +else + PLATFORM="$(uname)_$(uname -m)" +fi +COMPILER="OCaml $(ocamlc -version)" +# If a system does not have git, or we are not in a git repo, fallback with "unset" +if [[ -z "$FSTAR_COMMIT" ]] ; then + FSTAR_COMMIT=$(git describe --match="" --always --abbrev=40 --dirty 2>/dev/null || echo unset) +fi +COMMITDATE=$(git log --pretty=format:%ci -n 1 2>/dev/null || echo unset) + +echo "let dummy () = ();;" +echo "FStarC_Options._version := \"$FSTAR_VERSION\";;" +echo "FStarC_Options._platform := \"$PLATFORM\";;" +echo "FStarC_Options._compiler := \"$COMPILER\";;" +# We deliberately use commitdate instead of date, so that rebuilds are no-ops +echo "FStarC_Options._date := \"$COMMITDATE\";;" +echo "FStarC_Options._commit:= \"$FSTAR_COMMIT\";;" diff --git a/.scripts/perf_canaries.sh b/.scripts/perf_canaries.sh new file mode 100755 index 00000000000..c8cc540776e --- /dev/null +++ b/.scripts/perf_canaries.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +set -eu +set -o pipefail + +FSTAR=$1 + +grab_time () { + sed -n 's/real \([0-9.]*\)/\1/p' +} + +t_defs () { + rm -f M.fst + echo 'module M' >>M.fst + for i in $(seq 1 $1); do + echo "let x$i = 1" >>M.fst + done + /usr/bin/time -p ${FSTAR} M.fst 2>&1 | tee output + T=$(cat output | grab_time) + echo "::notice file=DEFS_$i::time = $T" +} + +t_defs 100 +t_defs 200 +t_defs 400 +t_defs 800 +t_defs 1600 +t_defs 3200 +t_defs 6400 diff --git a/bin/run_benchmark.sh 
b/.scripts/run_benchmark.sh similarity index 100% rename from bin/run_benchmark.sh rename to .scripts/run_benchmark.sh diff --git a/.scripts/setup_nightly.sh b/.scripts/setup_nightly.sh new file mode 100755 index 00000000000..2ea2faf5768 --- /dev/null +++ b/.scripts/setup_nightly.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +set -euo pipefail + +kernel="$(uname -s)" +case "$kernel" in + CYGWIN*) kernel=Windows ;; +esac + +arch="$(uname -m)" +case "$arch" in + arm64) arch=aarch64 ;; +esac + +URL="https://github.com/FStarLang/FStar/releases/download/nightly/fstar-$kernel-$arch.tar.gz" +FILE="$(basename "$URL")" + +# Get artifact +wget "$URL" -O "$FILE" + +# Warn if too old (over 48 hours) +S_NOW=$(date +%s) +S_FILE=$(stat "$FILE" -c '%Y') +if [[ $((S_NOW - S_FILE)) -gt $((48 * 60 * 60)) ]]; then + echo "Warning: downloaded package seems old" >&2 + echo "Modification date: $(stat "$FILE" -c '%y')" >&2 +fi + +# Untar +rm -rf out +mkdir out +tar xzf "$FILE" -C out +rm "$FILE" + +echo Done. diff --git a/.scripts/src-install.sh b/.scripts/src-install.sh new file mode 100755 index 00000000000..754f2ace987 --- /dev/null +++ b/.scripts/src-install.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +set -eu + +if [ $# -ne 2 ]; then + echo "Usage: $0 " >&2 + exit 1 +fi + +if [ -e "${PREFIX}" ]; then + echo "Destination directory already exists: ${PREFIX}" + exit 1 +fi + +mkdir -p "${PREFIX}" + +BROOT="$(realpath "$1")" +PREFIX="$(realpath "$2")" + +# Note: we must exclude everything in the Dune build directories, since +# if some files "vanish" during this copy, rsync will fail (even if +# ignored). We could also copy everything over and then remove the +# leftovers, but cp -r will also abort if some file disappears just +# before it tries to copy it. This seems robust. +rsync -r --copy-links \ + --delete-excluded \ + --delete-after \ + --filter="- **/*.checked" \ + --filter="- **/*.checked.lax" \ + --filter="- **/_build" \ + --filter="- **/*.o" \ + --filter="- **/*.a" \ + --filter="- **/*.exe" \ + --filter="- **/*.cm*" \ + --filter="- **/*.*depend*" \ + --filter="- /out" \ + --filter="- /.gitignore" \ + "${BROOT}/" "${PREFIX}/" + +cp .scripts/get_fstar_z3.sh "${PREFIX}/get_fstar_z3.sh" +cp fstar.opam "${PREFIX}/fstar.opam" +cp mk/src_package_mk.mk "${PREFIX}/Makefile" +mkdir "${PREFIX}/mk" +cp mk/lib.mk "${PREFIX}/mk/lib.mk" +cp mk/common.mk "${PREFIX}/mk/common.mk" diff --git a/.scripts/z3_nightly.py b/.scripts/z3_nightly.py deleted file mode 100755 index fb119b5bd64..00000000000 --- a/.scripts/z3_nightly.py +++ /dev/null @@ -1,309 +0,0 @@ -#!/usr/bin/env python - -import os -import re -import io -import subprocess -import sys -import time -import smtplib -import glob -import shutil -import traceback -import zipfile -import platform -import stat - -PLATFORMS = [ "x64-osx", "x86-ubuntu", "x64-ubuntu", "x64-debian", "x86-win", "x64-win" ] -REQUIRE_ALL_PLATFORMS = True - -FSTAR_BIN_URL = "https://github.com/FStarLang/binaries.git" -FSTAR_BIN_LOCAL = os.path.join("nightly", "fstar-binaries") -FSTAR_BIN_SUBDIR = "z3-tested" -FSTAR_BIN_RBRANCH = "origin/master" - -Z3_BIN_URL = "https://github.com/Z3Prover/bin.git" -Z3_BIN_LOCAL = os.path.join("nightly", "z3-binaries") -Z3_BIN_SUBDIR = "nightly" -Z3_BIN_RBRANCH = "origin/master" - -Z3_PKG_NAME_PAT = re.compile("^z3-([0-9].[0-9].[0-9]).([a-z0-9]{12})-(x86|x64)-([a-zA-Z]*)-?([\.0-9]*).zip$") - -Z3_DIR = os.path.join("nightly", "z3") - -class Z3NightlyException(Exception): - pass - -def get_platform(): - z3bn = "z3" - s = platform.system() - a, fmt = platform.architecture() - - z3a = 
"x64" if a == "64bit" else "x86" - - if s == "Windows" or s.startswith("CYGWIN") or s.startswith("MSYS"): - z3bn = "z3.exe" - z3s = "win" - elif s == "Linux": - d, v, nn = platform.linux_distribution() - if d == "Ubuntu": - z3s = "ubuntu" - elif d == "Debian": - z3s = "debian" - else: - print("Warning: unknown linux distribution '%s', assuming Ubuntu." % d) - z3s = "ubuntu" - elif s == "Darwin": - z3s = "osx" - else: - print("Warning: unknown system '%s', assuming Ubuntu." % s) - return "ubuntu", "x64" - - return z3bn, z3s, z3a - -def mk_args(cmd): - css = cmd.split(" ") - in_string = False - cs = [] - cur = "" - for i in css: - if not in_string and i.startswith("\"") and i.endswith("\""): - cs = cs + [i[1:-1]] - elif not in_string and i.startswith("\""): - in_string = True - cur = i[1:] - elif in_string and i.endswith("\""): - in_string = False - cur = cur + " " + i - cs = cs + [cur[:-1]] - elif in_string: - cur = cur + " " + i - elif i == "": - pass - else: - cs = cs + [i] - return cs - -def call_logged(cmd, log, checked=True): - cs = mk_args(cmd) - # log.write(">>>>> " + " ".join(cs) + " <<<<<\n") - ec = subprocess.call(cs, stdin=None, stdout=log, stderr=log) - log.flush() - if (checked and ec != 0): - log.write("Error code: %d\n" % ec) - raise Z3NightlyException("Command failed.") - -def call_with_output(cmd): - cs = mk_args(cmd) - # log.write(">>>>> " + " ".join(cs) + " <<<<<\n") - p = subprocess.Popen(cs, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - if err is None or err == "": - return out - else: - return out + " " + err - -def update_git(url, branch, dir, subdir, log, quiet=False): - q = "--quiet" if quiet else "" - v = "--verbose" if not quiet else "" - if not os.path.isdir(dir): - call_logged("git clone %s %s %s" % (q, url, dir), log) - else: - prev_wd = os.getcwd() - os.chdir(dir) - call_logged("git reset %s --hard %s" % (q, branch), log) - call_logged("git clean %s -f" % (q), log) - call_logged("git pull %s -s recursive -Xtheirs" % (q), log) - call_logged("git reset %s --hard %s" % (q, branch), log) - os.chdir(prev_wd) - sp = os.path.join(dir, subdir) - if not os.path.isdir(sp): - os.mkdir(sp) - -def find_latest_binary(dir, pattern, log): - best_offer = None - for f in os.listdir(dir): - m = pattern.match(f) - if m is not None: - fp = os.path.join(dir, f) - mt = call_with_output("git log -n 1 --date-order --date=raw --pretty=format:\"%cd\"").strip("\"").split(" ")[0] - if best_offer == None or mt > best_offer[1]: - version, git_hash, bitness, platform, pversion = m.groups() - best_offer = (fp, mt, version, git_hash, bitness, platform, pversion) - return best_offer - -def find_specific_binary(dir, pattern, version, git_hash, log): - for f in os.listdir(dir): - m = pattern.match(f) - if m is not None: - fp = os.path.join(dir, f) - fversion, fgit_hash, bitness, platform, pversion = m.groups() - if (version == None or fversion == version) and fgit_hash == git_hash: - return (fp, None, version, git_hash, bitness, platform, pversion) - return None - -def get_platform_files(from_path, version, git_hash, platforms): - res = [] - for pf in platforms: - fnpat = "z3-" + version + "." + git_hash + "-" + pf + "*.zip" - pp = os.path.join(from_path, fnpat) - matching_files = glob.glob(pp) - if REQUIRE_ALL_PLATFORMS == True and len(matching_files) == 0: - raise Z3NightlyException("No platform files for '%s' with version=%s and git_hash=%s." 
% (pf, version, git_hash)) - elif len(matching_files) > 0: - res.append(matching_files[0]) - return res - -def pick_better(old, new, from_path, pattern, platforms): - if (old is None and new is not None) or (new[3] != old[3] and new[1] > old[1]): - return get_platform_files(from_path, new[2], new[3], platforms) - return None - -def wipe_old_pkgs(to_repo, to_subdir, pattern, log): - prev_dir = os.getcwd() - os.chdir(to_repo) - for f in os.listdir(to_subdir): - if pattern.match(f) is not None: - call_logged('git rm "%s/%s"' % (to_subdir, f), log) - os.chdir(prev_dir) - -def add_new_pkgs(files, to_repo, to_subdir, pattern, log): - prev_dir = os.getcwd() - os.chdir(to_repo) - for f in files: - f_to_path = os.path.join(to_subdir, os.path.basename(f)) - shutil.copy2(os.path.join(prev_dir, f), f_to_path) - call_logged('git add -v %s' % f_to_path, log) - call_logged('git commit -v --amend -m "Automatic update of Z3 nightly packages."', log) - call_logged('git gc --aggressive --auto --prune=all', log) - call_logged('git push -v --force', log) - os.chdir(prev_dir) - -def empty(d): - if not os.path.isdir(d): - os.mkdir(d) - - for old_file in os.listdir(d): - ofp = os.path.join(d, old_file) - if os.path.isdir(ofp): - shutil.rmtree(ofp) - else: - os.remove(ofp) - -def push(version, git_hash, log): - wd = os.getcwd() - try: - old_bin_path = os.path.join(FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR) - new_bin_path = os.path.join(Z3_BIN_LOCAL, Z3_BIN_SUBDIR) - update_git(FSTAR_BIN_URL, FSTAR_BIN_RBRANCH, FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR, log) - - if git_hash == None: - update_git(Z3_BIN_URL, Z3_BIN_RBRANCH, Z3_BIN_LOCAL, Z3_BIN_SUBDIR, log) - best_old = find_latest_binary(old_bin_path, Z3_PKG_NAME_PAT, log) - best_new = find_latest_binary(new_bin_path, Z3_PKG_NAME_PAT, log) - better = pick_better(best_old, best_new, new_bin_path, Z3_PKG_NAME_PAT, PLATFORMS) - if better is not None: - wipe_old_pkgs(FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR, Z3_PKG_NAME_PAT, log) - add_new_pkgs(better, FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR, Z3_PKG_NAME_PAT, log) - else: - sb = find_specific_binary(new_bin_path, Z3_PKG_NAME_PAT, version, git_hash, log) - if sb == None: - raise Z3NightlyException("Z3 packages with git hash '%s' not found." 
% git_hash) - else: - pfiles = get_platform_files(new_bin_path, version, git_hash, PLATFORMS) - wipe_old_pkgs(FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR, Z3_PKG_NAME_PAT, log) - add_new_pkgs(pfiles, FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR, Z3_PKG_NAME_PAT, log) - pass - os.chdir(wd) - return 0 - except Exception as ex: - os.chdir(wd) - traceback.print_exc(log) - log.close() - return 1 - -def get(binary_name, platform, bitness, log, Tested=True): - wd = os.getcwd() - try: - if Tested: - bsdir = os.path.join(FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR) - update_git(FSTAR_BIN_URL, FSTAR_BIN_RBRANCH, FSTAR_BIN_LOCAL, FSTAR_BIN_SUBDIR, log, quiet=True) - else: - bsdir = os.path.join(Z3_BIN_LOCAL, Z3_BIN_SUBDIR) - update_git(Z3_BIN_URL, Z3_BIN_RBRANCH, Z3_BIN_LOCAL, Z3_BIN_SUBDIR, log, quiet=True) - - empty(Z3_DIR) - for f in os.listdir(bsdir): - m = Z3_PKG_NAME_PAT.match(f) - if m is not None: - fp = os.path.join(bsdir, f) - version, git_hash, fbitness, fplatform, pversion = m.groups() - if fplatform == platform and fbitness == bitness: - zfn = os.path.join(bsdir, f) - # print("Extracting Z3 from %s" % zfn) - with zipfile.ZipFile(zfn, "r") as zf: - zf.extractall(Z3_DIR) - break - - Z3_BINARY_PATH = "" - for root, dirs, files in os.walk(Z3_DIR): - for f in files: - fp = os.path.join(root, f) - if f == binary_name: - Z3_BINARY_PATH = fp - if f.endswith("dll"): - os.chmod(fp, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) # Cygwin wants +x on dlls. - - if not os.path.isfile(Z3_BINARY_PATH): - raise Z3NightlyException("Z3 not where it should be.") - else: - print("%s" % Z3_BINARY_PATH) - - os.chmod(Z3_BINARY_PATH, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - - os.chdir(wd) - return 0 - except Exception as ex: - os.chdir(wd) - traceback.print_exc(log) - log.close() - return 1 - -def print_help(): - print("Usage: %s (get-tested|get-latest|push)" % sys.argv[0]) - -if __name__ =='__main__': - if len(sys.argv) < 2: - print_help() - sys.exit(1) - else: - r = 1 - log = sys.stdout - op = sys.argv[1] - if op == "get-tested": - bn, pfm, bits = get_platform() - if len(sys.argv) >= 3: - pfm = sys.argv[2] - if len(sys.argv) >= 4: - bits = sys.argv[3] - r = get(bn, pfm, bits, log, Tested=True) - elif op == "get-latest": - bn, pfm, bits = get_platform() - if len(sys.argv) >= 3: - pfm = sys.argv[2] - if len(sys.argv) >= 4: - bits = sys.argv[3] - r = get(bn, pfm, bits, log, Tested=False) - elif op == "push": - version = None - git_hash = None - if len(sys.argv) >= 3: - version = sys.argv[2] - if len(sys.argv) >= 4: - git_hash = sys.argv[3] - r = push(version, git_hash, log) - else: - print("Error: Unknown operation '" + op + "'") - print_help() - r = 1 - sys.exit(r) diff --git a/FStar.fst.config.json b/FStar.fst.config.json index 6b87318bc39..cc5e107159f 100644 --- a/FStar.fst.config.json +++ b/FStar.fst.config.json @@ -1,15 +1,9 @@ { - "fstar_exe": "./bin/fstar.exe", + "fstar_exe": "./out/bin/fstar.exe", "options": [ - "--cache_dir", ".cache", - "--ext", "context_pruning", - "--z3version", "4.13.3" + "--z3version", "4.13.3", + "--ext", "context_pruning" ], "include_dirs": [ - "ulib/", - "ulib/experimental", - "ulib/legacy", - "examples/data_structures", - "examples/layeredeffects" ] } diff --git a/Makefile b/Makefile index 85db3846ff4..924e6eef076 100644 --- a/Makefile +++ b/Makefile @@ -1,205 +1,521 @@ -include .common.mk - -.PHONY: all -all: build-and-verify-ulib - -DUNE_SNAPSHOT ?= $(call maybe_cygwin_path,$(CURDIR)/ocaml) - -# The directory where we install files when doing "make install". 
-# Overridden via the command-line by the OPAM invocation. -PREFIX ?= /usr/local - -# On Cygwin, the `--prefix` option to dune only -# supports Windows paths. -FSTAR_CURDIR=$(call maybe_cygwin_path,$(CURDIR)) - -FSTAR_BUILD_PROFILE ?= release - -.PHONY: fstar -fstar: - $(Q)cp version.txt $(DUNE_SNAPSHOT)/ - @# Call Dune to build the snapshot. - @echo " DUNE BUILD" - $(Q)cd $(DUNE_SNAPSHOT) && dune build --profile=$(FSTAR_BUILD_PROFILE) - @echo " DUNE INSTALL" - $(Q)cd $(DUNE_SNAPSHOT) && dune install --profile=$(FSTAR_BUILD_PROFILE) --prefix=$(FSTAR_CURDIR) - -.PHONY: verify-ulib -verify-ulib: - +$(Q)$(MAKE) -C ulib - -.PHONY: build-and-verify-ulib -build-and-verify-ulib: fstar - +$(Q)$(MAKE) verify-ulib - -# Removes all generated files (including the whole generated snapshot, -# and .checked files), except the object files, so that the snapshot -# can be rebuilt with an existing fstar.exe -.PHONY: clean-snapshot -clean-snapshot: clean-intermediate - $(call msg, "CLEAN SNAPSHOT") - $(Q)cd $(DUNE_SNAPSHOT) && { dune clean || true ; } - $(Q)rm -rf $(DUNE_SNAPSHOT)/fstar-lib/generated/* - $(Q)rm -f src/ocaml-output/fstarc/* - $(Q)rm -f src/ocaml-output/fstarlib/* - -.PHONY: dune-snapshot -dune-snapshot: - +$(Q)$(MAKE) -C src/ocaml-output dune-snapshot - -# This rule is not incremental, by design. -.PHONY: full-bootstrap -full-bootstrap: - +$(Q)$(MAKE) fstar - +$(Q)$(MAKE) clean-snapshot - +$(Q)$(MAKE) bootstrap - -.PHONY: bootstrap -bootstrap: - +$(Q)$(MAKE) dune-snapshot - +$(Q)$(MAKE) fstar - -# This is a faster version of bootstrap, since it does not use dune -# to install the binary and libraries, and instead just copies the binary -# mannualy. HOWEVER, note that this means plugins will not work well, -# since they are compiled against the objects in bin/, which will become -# stale if this rule is used. Using bootstrap is usually safer. -.PHONY: boot -boot: - +$(Q)$(MAKE) dune-snapshot - $(Q)cp version.txt $(DUNE_SNAPSHOT)/ - @# Call Dune to build the snapshot. - $(call msg, "DUNE BUILD") - $(Q)cd $(DUNE_SNAPSHOT) && dune build --profile release - $(call msg, "RAW INSTALL") - $(Q)install ocaml/_build/default/fstar/main.exe $(FSTAR_CURDIR)/bin/fstar.exe - -.PHONY: install -install: - +$(Q)$(MAKE) -C src/ocaml-output install - -# The `uninstall` rule is only necessary for users who manually ran -# `make install`. It is not needed if F* was installed with opam, -# since `opam remove` can uninstall packages automatically with its -# own way. - -.PHONY: uninstall -uninstall: - rm -rf \ - $(PREFIX)/lib/fstar \ - $(PREFIX)/bin/fstar_tests.exe \ - $(PREFIX)/bin/fstar.exe \ - $(PREFIX)/share/fstar - -.PHONY: package -package: all - +$(Q)$(MAKE) -C src/ocaml-output package - -# Removes everything created by `make all`. MUST NOT be used when -# bootstrapping. -.PHONY: clean -clean: clean-intermediate - $(call msg, "CLEAN") - $(Q)cd $(DUNE_SNAPSHOT) && { dune clean || true ; } - -# Removes all .checked files and other intermediate files -# Does not remove the object files from the dune snapshot. -.PHONY: clean-intermediate -clean-intermediate: - +$(Q)$(MAKE) -C ulib clean - +$(Q)$(MAKE) -C src clean - -# Regenerate all hints for the standard library and regression test suite -.PHONY: hints -hints: - +$(Q)OTHERFLAGS="${OTHERFLAGS} --record_hints" $(MAKE) -C ulib/ - +$(Q)OTHERFLAGS="${OTHERFLAGS} --record_hints" $(MAKE) ci-uregressions ci-ulib-extra - -.PHONY: bench -bench: - ./bin/run_benchmark.sh - -# Regenerate and accept expected output tests. Should be manually -# reviewed before checking in. 
-.PHONY: output -output: \ - output--examples \ - output--tests - -.PHONY: output--% -output--%: - +$(Q)$(MAKE) -C $* accept - -# This rule is meant to mimic what the docker based CI does, but it -# is not perfect. In particular it will not look for a diff on the -# snapshot, nor run the build-standalone script. -.PHONY: ci -ci: - +$(Q)FSTAR_HOME=$(CURDIR) $(MAKE) ci-pre - +$(Q)FSTAR_HOME=$(CURDIR) $(MAKE) ci-post - -# This rule runs a CI job in a local container, exactly like is done for -# CI. -.PHONY: docker-ci -docker-ci: - docker build -f .docker/standalone.Dockerfile \ - --build-arg CI_THREADS=$(shell nproc) \ - --build-arg FSTAR_CI_NO_GITDIFF=1 \ - . - -.PHONY: ci-pre -ci-pre: ci-rebootstrap - -.PHONY: ci-rebootstrap -ci-rebootstrap: - +$(Q)$(MAKE) full-bootstrap FSTAR_BUILD_PROFILE=test - -.PHONY: ci-ocaml-test -ci-ocaml-test: - +$(Q)$(MAKE) -C src ocaml-unit-tests - -.PHONY: ci-ulib-extra -ci-ulib-extra: - +$(Q)$(MAKE) -C ulib extra - -.PHONY: ci-ulib-in-fsharp -ci-ulib-in-fsharp: - +$(Q)$(MAKE) -C ulib ulib-in-fsharp - -.PHONY: ci-post -ci-post: \ - ci-ulib-in-fsharp \ - ci-ocaml-test \ - ci-uregressions \ - $(if $(FSTAR_CI_TEST_KARAMEL),ci-karamel-test,) - -.PHONY: ci-uregressions -ci-uregressions: - +$(Q)$(MAKE) -C src uregressions - -.PHONY: ci-karamel-test -ci-karamel-test: ci-krmllib - +$(Q)$(MAKE) -C examples -f karamel.Makefile - -# krmllib needs FStar.ModifiesGen already checked, so we add the dependency on -# ulib-extra here. This is possibly spurious and fixable by tweaking krml's makefiles. -.PHONY: ci-krmllib -ci-krmllib: ci-ulib-extra - +$(Q)OTHERFLAGS="${OTHERFLAGS} --admit_smt_queries true" $(MAKE) -C $(KRML_HOME)/krmllib - -# Shortcuts: - -.PHONY: 1 2 3 - -1: fstar - -# This is a hacky rule to bootstrap the compiler, and not -# the library, more quickly. -2: - +$(Q)$(MAKE) -C src ocaml - +$(Q)$(MAKE) -C src/ocaml-output overlay-snapshots - +$(Q)$(MAKE) fstar - -3: - +$(Q)$(MAKE) 1 - +$(Q)$(MAKE) 2 +export FSTAR_ROOT=$(CURDIR) +# ^ This variable is only used by internal makefiles. +# Do NOT rely on it in client code. It is not what FSTAR_HOME was. +include mk/common.mk +undefine FSTAR_EXE # just in case + +# NOTE: If you are changing any of install rules, run a macos build too. +# The behavior of cp, find, etc, can differ in subtle ways from that of GNU tools. + +FSTAR_DEFAULT_GOAL ?= build +.DEFAULT_GOAL := $(FSTAR_DEFAULT_GOAL) + +all-packages: package-1 package-2 package-src-1 package-src-2 +all: stage3-bare all-packages lib-fsharp + +### STAGES + +ifneq ($(FSTAR_EXTERNAL_STAGE0),) +FSTAR0_EXE := $(abspath $(FSTAR_EXTERNAL_STAGE0)) +endif + +STAGE0 ?= stage0 + +FSTAR0_EXE ?= $(STAGE0)/bin/fstar.exe +# This is hardcoding some dune paths, with internal (non-public) names. +# This is motivated by dune installing packages as a unit, so I could not +# install simply the bare compiler and then use it to build the full compiler +# without splitting into many packages, which complicates the namespaces. +# +# Also, when we want to extract src/ for stage 2, we must call FSTAR1_FULL_EXE, +# but it's in a bad location (without a library next to it). So, we must +# pass FSTAR_LIB explicitly. This is the only case where this is needed, the rest +# of stages don't need a library. The alternative is to install it, and use +# $(INSTALLED_FSTAR1_FULL_EXE), but that introduces a spurious dependency to the +# stage 1 libraries for the stage 2, which does not need them at all (currently?). +# +# I'd love a better alternative. 
+FSTAR1_BARE_EXE := stage1/dune/_build/default/fstarc-bare/main.exe +FSTAR1_FULL_EXE := stage1/dune/_build/default/fstarc-full/main.exe +INSTALLED_FSTAR1_FULL_EXE := stage1/out/bin/fstar.exe +FSTAR2_BARE_EXE := stage2/dune/_build/default/fstarc-bare/main.exe +FSTAR2_FULL_EXE := stage2/dune/_build/default/fstarc-full/main.exe +INSTALLED_FSTAR2_FULL_EXE := stage2/out/bin/fstar.exe + +.PHONY: _force +_force: + +build: 2 + +0: $(FSTAR0_EXE) +1.bare: $(FSTAR1_BARE_EXE) +1.full: $(FSTAR1_FULL_EXE) +2.bare: $(FSTAR2_BARE_EXE) +2.full: $(FSTAR2_FULL_EXE) + +# This one we assume it's rather stable, and do not +# mark it PHONY. Still adding '0' allows to force this +# build by 'make 0'. +0 $(FSTAR0_EXE): + $(call bold_msg, "STAGE 0") + mkdir -p $(STAGE0)/ulib/.cache # prevent warnings + $(MAKE) -C $(STAGE0) fstar + $(MAKE) -C $(STAGE0) trim # We don't need OCaml build files. + +$(FSTAR1_BARE_EXE).src: $(FSTAR0_EXE) _force + $(call bold_msg, "EXTRACT", "STAGE 1 FSTARC-BARE") + env \ + SRC=src/ \ + FSTAR_EXE=$(FSTAR0_EXE) \ + CACHE_DIR=stage1/fstarc.checked/ \ + OUTPUT_DIR=stage1/fstarc.ml/ \ + CODEGEN=OCaml \ + TAG=fstarc \ + $(MAKE) -f mk/fstar-01.mk ocaml + +$(FSTAR1_BARE_EXE): $(FSTAR1_BARE_EXE).src _force + $(call bold_msg, "BUILD", "STAGE 1 FSTARC-BARE") + $(MAKE) -C stage1 fstarc-bare + +$(FSTAR1_FULL_EXE).src: $(FSTAR1_BARE_EXE) _force + $(call bold_msg, "EXTRACT", "STAGE 1 PLUGINS") + env \ + SRC=ulib/ \ + FSTAR_EXE=$(FSTAR1_BARE_EXE) \ + CACHE_DIR=stage1/plugins.checked/ \ + OUTPUT_DIR=stage1/plugins.ml/ \ + CODEGEN=PluginNoLib \ + OTHERFLAGS="--ext __guts $(OTHERFLAGS)" \ + TAG=plugins \ + $(MAKE) -f mk/plugins.mk ocaml + +$(FSTAR1_FULL_EXE): $(FSTAR1_FULL_EXE).src _force + $(call bold_msg, "BUILD", "STAGE 1 FSTARC") + $(MAKE) -C stage1 fstarc-full + +1.alib.src: $(FSTAR1_FULL_EXE) _force + $(call bold_msg, "EXTRACT", "STAGE 1 LIB") + env \ + SRC=ulib/ \ + FSTAR_EXE=$(FSTAR1_FULL_EXE) \ + CACHE_DIR=stage1/ulib.checked/ \ + OUTPUT_DIR=stage1/ulib.ml/ \ + CODEGEN=OCaml \ + TAG=lib \ + $(MAKE) -f mk/lib.mk all-ml + +1.alib: 1.alib.src _force + $(call bold_msg, "BUILD", "STAGE 1 LIB") + $(MAKE) -C stage1/ libapp + +1.plib.src: $(FSTAR1_FULL_EXE) 1.alib.src _force + # NB: shares .depend and checked from 1.alib.src, + # hence the dependency, though it is not quite precise. + $(call bold_msg, "EXTRACT", "STAGE 1 PLUGLIB") + env \ + SRC=ulib/ \ + FSTAR_EXE=$(FSTAR1_FULL_EXE) \ + CACHE_DIR=stage1/ulib.checked/ \ + OUTPUT_DIR=stage1/ulib.pluginml/ \ + CODEGEN=PluginNoLib \ + TAG=pluginlib \ + DEPFLAGS='--extract +FStar.Tactics,+FStar.Reflection,+FStar.Sealed' \ + $(MAKE) -f mk/lib.mk all-ml + +1.plib: 1.plib.src _force | 1.alib # this last dependency only to prevent simultaneous dune builds + $(call bold_msg, "BUILD", "STAGE 1 PLUGLIB") + $(MAKE) -C stage1/ libplugin + +$(FSTAR2_BARE_EXE).src: $(FSTAR1_FULL_EXE) _force + $(call bold_msg, "EXTRACT", "STAGE 2 FSTARC") + # NOTE: see the explanation for FSTAR_LIB near top of file. + env \ + SRC=src/ \ + FSTAR_LIB=$(abspath ulib) \ + FSTAR_EXE=$(FSTAR1_FULL_EXE) \ + CACHE_DIR=stage2/fstarc.checked/ \ + OUTPUT_DIR=stage2/fstarc.ml/ \ + CODEGEN=OCaml \ + TAG=fstarc \ + $(MAKE) -f mk/fstar-12.mk ocaml + +$(FSTAR2_BARE_EXE): $(FSTAR2_BARE_EXE).src _force + $(call bold_msg, "BUILD", "STAGE 2 FSTARC-BARE") + $(MAKE) -C stage2 fstarc-bare FSTAR_DUNE_RELEASE=1 + # ^ Note, even if we don't release fstar-bare itself, + # it is still part of the build of the full fstar, so + # we set the release flag to have a more incremental build. 
+ + +$(FSTAR2_FULL_EXE).src: $(FSTAR2_BARE_EXE) _force + $(call bold_msg, "EXTRACT", "STAGE 2 PLUGINS") + env \ + SRC=ulib/ \ + FSTAR_EXE=$(FSTAR2_BARE_EXE) \ + CACHE_DIR=stage2/plugins.checked/ \ + OUTPUT_DIR=stage2/plugins.ml/ \ + CODEGEN=PluginNoLib \ + OTHERFLAGS="--ext __guts $(OTHERFLAGS)" \ + TAG=plugins \ + $(MAKE) -f mk/plugins.mk ocaml + +$(FSTAR2_FULL_EXE): $(FSTAR2_FULL_EXE).src _force + $(call bold_msg, "BUILD", "STAGE 2 FSTARC") + $(MAKE) -C stage2 fstarc-full FSTAR_DUNE_RELEASE=1 + +2.alib.src: $(FSTAR2_FULL_EXE) _force + $(call bold_msg, "EXTRACT", "STAGE 2 LIB") + env \ + SRC=ulib/ \ + FSTAR_EXE=$(FSTAR2_FULL_EXE) \ + CACHE_DIR=stage2/ulib.checked/ \ + OUTPUT_DIR=stage2/ulib.ml/ \ + CODEGEN=OCaml \ + TAG=lib \ + $(MAKE) -f mk/lib.mk all-ml + +2.alib: 2.alib.src _force + $(call bold_msg, "BUILD", "STAGE 2 LIB") + $(MAKE) -C stage2/ libapp FSTAR_DUNE_RELEASE=1 + +2.plib.src: $(FSTAR2_FULL_EXE) 2.alib.src _force + # NB: shares .depend and checked from 2.alib.src, + # hence the dependency, though it is not quite precise. + $(call bold_msg, "EXTRACT", "STAGE 2 PLUGLIB") + env \ + SRC=ulib/ \ + FSTAR_EXE=$(FSTAR2_FULL_EXE) \ + CACHE_DIR=stage2/ulib.checked/ \ + OUTPUT_DIR=stage2/ulib.pluginml/ \ + CODEGEN=PluginNoLib \ + TAG=pluginlib \ + DEPFLAGS='--extract +FStar.Tactics,+FStar.Reflection,+FStar.Sealed' \ + $(MAKE) -f mk/lib.mk all-ml + +2.plib: 2.plib.src _force | 2.alib # this last dependency only to prevent simultaneous dune builds + $(call bold_msg, "BUILD", "STAGE 2 PLUGLIB") + $(MAKE) -C stage2/ libplugin FSTAR_DUNE_RELEASE=1 + +# F# library, from stage 2. +lib-fsharp.src: $(FSTAR2_FULL_EXE) 2.alib.src _force + # NB: shares checked files from 2.alib.src, + # hence the dependency, though it is not quite precise. + $(call bold_msg, "EXTRACT", "FSHARP LIB") + # Note: FStar.Map and FStar.Set are special-cased + env \ + SRC=ulib/ \ + FSTAR_EXE=$(FSTAR2_FULL_EXE) \ + CACHE_DIR=stage2/ulib.checked/ \ + OUTPUT_DIR=fsharp/extracted/ \ + CODEGEN=FSharp \ + TAG=fsharplib \ + DEPFLAGS='--extract -FStar.Map,-FStar.Set' \ + $(MAKE) -f mk/lib.mk all-fs + +.PHONY: lib-fsharp +lib-fsharp: lib-fsharp.src + $(MAKE) -C fsharp/VS all + +# Stage 3 is different, we don't build it, we just check that the +# extracted OCaml files coincide exactly with stage2. We also do not +# extract the plugins, as is stage2/fstarc and stage3/fstarc coincide, +# then they are exactly the same compiler and will extract the plugins +# in the same way. + +stage3-bare: $(FSTAR2_FULL_EXE) _force + $(call bold_msg, "EXTRACT", "STAGE 3 FSTARC") + # NOTE: see the explanation for FSTAR_LIB near top of file. + env \ + SRC=src/ \ + FSTAR_EXE=$(FSTAR2_FULL_EXE) \ + FSTAR_LIB=$(abspath ulib) \ + CACHE_DIR=stage3/fstarc.checked/ \ + OUTPUT_DIR=stage3/fstarc.ml/ \ + CODEGEN=OCaml \ + TAG=fstarc \ + $(MAKE) -f mk/fstar-12.mk ocaml + +stage3-diff: stage3-bare _force + $(call bold_msg, "DIFF", "STAGE 2 vs STAGE 3") + @# No output expected the gitignore line + diff -r stage2/fstarc.ml stage3/fstarc.ml + +$(INSTALLED_FSTAR1_FULL_EXE): 1.full 1.alib.src 1.plib.src + $(call bold_msg, "INSTALL", "STAGE 1") + $(MAKE) -C stage1 install + +$(INSTALLED_FSTAR2_FULL_EXE): 2.full 2.alib.src 2.plib.src + $(call bold_msg, "INSTALL", "STAGE 2") + $(MAKE) -C stage2 install FSTAR_DUNE_RELEASE=1 + +setlink-%: + if [ -e out ] && ! 
[ -h out ]; then echo "ERROR: out/ exists and is not a symbolic link, please remove it"; false; fi + ln -Trsf stage$*/out out + # For compatibility with the previous layout + mkdir -p bin + ln -Trsf out/bin/fstar.exe bin/fstar.exe + ln -Trsf out/bin/get_fstar_z3.sh bin/get_fstar_z3.sh + +stage1: $(INSTALLED_FSTAR1_FULL_EXE) +1: stage1 + $(MAKE) setlink-1 + +stage2: $(INSTALLED_FSTAR2_FULL_EXE) +2: stage2 + $(MAKE) setlink-2 + +3: stage3-diff + +do-install: _force + $(call bold_msg, "INSTALL", $(PREFIX)) + # Install fstar.exe, application library, and plugin library + mkdir -p $(PREFIX) # Needed for macOS apparently + cp -r $(BROOT)/out/* $(PREFIX) + +install: 2 +install: BROOT=stage2 +install: export PREFIX?=/usr/local +install: do-install + +do-src-install: _force + $(call bold_msg, "SRC INSTALL", $(PREFIX)) + # Install OCaml sources only + .scripts/src-install.sh "$(BROOT)" "$(PREFIX)" + +__do-archive: _force + rm -rf $(PREFIX) + # add an 'fstar' top-level directory to the archive + $(MAKE) do-install PREFIX=$(PREFIX)/fstar + $(call bold_msg, "ARCHIVE", $(ARCHIVE)) + tar czf $(ARCHIVE) -h -C $(PREFIX) . + rm -rf $(PREFIX) + +__do-src-archive: _force + rm -rf $(PREFIX) + $(MAKE) do-src-install PREFIX=$(PREFIX)/fstar + $(call bold_msg, "SRC ARCHIVE", $(ARCHIVE)) + tar czf $(ARCHIVE) -h -C $(PREFIX) . + rm -rf $(PREFIX) + +# We append the version to the package names, unless +# FSTAR_TAG is set (possibly empty) +FSTAR_TAG ?= -v$(shell cat version.txt) + +package-1: $(INSTALLED_FSTAR1_FULL_EXE) _force + env \ + PREFIX=_pak1 \ + BROOT=stage1/ \ + ARCHIVE=fstar$(FSTAR_TAG)-stage1.tar.gz \ + $(MAKE) __do-archive + +package-2: $(INSTALLED_FSTAR2_FULL_EXE) _force + env \ + PREFIX=_pak2 \ + BROOT=stage2/ \ + ARCHIVE=fstar$(FSTAR_TAG).tar.gz \ + $(MAKE) __do-archive + +package-src-1: $(FSTAR1_FULL_EXE).src 1.alib.src 1.plib.src _force + env \ + PREFIX=_srcpak1 \ + BROOT=stage1/ \ + ARCHIVE=fstar$(FSTAR_TAG)-stage1-src.tar.gz \ + $(MAKE) __do-src-archive + +package-src-2: $(FSTAR2_FULL_EXE).src 2.alib.src 2.plib.src _force + env \ + PREFIX=_srcpak2 \ + BROOT=stage2/ \ + ARCHIVE=fstar$(FSTAR_TAG)-src.tar.gz \ + $(MAKE) __do-src-archive + +package: package-2 +package-src: package-src-2 + +test: test-2 + +test-1: override FSTAR_EXE := $(abspath stage1/out/bin/fstar.exe) +test-1: stage1 + $(MAKE) _test FSTAR_EXE=$(FSTAR_EXE) + +test-2: override FSTAR_EXE := $(abspath stage2/out/bin/fstar.exe) +test-2: stage2 + $(MAKE) _test FSTAR_EXE=$(FSTAR_EXE) + +unit-tests: override FSTAR_EXE := $(abspath stage2/out/bin/fstar.exe) +unit-tests: _unit-tests + +# Use directly only at your own risk. 
+_test: FSTAR_EXE ?= $(abspath out/bin/fstar.exe) +_test: _unit-tests _examples + +need_fstar_exe: + if [ -z "$(FSTAR_EXE)" ]; then \ + echo "This rule needs FSTAR_EXE defined."; \ + false; \ + fi + +_unit-tests: need_fstar_exe _force + +$(MAKE) -C tests all FSTAR_EXE=$(FSTAR_EXE) + +_examples: need_fstar_exe _force + +$(MAKE) -C examples all FSTAR_EXE=$(FSTAR_EXE) + +ci: _force + +$(MAKE) 2 + +$(MAKE) test lib-fsharp stage3-diff + +do-save: _force + $(call bold_msg,"SAVE", "$(FROM) --> $(TO)") + rm -rf $(TO) + mkdir -p $(TO) + cp -r $(FROM) -T $(TO) + rm -rf $(TO)/out + rm -rf $(TO)/fstarc.checked + rm -rf $(TO)/plugins.checked + rm -rf $(TO)/ulib.checked + dune clean --no-print-directory --display=quiet --root=$(TO)/bare + dune clean --no-print-directory --display=quiet --root=$(TO)/full + dune clean --no-print-directory --display=quiet --root=$(TO)/fstarlib + dune clean --no-print-directory --display=quiet --root=$(TO)/fstar-pluginlib + sed -i 's/a/a/' $(TO)/version.txt # hack to turn symlink into concrete file + rm -f $(TO)/full/ulib + rm -f $(TO)/ulib # a symlink + cp -r ulib -T $(TO)/ulib + # For now at least... we do not really use the stage0 F* to compile + # normal applications, though that should definitely change if possible. + # So, remove some more stuff. + rm -rf $(TO)/fstarlib + rm -rf $(TO)/fstar-pluginlib + rm -rf $(TO)/ulib.ml + rm -rf $(TO)/ulib.pluginml + # We also do not ever verify anything with the stage0. So, remove + # the hints, but this is weird... + rm -rf $(TO)/ulib/.hints + rm -f $(TO)/.gitignore + echo '/out' >> $(TO)/.gitignore + +save: FROM=stage2 +save: TO=_new +save: do-save + +bump-stage0: FROM=stage2 +bump-stage0: TO=stage0 +bump-stage0: do-save + # Now that stage0 supports all features, we can return to a clean state + # where the 01 makefile is equal to the 12 makefile. Same for stage1 support + # and config code, we just take it from the stage2. + rm -f mk/fstar-01.mk + ln -s fstar-12.mk mk/fstar-01.mk + rm -rf stage1 + cp -r stage2 stage1 + +# This rule brings a stage0 from an OLD fstar repo. Only useful for migrating. 
+bring-stage0: _force + if [ -z "$(FROM)" ]; then echo "FROM not set" >&2; false; fi + rm -rf stage0 + mkdir stage0 + cp -r $(FROM)/ocaml -T stage0 + ln -Tsrf mk/stage0.mk stage0/Makefile + cp -r $(FROM)/ulib -T stage0/ulib + find stage0/ulib -name '*.checked' -delete + find stage0/ulib -name '*.hints' -delete + echo '/lib' >> stage0/.gitignore + echo -ne '** -diff -merge\n** linguist-generated=true\n' >> stage0/.gitattributes + +watch: + while true; do \ + $(MAKE) ;\ + inotifywait -qre close_write,moved_to .; \ + done + + +### CLEAN + +clean-depend: _force + rm -f stage1/fstarc.checked/.*depend* + rm -f stage1/plugins.checked/.*depend* + rm -f stage1/ulib.checked/.*depend* + rm -f stage2/fstarc.checked/.*depend* + rm -f stage2/plugins.checked/.*depend* + rm -f stage2/ulib.checked/.*depend* + +clean-0: _force + $(call bold_msg, "CLEAN", "STAGE 0") + $(MAKE) -C $(STAGE0) clean + rm -rf $(STAGE0)/ulib/.cache # created only to prevent warnings, always empty + +clean-1: _force + $(call bold_msg, "CLEAN", "STAGE 1") + $(MAKE) -C stage1 clean + rm -rf stage1/fstarc.checked + rm -rf stage1/fstarc.ml + rm -rf stage1/plugins.checked + rm -rf stage1/plugins.ml + rm -rf stage1/ulib.checked + rm -rf stage1/ulib.ml + rm -rf stage1/ulib.pluginml + +clean-2: _force + $(call bold_msg, "CLEAN", "STAGE 2") + $(MAKE) -C stage2 clean + rm -rf stage2/fstarc.checked + rm -rf stage2/fstarc.ml + rm -rf stage2/plugins.checked + rm -rf stage2/plugins.ml + rm -rf stage2/ulib.checked + rm -rf stage2/ulib.ml + rm -rf stage2/ulib.pluginml + +clean-3: _force + $(call bold_msg, "CLEAN", "STAGE 3") + rm -rf stage3/ + +trim: clean-0 clean-1 clean-2 clean-3 + +clean: trim + $(call bold_msg, "CLEAN", "out/") + # ah.. this is just a symlink, recursive calls above should just trim + rm -rf out + +distclean: clean + $(call bold_msg, "DISTCLEAN") + rm -rf _new + rm -rf _build + rm -f fstar.tar.gz + rm -f fstar-*.tar.gz + +help: + echo "Main rules:" + echo " build build the compiler and libraries, and install it in out/" + echo " test run internal tests and examples (implies build)" + echo " package build a binary package" + echo " package-src build an OCaml source package" + echo " clean clean everything except built packages" + echo " install install F* into your system (by default to /usr/local, set PREFIX to change this)" + echo + echo "Optional arguments:" + echo " V=1 enable verbose build" + echo " ADMIT=1 skip verification (pass '--admit_smt_queries true')" + echo + echo "Rules for F* hackers:" + echo " all build everything that can be built, also extract stage 3" + echo " 0 build the stage0 compiler (in stage0/)" + echo " stage1 build a full stage 1 compiler and libraries" + echo " 1 stage1 + set the out/ symlink" + echo " stage2 build a full stage 2 compiler and libraries" + echo " 2 (= build) stage2 + set the out/ symlink" + echo " package-1 create a binary tar.gz for the stage 1 build" + echo " package-2 create a binary tar.gz for the stage 2 build (= package)" + echo " package-src-1 create an OCaml source distribution for the stage 1 build" + echo " package-src-2 create an OCaml source distribution for the stage 2 build (= package-src)" + echo " all-packages build the four previous rules" + echo " clean-depend remove all .depend files, useful when files change name" + echo " trim clean some buildfiles, but retain any installed F* in out" + echo " distclean remove every generated file" + echo " unit-tests run the smaller unit test suite (implied by test)" + echo " bump-stage0 copy stage2 into stage0, and restore 
symlinks between stage1/stage2" + echo " (essentially snapshotting a package-src-2)" + echo " save like bump-stage0, but saves the snapshot in _new/ for inspection" + echo + echo "You can set a different default goal by defining FSTAR_DEFAULT_GOAL in your environment." diff --git a/bin/.gitignore b/bin/.gitignore deleted file mode 100644 index 3bd89abe196..00000000000 --- a/bin/.gitignore +++ /dev/null @@ -1,11 +0,0 @@ -Microsoft.Z3.dll -libz3.dll -z3.exe -z3 -fstar-mono.exe -tests-mono.exe -fstar.exe -fstar.fsharp -FsLexYacc.Runtime.dll -*.XML -*lib/META diff --git a/bin/tests-mono.sh b/bin/tests-mono.sh deleted file mode 100755 index 8ce142abb57..00000000000 --- a/bin/tests-mono.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - -BASEDIR=$(dirname $0) - -mono $BASEDIR/tests-mono.exe "$@" diff --git a/build_local.sh b/build_local.sh deleted file mode 100755 index 510cf4c75b5..00000000000 --- a/build_local.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env bash -set -e - -# Look for config.json file -FILE=".docker/build/config.json" -if [[ ! -f $FILE ]]; then - echo "File $FILE does not exist." -fi - -# In case you want to build windows, change agentOS here to windows-nt if OSTYPE is not working -agentOS=linux -if [[ "$OSTYPE" == "cygwin" ]]; then - agentOS=linux #windows-nt -fi - -DOCKERFILE=$(jq -c -r ".DockerFile" "$FILE") -DOCKERFILE=$( echo ${DOCKERFILE} | sed "s/{agentOS}/${agentOS}/g" ) - -# Copy dockerfile to root -cp $DOCKERFILE ./Dockerfile - -# Copy dependencies -DEPFILES=$(jq -c -r ".DependencyFiles[]" "$FILE") -cp -r $DEPFILES . - -PROJECTNAME=$(jq -c -r ".ProjectName" "$FILE" | awk '{print tolower($0)}') -BUILDTARGET=$(jq -c -r ".CIBuildTarget" "$FILE") -LOCALBRANCHNAME=$(git branch | grep \* | cut -d ' ' -f2) - -#Find commit id. -REQUESTEDBRANCHNAME=$(jq -c -r ".BranchName" "$FILE") -REQUESTEDCOMMITID=$(jq -c -r ".BaseContainerImageTagOrCommitId" "$FILE") -COMMITURL=$(jq -c -r ".GithubCommitUrl" "$FILE")/$REQUESTEDBRANCHNAME - -if [[ $(jq -c -r ".BaseContainerImageTagOrCommitId" "$FILE") -ne "latest" ]]; then - COMMITURL=$(jq -c -r ".GithubCommitUrl" "$FILE")/$REQUESTEDCOMMITID -fi - -LINE="$( git ls-remote ${COMMITURL%"/commit/master"} HEAD)" -FULLCOMMITID="$( echo ${LINE} | cut -f1 )" -COMMITID=${FULLCOMMITID:0:12} - -# create fake files ssh key, commitinfofilename.json, etc -echo "fake" > id_rsa -echo "fake" > commitinfofilename.json - -# build container -docker build --file Dockerfile --build-arg BUILDLOGFILE="buildlogfile.txt" --build-arg MAXTHREADS="8" --build-arg BUILDTARGET="$BUILDTARGET" --build-arg BRANCHNAME="$LOCALBRANCHNAME" --build-arg COMMITID="$COMMITID" --build-arg DOCKERHUBPROJECT="projecteverest/" --tag "$PROJECTNAME:local" . - -# delete fake files -rm -f id_rsa -rm -f commitinfofilename.json - -# Remove dep files. -for f in $DEPFILES; do rm -f $(basename $f); done - -# delete dockerfile -rm -f Dockerfile diff --git a/doc/ref/bootstrapping b/doc/ref/bootstrapping new file mode 100644 index 00000000000..c00954e8629 --- /dev/null +++ b/doc/ref/bootstrapping @@ -0,0 +1,86 @@ +stage0: contains a starting point to start building F*, an ocaml +snapshot of an older F* that is capable of building the *current* F* in +this repo, i.e. the sources in src/. We update this snapshot only when +needed, it absolutely should not be update on every push to master or +every PR. + +stage1/stage2: These directories are clones of each other. 
The high
+level idea is that we use the stage0 compiler to extract+build a stage1
+compiler, and then use the stage1 to extract+build a stage2.
+
+One round
+=========
+
+The first step is using stage0 to (lax) check+extract every fst
+file in src/. We place the results into stage1/fstarc.checked and
+stage1/fstarc.ml. The compiler (in src/) can refer to modules in the
+standard library of F*, __as long as they are in stage0 already__. We do
+not read ulib/ at all at this point. In fact, the new library could very
+well not check with stage0.
+
+Once we've extracted this set of ml files, we can build a "bare"
+fstar.exe. We also have to link in some basic support ML files (in
+src/ml/bare) for some interfaces in the compiler that are implemented in
+OCaml. We also need the ml implementations for the library modules we
+use. We take these from ulib/ml/app (the directory for ml files of the
+application library).
+
+NOTE: this last inclusion is actually not right: we should take these
+files from stage0, but stage0 (today) does not provide a directory with
+the ml files for them. In fact, it does not provide an application
+library at all.
+
+This "bare" fstar.exe has all of src/ and is up-to-date, but has
+no plugins whatsoever. (Nit: we could find a better name for these
+"baked-in plugins"; they differ from plugins that go in a cmxs and get
+loaded dynamically.) Using fstar-bare.exe, we can (lax) check a part of
+ulib (defined by ROOTS in mk/plugins.mk) to generate the relevant ml
+files for them. We place all of these into stage1/plugins.ml, and
+compile them together with all the previously extracted ml files to
+create a full fstar.exe with plugins.
+
+The next steps are building the libraries (applib and pluginlib). Using
+the full compiler, we check every file in ulib/ to generate checked+ml
+files (ml unless the module is filtered by --extract). We can then build
+all of this into a library. This is defined in stage1/dune/libapp.
+
+The next step is building the library for dynamic plugins, i.e. cmxs
+files that get dynlinked into fstar.exe. Since these plugins can
+reference anything in the library, we *must* have implementations for
+all of that, and they are not necessarily already in fstar.exe itself
+(e.g. maybe the compiler does not use a fancy red-black tree, so it's
+not linked, but a plugin can). The plugin library is essentially just
+the application library extracted with --codegen Plugin. We reuse the
+checked files from the application library and extract again to build
+this library (as a cmxs). This is defined in stage1/dune/libplugin.
+The cmxs is installed in lib/fstar and is loaded automatically by F*
+before loading any user-created cmxs plugin (hence a user-created cmxs
+need only contain the user's own modules, omitting any support modules
+from the library).
+
+Second round
+============
+
+The second round is the exact same procedure as above, but using the
+fully-built stage1 compiler and extracting into a stage2. Why do we even
+do this? Because if src/ contains some changes to extraction (e.g.
+now everything is 2x faster), the fstar.exe in stage1 wouldn't benefit
+from them, as it was extracted+built with stage0. The second round gives
+you a build of what's in src/, built with a compiler whose source is
+src/, reaping the benefits. It's also a good test, of course.
+
+Note: when we're building a stage2, we are NOT using the stage1's libapp
+or libplugin. In fact they are not even built.
+
+We also do a stage3: we extract all of src into stage3/fstarc.ml and
+compare with stage2/fstarc.ml; a rough sketch of this check is below.
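+
+As an illustrative sketch only (the real entry point is the stage3-diff
+rule in the toplevel Makefile; the exact commands it runs may differ):
+
+  make 2            # build the stage2 compiler and set the out/ symlink
+  make stage3-diff  # re-extract src/ with stage2 and compare the trees
+  # conceptually: diff -r stage2/fstarc.ml stage3/fstarc.ml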
These outputs should be identical, or +there is a bug somewhere. + +Optimizations +============= + +An alternative to building stage0 is using some external F* installed +in your system instead, which also makes building a bit faster, since +we skip the dune build of the stage0. You can do this by setting +FSTAR_EXTERNAL_STAGE0 in your environment, but do so at your own risk. +You could also add .checked.lax files into this external F*'s cache +dir to save you from rechecking its library again. We should make this +easier. + diff --git a/examples/Cfg.fst.config.json b/examples/Cfg.fst.config.json new file mode 100644 index 00000000000..a406ce08ad0 --- /dev/null +++ b/examples/Cfg.fst.config.json @@ -0,0 +1,9 @@ +{ + "fstar_exe": "../out/bin/fstar.exe", + "options": [ + "--ext", "context_pruning", + "--z3version", "4.13.3" + ], + "include_dirs": [ + ] +} diff --git a/examples/dsls/DSL.fst.config.json b/examples/dsls/DSL.fst.config.json index 5e232b24e01..d1360f24590 100644 --- a/examples/dsls/DSL.fst.config.json +++ b/examples/dsls/DSL.fst.config.json @@ -1,6 +1,7 @@ { "fstar_exe": "fstar.exe", "options": [ + "--z3version", "4.13.3" ], "include_dirs": [ "bool_refinement", diff --git a/examples/miniparse/MiniParse.fst.config.json b/examples/miniparse/MiniParse.fst.config.json index af60a2e69e8..430077264ee 100644 --- a/examples/miniparse/MiniParse.fst.config.json +++ b/examples/miniparse/MiniParse.fst.config.json @@ -1,7 +1,8 @@ { "fstar_exe": "fstar.exe", "options": [ - "--ext", "context_pruning" + "--ext", "context_pruning", + "--z3version", "4.13.3" ], "include_dirs": [ ] diff --git a/examples/native_tactics/.gitignore b/examples/native_tactics/.gitignore index de4cda24cb9..4ee263da831 100644 --- a/examples/native_tactics/.gitignore +++ b/examples/native_tactics/.gitignore @@ -1,3 +1,4 @@ +*.ml *.cmxs *.test Bench.* diff --git a/examples/native_tactics/Makefile b/examples/native_tactics/Makefile index 876708dbb90..e445de851ba 100644 --- a/examples/native_tactics/Makefile +++ b/examples/native_tactics/Makefile @@ -6,8 +6,8 @@ FSTAR_HOME=../.. 
OTHERFLAGS += --z3version 4.13.3 -FSTAR_EXE ?= $(FSTAR_HOME)/bin/fstar.exe -MY_FSTAR_EXE=$(FSTAR_EXE) $(OTHERFLAGS) +FSTAR_EXE ?= $(FSTAR_HOME)/out/bin/fstar.exe +FSTAR=$(FSTAR_EXE) $(OTHERFLAGS) # Tests for which the native tactics used in module named Sample.Test.fst are # declared in a corresponding module named Sample.fst @@ -48,7 +48,7 @@ ALL=Apply\ all: $(addsuffix .sep.test, $(TAC_MODULES)) $(addsuffix .test, $(ALL)) # .depend: -# $(FSTAR_EXE) --dep full $(addsuffix .Test.fst, $(ALL)) --output_deps_to .depend +# $(FSTAR) --dep full $(addsuffix .Test.fst, $(ALL)) --output_deps_to .depend # include .depend @@ -56,26 +56,25 @@ all: $(addsuffix .sep.test, $(TAC_MODULES)) $(addsuffix .test, $(ALL)) .PRECIOUS: %.ml %.test: %.fst %.ml - $(MY_FSTAR_EXE) $*.fst --load $* + $(FSTAR) $*.fst --load $* touch $@ %.sep.test: %.fst %.ml - $(MY_FSTAR_EXE) $*.Test.fst --load $* + $(FSTAR) $*.Test.fst --load $* touch $@ %.ml: %.fst - $(MY_FSTAR_EXE) $*.fst --cache_checked_modules --codegen Plugin --extract $* + $(FSTAR) $*.fst --cache_checked_modules --codegen Plugin --extract $* touch $@ %.clean: rm -f Registers_List.ml Registers.List.ml Registers_List.cmxs %.native: %.fst Registers.List.ml - $(MY_FSTAR_EXE) $*.fst --load Registers.List --warn_error -266 + $(FSTAR) $*.fst --load Registers.List --warn_error -266 %.interp: %.fst Registers.List.fst - $(MY_FSTAR_EXE) $*.fst - + $(FSTAR) $*.fst clean: rm -f *.test *.ml *.o *.cm[ix] *.cmxs diff --git a/flake.nix b/flake.nix index 0256435e7ba..b375b77ce41 100644 --- a/flake.nix +++ b/flake.nix @@ -13,15 +13,8 @@ ocamlPackages = pkgs.ocaml-ng.ocamlPackages_4_14; z3 = pkgs.callPackage (import ./.nix/z3.nix) { }; version = self.rev or "dirty"; - fstar-dune = ocamlPackages.callPackage ./ocaml { inherit version; }; - fstar-ulib = pkgs.callPackage ./ulib { inherit fstar-dune version z3; }; - fstar = pkgs.callPackage ./.nix/fstar.nix { - inherit fstar-dune fstar-ulib version z3; - }; - fstar-ocaml-snapshot = - pkgs.callPackage ./src { inherit fstar ocamlPackages version; }; - fstar-bootstrap = pkgs.callPackage ./.nix/bootstrap.nix { - inherit fstar fstar-dune fstar-ocaml-snapshot fstar-ulib; + fstar = ocamlPackages.callPackage ./.nix/fstar.nix { + inherit version z3; }; emacs = pkgs.writeScriptBin "emacs-fstar" '' #!${pkgs.stdenv.shell} @@ -34,9 +27,7 @@ ''; in rec { packages = { - inherit z3 ocamlPackages; - inherit fstar fstar-dune fstar-ocaml-snapshot fstar-bootstrap; - inherit emacs; + inherit z3 fstar emacs; default = fstar; }; apps.emacs = { @@ -45,7 +36,7 @@ }; devShells.default = pkgs.mkShell { name = "${fstar.name}-dev"; - inputsFrom = [ fstar fstar-dune ]; + inputsFrom = [ fstar ]; shellHook = '' export FSTAR_SOURCES_ROOT="$(pwd)" export PATH="$FSTAR_SOURCES_ROOT/bin/:$PATH" diff --git a/fsharp/.gitignore b/fsharp/.gitignore new file mode 100644 index 00000000000..bbaf4bdc5a4 --- /dev/null +++ b/fsharp/.gitignore @@ -0,0 +1,4 @@ +[Oo]bj/** +[Bb]in/** +extracted/ +nuget/ diff --git a/fsharp/README b/fsharp/README new file mode 100644 index 00000000000..6c39d2fc266 --- /dev/null +++ b/fsharp/README @@ -0,0 +1,7 @@ +This directory contains all that's needed to build an F# version of the +F* application library (alib) in order to build F# applications. This is +currently not tested by anything in this repository. + +The output of this build goes into the bin/ and nuget/ subdirectories. +Run `make lib-fsharp` from the top level to extract the library and +build it (this is implied by `make all`). 
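+
+For example (paths per the layout described above; the exact file names
+produced by `dotnet build`/`dotnet pack` may vary):
+
+  make lib-fsharp
+  ls fsharp/bin/    # built assemblies, e.g. ulibfs.dll
+  ls fsharp/nuget/  # the packed NuGet package (.nupkg)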
diff --git a/ulib/fs/VS/.gitignore b/fsharp/VS/.gitignore similarity index 100% rename from ulib/fs/VS/.gitignore rename to fsharp/VS/.gitignore diff --git a/fsharp/VS/Makefile b/fsharp/VS/Makefile new file mode 100644 index 00000000000..9da91fc59e2 --- /dev/null +++ b/fsharp/VS/Makefile @@ -0,0 +1,32 @@ +FSTAR_ROOT ?= ../.. + +# -*- Makefile -*- + +# -------------------------------------------------------------------- +DOTNET = dotnet + +CONFIGURATION?=Release + +DOTNET_PARAMS = /verbosity:minimal /p:Configuration=$(CONFIGURATION) + +PREFIX?=$(FSTAR_ROOT) + +# -------------------------------------------------------------------- +.PHONY: all install-packages build + +all: build + $(DOTNET) pack ../ulibfs.fsproj -o $(PREFIX)/fsharp/nuget + +# .NET convention: .dll files go to bin/ instead of lib/fstar +# TODO: in that case, we should rename ulibfs.dll into fstar_ulibfs.dll +# to avoid clashes with other .dll files in bin/ . This is one reason +# why we do not include this rule in `make install`, but only in +# `make package` +build: install-packages + $(DOTNET) build UlibFS.sln -o $(PREFIX)/fsharp/bin + +install-packages: + $(DOTNET) restore $(DOTNET_PARAMS) UlibFS.sln + +clean: + $(DOTNET) clean $(DOTNET_PARAMS) UlibFS.sln diff --git a/ulib/fs/VS/README.md b/fsharp/VS/README.md similarity index 100% rename from ulib/fs/VS/README.md rename to fsharp/VS/README.md diff --git a/ulib/fs/VS/UlibFS.sln b/fsharp/VS/UlibFS.sln similarity index 100% rename from ulib/fs/VS/UlibFS.sln rename to fsharp/VS/UlibFS.sln diff --git a/ulib/fs/VS/fstar-new.png b/fsharp/VS/fstar-new.png similarity index 100% rename from ulib/fs/VS/fstar-new.png rename to fsharp/VS/fstar-new.png diff --git a/ulib/fs/VS/global.json b/fsharp/VS/global.json similarity index 100% rename from ulib/fs/VS/global.json rename to fsharp/VS/global.json diff --git a/ulib/fs/FStar_All.fs b/fsharp/base/FStar_All.fs similarity index 100% rename from ulib/fs/FStar_All.fs rename to fsharp/base/FStar_All.fs diff --git a/ulib/fs/FStar_Char.fs b/fsharp/base/FStar_Char.fs similarity index 100% rename from ulib/fs/FStar_Char.fs rename to fsharp/base/FStar_Char.fs diff --git a/ulib/fs/FStar_CommonST.fs b/fsharp/base/FStar_CommonST.fs similarity index 100% rename from ulib/fs/FStar_CommonST.fs rename to fsharp/base/FStar_CommonST.fs diff --git a/ulib/fs/FStar_Dyn.fs b/fsharp/base/FStar_Dyn.fs similarity index 100% rename from ulib/fs/FStar_Dyn.fs rename to fsharp/base/FStar_Dyn.fs diff --git a/ulib/fs/FStar_Exn.fs b/fsharp/base/FStar_Exn.fs similarity index 100% rename from ulib/fs/FStar_Exn.fs rename to fsharp/base/FStar_Exn.fs diff --git a/ulib/fs/FStar_Float.fs b/fsharp/base/FStar_Float.fs similarity index 100% rename from ulib/fs/FStar_Float.fs rename to fsharp/base/FStar_Float.fs diff --git a/ulib/fs/FStar_Ghost.fs b/fsharp/base/FStar_Ghost.fs similarity index 100% rename from ulib/fs/FStar_Ghost.fs rename to fsharp/base/FStar_Ghost.fs diff --git a/ulib/fs/FStar_Heap.fs b/fsharp/base/FStar_Heap.fs similarity index 100% rename from ulib/fs/FStar_Heap.fs rename to fsharp/base/FStar_Heap.fs diff --git a/ulib/fs/FStar_HyperStack_All.fs b/fsharp/base/FStar_HyperStack_All.fs similarity index 100% rename from ulib/fs/FStar_HyperStack_All.fs rename to fsharp/base/FStar_HyperStack_All.fs diff --git a/ulib/fs/FStar_HyperStack_IO.fs b/fsharp/base/FStar_HyperStack_IO.fs similarity index 100% rename from ulib/fs/FStar_HyperStack_IO.fs rename to fsharp/base/FStar_HyperStack_IO.fs diff --git a/ulib/fs/FStar_HyperStack_ST.fs b/fsharp/base/FStar_HyperStack_ST.fs 
similarity index 100% rename from ulib/fs/FStar_HyperStack_ST.fs rename to fsharp/base/FStar_HyperStack_ST.fs diff --git a/ulib/fs/FStar_IO.fs b/fsharp/base/FStar_IO.fs similarity index 100% rename from ulib/fs/FStar_IO.fs rename to fsharp/base/FStar_IO.fs diff --git a/ulib/fs/FStar_Int16.fs b/fsharp/base/FStar_Int16.fs similarity index 100% rename from ulib/fs/FStar_Int16.fs rename to fsharp/base/FStar_Int16.fs diff --git a/ulib/fs/FStar_Int32.fs b/fsharp/base/FStar_Int32.fs similarity index 100% rename from ulib/fs/FStar_Int32.fs rename to fsharp/base/FStar_Int32.fs diff --git a/ulib/fs/FStar_Int64.fs b/fsharp/base/FStar_Int64.fs similarity index 100% rename from ulib/fs/FStar_Int64.fs rename to fsharp/base/FStar_Int64.fs diff --git a/ulib/fs/FStar_Int8.fs b/fsharp/base/FStar_Int8.fs similarity index 100% rename from ulib/fs/FStar_Int8.fs rename to fsharp/base/FStar_Int8.fs diff --git a/ulib/fs/FStar_List.fs b/fsharp/base/FStar_List.fs similarity index 100% rename from ulib/fs/FStar_List.fs rename to fsharp/base/FStar_List.fs diff --git a/ulib/fs/FStar_List_Tot_Base.fs b/fsharp/base/FStar_List_Tot_Base.fs similarity index 100% rename from ulib/fs/FStar_List_Tot_Base.fs rename to fsharp/base/FStar_List_Tot_Base.fs diff --git a/ulib/fs/FStar_Map.fs b/fsharp/base/FStar_Map.fs similarity index 100% rename from ulib/fs/FStar_Map.fs rename to fsharp/base/FStar_Map.fs diff --git a/ulib/fs/FStar_Monotonic_Heap.fs b/fsharp/base/FStar_Monotonic_Heap.fs similarity index 100% rename from ulib/fs/FStar_Monotonic_Heap.fs rename to fsharp/base/FStar_Monotonic_Heap.fs diff --git a/ulib/fs/FStar_Option.fs b/fsharp/base/FStar_Option.fs similarity index 100% rename from ulib/fs/FStar_Option.fs rename to fsharp/base/FStar_Option.fs diff --git a/ulib/fs/FStar_Pervasives_Native.fs b/fsharp/base/FStar_Pervasives_Native.fs similarity index 100% rename from ulib/fs/FStar_Pervasives_Native.fs rename to fsharp/base/FStar_Pervasives_Native.fs diff --git a/ulib/fs/FStar_ST.fs b/fsharp/base/FStar_ST.fs similarity index 100% rename from ulib/fs/FStar_ST.fs rename to fsharp/base/FStar_ST.fs diff --git a/ulib/fs/FStar_Set.fs b/fsharp/base/FStar_Set.fs similarity index 100% rename from ulib/fs/FStar_Set.fs rename to fsharp/base/FStar_Set.fs diff --git a/ulib/fs/FStar_String.fs b/fsharp/base/FStar_String.fs similarity index 100% rename from ulib/fs/FStar_String.fs rename to fsharp/base/FStar_String.fs diff --git a/ulib/fs/FStar_UInt16.fs b/fsharp/base/FStar_UInt16.fs similarity index 100% rename from ulib/fs/FStar_UInt16.fs rename to fsharp/base/FStar_UInt16.fs diff --git a/ulib/fs/FStar_UInt32.fs b/fsharp/base/FStar_UInt32.fs similarity index 100% rename from ulib/fs/FStar_UInt32.fs rename to fsharp/base/FStar_UInt32.fs diff --git a/ulib/fs/FStar_UInt64.fs b/fsharp/base/FStar_UInt64.fs similarity index 100% rename from ulib/fs/FStar_UInt64.fs rename to fsharp/base/FStar_UInt64.fs diff --git a/ulib/fs/FStar_UInt8.fs b/fsharp/base/FStar_UInt8.fs similarity index 100% rename from ulib/fs/FStar_UInt8.fs rename to fsharp/base/FStar_UInt8.fs diff --git a/ulib/fs/prims.fs b/fsharp/base/prims.fs similarity index 100% rename from ulib/fs/prims.fs rename to fsharp/base/prims.fs diff --git a/fsharp/ulibfs.fsproj b/fsharp/ulibfs.fsproj new file mode 100644 index 00000000000..cb898ea3030 --- /dev/null +++ b/fsharp/ulibfs.fsproj @@ -0,0 +1,106 @@ + + + netstandard2.0 + --nowarn:0086 --mlcompatibility --nologo + Library + false + True + false + 0.0.3 + README.md + fstar-new.png + https://fstar-lang.org/ + 
https://github.com/FStarLang/FStar + LICENSE + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ulib/gmake/Makefile.tmpl b/gmake/Makefile.tmpl similarity index 100% rename from ulib/gmake/Makefile.tmpl rename to gmake/Makefile.tmpl diff --git a/gmake/fstar.mk b/gmake/fstar.mk new file mode 100644 index 00000000000..e76849a1e2b --- /dev/null +++ b/gmake/fstar.mk @@ -0,0 +1,24 @@ +HINTS_ENABLED?=--use_hints +WARN_ERROR= +OTHERFLAGS+=$(WARN_ERROR) + +ifdef Z3 +OTHERFLAGS+=--smt $(Z3) +endif + +# Set ADMIT=1 to admit queries +ADMIT ?= +MAYBE_ADMIT = $(if $(ADMIT),--admit_smt_queries true) + +ifdef FSTAR_HOME + FSTAR_HOME := $(realpath $(FSTAR_HOME)) + ifeq ($(OS),Windows_NT) + FSTAR_HOME := $(shell cygpath -m $(FSTAR_HOME)) + endif + FSTAR_EXE?=$(FSTAR_HOME)/out/bin/fstar.exe +else +# FSTAR_HOME not defined, assume fstar.exe reachable from PATH +FSTAR_EXE?=fstar.exe +endif + +FSTAR=$(FSTAR_EXE) $(OTHERFLAGS) $(MAYBE_ADMIT) $(HINTS_ENABLED) $(WITH_CACHE_DIR) diff --git a/mk/fstar-01.mk b/mk/fstar-01.mk new file mode 120000 index 00000000000..0fa32735fe1 --- /dev/null +++ b/mk/fstar-01.mk @@ -0,0 +1 @@ +fstar-12.mk \ No newline at end of file diff --git a/mk/fstar-12.mk b/mk/fstar-12.mk new file mode 100644 index 00000000000..4bd201fccce --- /dev/null +++ b/mk/fstar-12.mk @@ -0,0 +1,82 @@ +include mk/common.mk + +$(call need_exe, FSTAR_EXE, fstar.exe to be used) +$(call need_dir_mk, CACHE_DIR, directory for checked files) +$(call need_dir_mk, OUTPUT_DIR, directory for extracted OCaml files) +$(call need, CODEGEN, backend (OCaml / Plugin)) +$(call need_dir, SRC, source directory) +$(call need, TAG, a tag for the .depend; to prevent clashes. Sorry.) + +.PHONY: clean +clean: + rm -rf $(CACHE_DIR) + rm -rf $(OUTPUT_DIR) + +.PHONY: all +all: verify ocaml + +.PHONY: ocaml +ocaml: all-ml + +.PHONY: verify +verify: all-checked + +FSTAR_OPTIONS += --lax +FSTAR_OPTIONS += --MLish_effect 'FStarC.Compiler.Effect' +FSTAR_OPTIONS += --cache_checked_modules +FSTAR_OPTIONS += --cache_dir "$(CACHE_DIR)" +FSTAR_OPTIONS += --odir "$(OUTPUT_DIR)" + +FSTAR_OPTIONS += --include "$(SRC)" + +FSTAR_OPTIONS += $(OTHERFLAGS) + +FSTAR = $(FSTAR_EXE) $(SIL) $(FSTAR_OPTIONS) + +# FIXME: Maintaining this list sucks. Could **the module** itself specify whether it is +# noextract? Actually, the F* compiler should already know which of its modules are +# in its library, and do this by default. +EXTRACT := +EXTRACT += --extract '*' +EXTRACT += --extract -Prims +EXTRACT += --extract -FStar +EXTRACT += --extract -FStarC.Extraction.ML.PrintML # very much a special case + +# Library wrangling +EXTRACT += --extract +FStar.Pervasives +EXTRACT += --extract -FStar.Pervasives.Native +EXTRACT += --extract +FStar.Class.Printable +EXTRACT += --extract +FStar.Seq.Base +EXTRACT += --extract +FStar.Seq.Properties + +%.checked.lax: LBL=$(basename $(basename $(notdir $@))) +%.checked.lax: + $(call msg, "LAXCHECK", $(LBL)) + $(FSTAR) $(if $(findstring FStarC,$<),--MLish,) $< + @# HACK: finding FStarC modules + @touch -c $@ ## SHOULD NOT BE NEEDED + +%.ml: FF=$(notdir $(subst .checked.lax,,$<)) +%.ml: MM=$(basename $(FF)) +%.ml: LBL=$(notdir $@) +# ^ HACK we use notdir to get the module name since we need to pass in +# the fst (not the checked file), but we don't know where it is, so this +# is relying on F* looking in its include path. 
+%.ml: + $(call msg, "EXTRACT", $(LBL)) + $(FSTAR) $(FF) $(if $(findstring FStarC,$<),--MLish,) --codegen $(CODEGEN) --extract_module $(MM) + @touch -c $@ ## SHOULD NOT BE NEEDED + +ROOTS := +ROOTS += $(SRC)/fstar/FStarC.Main.fst + +$(CACHE_DIR)/.depend$(TAG): + $(call msg, "DEPEND", $(SRC)) + $(FSTAR) --dep full $(ROOTS) $(EXTRACT) $(DEPFLAGS) --output_deps_to $@ + mkdir -p $(CACHE_DIR) + +depend: $(CACHE_DIR)/.depend$(TAG) +include $(CACHE_DIR)/.depend$(TAG) + +all-ml: $(ALL_ML_FILES) +all-checked: $(ALL_CHECKED_FILES) diff --git a/mk/lib.mk b/mk/lib.mk new file mode 100644 index 00000000000..78bb5ba6d74 --- /dev/null +++ b/mk/lib.mk @@ -0,0 +1,145 @@ +include mk/common.mk + +$(call need_exe, FSTAR_EXE, fstar.exe to be used) +$(call need_dir_mk, CACHE_DIR, directory for checked files) +$(call need_dir_mk, OUTPUT_DIR, directory for extracted OCaml files) +$(call need, CODEGEN, backend (OCaml / Plugin)) +$(call need_dir, SRC, source directory) +$(call need, TAG, a tag for the .depend; to prevent clashes. Sorry.) + +.PHONY: clean +clean: + rm -rf $(CACHE_DIR) + rm -rf $(OUTPUT_DIR) + +.PHONY: verify +verify: all-checked + +FSTAR_OPTIONS += --cache_checked_modules # should be the default +FSTAR_OPTIONS += --cache_dir "$(CACHE_DIR)" +FSTAR_OPTIONS += --odir "$(OUTPUT_DIR)" + +FSTAR_OPTIONS += --use_hints +FSTAR_OPTIONS += --hint_dir $(SRC)/.hints +FSTAR_OPTIONS += --warn_error -333 # Do not warn about missing hints +FSTAR_OPTIONS += --ext context_pruning +FSTAR_OPTIONS += --z3version 4.13.3 + +FSTAR_OPTIONS += --no_default_includes +FSTAR_OPTIONS += --include $(SRC) +ifeq ($(ADMIT),1) +FSTAR_OPTIONS += --admit_smt_queries true +endif + +# Extension for extracted files +ifeq ($(CODEGEN),FSharp) +EEXT:=fs +else +EEXT:=ml +endif + +FSTAR_OPTIONS += $(OTHERFLAGS) + +EXTRACT_NS := +EXTRACT_NS += -FStar.Buffer +EXTRACT_NS += -FStar.Bytes +EXTRACT_NS += -FStar.Char +EXTRACT_NS += -FStar.CommonST +EXTRACT_NS += -FStar.Constructive +EXTRACT_NS += -FStar.Dyn +EXTRACT_NS += -FStar.Float +EXTRACT_NS += -FStar.Ghost +EXTRACT_NS += -FStar.Heap +EXTRACT_NS += -FStar.Monotonic.Heap +EXTRACT_NS += -FStar.HyperStack.All +EXTRACT_NS += -FStar.HyperStack.ST +EXTRACT_NS += -FStar.HyperStack.IO +EXTRACT_NS += -FStar.Int16 +EXTRACT_NS += -FStar.Int32 +EXTRACT_NS += -FStar.Int64 +EXTRACT_NS += -FStar.Int8 +EXTRACT_NS += -FStar.IO +EXTRACT_NS += -FStar.List +EXTRACT_NS += -FStar.List.Tot.Base +EXTRACT_NS += -FStar.Option +EXTRACT_NS += -FStar.Pervasives.Native +EXTRACT_NS += -FStar.ST +EXTRACT_NS += -FStar.Exn +EXTRACT_NS += -FStar.String +EXTRACT_NS += -FStar.UInt16 +EXTRACT_NS += -FStar.UInt32 +EXTRACT_NS += -FStar.UInt64 +EXTRACT_NS += -FStar.UInt8 +EXTRACT_NS += -FStar.Pointer.Derived1 +EXTRACT_NS += -FStar.Pointer.Derived2 +EXTRACT_NS += -FStar.Pointer.Derived3 +EXTRACT_NS += -FStar.BufferNG +EXTRACT_NS += -FStar.TaggedUnion +EXTRACT_NS += -FStar.Bytes +EXTRACT_NS += -FStar.Util +EXTRACT_NS += -FStar.InteractiveHelpers +EXTRACT_NS += -FStar.Class +EXTRACT_NS += -FStar.Vector.Base +EXTRACT_NS += -FStar.Vector.Properties +EXTRACT_NS += -FStar.Vector +EXTRACT_NS += -FStar.TSet +EXTRACT_NS += -FStar.MSTTotal +EXTRACT_NS += -FStar.MST +EXTRACT_NS += -FStar.NMSTTotal +EXTRACT_NS += -FStar.NMST +EXTRACT_NS += -FStar.Printf +EXTRACT_NS += -FStar.ModifiesGen +EXTRACT_NS += -LowStar.Printf +EXTRACT_NS += -FStar.Sealed +EXTRACT_NS += +FStar.List.Pure.Base +EXTRACT_NS += +FStar.List.Tot.Properties +EXTRACT_NS += +FStar.Int.Cast.Full + +# Note: the pluginlib rules will enable these. 
+EXTRACT_NS += -FStar.Tactics +EXTRACT_NS += -FStar.Reflection + +FSTAR := $(FSTAR_EXE) $(SIL) $(FSTAR_OPTIONS) + +EXTRACT := --extract '* $(EXTRACT_NS)' + +%.checked: LBL=$(basename $(notdir $@)) +%.checked: + $(call msg, "CHECK", $(LBL)) + $(FSTAR) $< + @touch -c $@ ## SHOULD NOT BE NEEDED + +%.$(EEXT): FF=$(notdir $(subst .checked,,$<)) +%.$(EEXT): MM=$(basename $(FF)) +%.$(EEXT): LBL=$(notdir $@) +# ^ HACK: we use notdir to get the module name since we need to pass in +# the fst (not the checked file), but we don't know where it is, so this +# is relying on F* looking in its include path. sigh. +%.$(EEXT): + $(call msg, "EXTRACT", $(LBL)) + $(FSTAR) $(FF) --codegen $(CODEGEN) --extract_module $(MM) + @touch -c $@ ## SHOULD NOT BE NEEDED + +# Leaving this empty, F* will scan the include path for all fst/fsti +# files. This will read fstar.include and follow it too. +# ROOTS := +# No! If we do that, we will pick up files from the current directory +# (the root of the repo) since that is implicitly included in F*'s +# search path. So instead, be explicit about scanning over all the files +# in $(SRC) (i.e. ulib). Note that there is a still a problem if there is a +# file in the cwd named like a file in ulib/, F* may prefer the former. +ROOTS := $(shell find $(SRC) -name '*.fst' -o -name '*.fsti') + +$(CACHE_DIR)/.depend$(TAG): + $(call msg, "DEPEND", $(SRC)) + $(FSTAR) --dep full $(ROOTS) $(EXTRACT) $(DEPFLAGS) --output_deps_to $@ + mkdir -p $(CACHE_DIR) + +depend: $(CACHE_DIR)/.depend$(TAG) +include $(CACHE_DIR)/.depend$(TAG) + +all-checked: $(ALL_CHECKED_FILES) +# These targets imply verification of every file too, regardless +# of extraction. +all-ml: all-checked $(ALL_ML_FILES) +all-fs: all-checked $(ALL_FS_FILES) diff --git a/mk/plugins.mk b/mk/plugins.mk new file mode 100644 index 00000000000..9152e9a6196 --- /dev/null +++ b/mk/plugins.mk @@ -0,0 +1,125 @@ +include mk/common.mk + +$(call need_exe, FSTAR_EXE, fstar.exe to be used) +$(call need, CACHE_DIR, directory for checked files) +$(call need, OUTPUT_DIR, directory for extracted OCaml files) +$(call need, CODEGEN, backend (OCaml / Plugin)) +$(call need, SRC, source directory) +$(call need, TAG, a tag for the .depend; to prevent clashes. Sorry.) + +.PHONY: clean +clean: + rm -rf $(CACHE_DIR) + rm -rf $(OUTPUT_DIR) + +.PHONY: ocaml +ocaml: all-ml + +FSTAR_OPTIONS += --lax +FSTAR_OPTIONS += --cache_checked_modules +FSTAR_OPTIONS += --cache_dir "$(CACHE_DIR)" +FSTAR_OPTIONS += --odir "$(OUTPUT_DIR)" + +FSTAR_OPTIONS += --no_default_includes +FSTAR_OPTIONS += --include $(SRC) + +FSTAR_OPTIONS += $(OTHERFLAGS) + +FSTAR = $(FSTAR_EXE) $(SIL) $(FSTAR_OPTIONS) + +# FIXME: Maintaining this list sucks. Could **the module** itself specify whether it is +# noextract? Or maybe if we find an aptly-named .ml file then we auto skip? 
+EXTRACT := +EXTRACT += --extract '*' +EXTRACT += --extract -Prims +EXTRACT += --extract -FStar.Pervasives.Native +EXTRACT += --extract -FStar.All +EXTRACT += --extract -FStar.Ghost +EXTRACT += --extract -FStar.Heap +EXTRACT += --extract -FStar.Bytes +EXTRACT += --extract -FStar.Char +EXTRACT += --extract -FStar.Exn +EXTRACT += --extract -FStar.Float +EXTRACT += --extract -FStar.Int16 +EXTRACT += --extract -FStar.Int32 +EXTRACT += --extract -FStar.Int64 +EXTRACT += --extract -FStar.Int8 +EXTRACT += --extract +FStar.Int.Cast.Full +EXTRACT += --extract -FStar.List +EXTRACT += --extract +FStar.List.Pure.Base +EXTRACT += --extract +FStar.List.Tot.Properties +EXTRACT += --extract -FStar.Monotonic.Heap +EXTRACT += --extract -FStar.HyperStack.ST +EXTRACT += --extract -FStar.Option +EXTRACT += --extract -FStar.Printf +EXTRACT += --extract -FStar.Range +EXTRACT += --extract -FStar.ST +EXTRACT += --extract -FStar.String +EXTRACT += --extract -FStar.TSet +EXTRACT += --extract -FStar.UInt16 +EXTRACT += --extract -FStar.UInt32 +EXTRACT += --extract -FStar.UInt64 +EXTRACT += --extract -FStar.UInt8 +EXTRACT += --extract -FStar.Util + +# EXTRACT += --extract -FStar.BitVector +# EXTRACT += --extract -FStar.Calc + +%.checked.lax: + $(call msg, "LAXCHECK", $(basename $(basename $(notdir $@)))) + $(FSTAR) $(if $(findstring /ulib/,$<),,--MLish) $< + @touch -c $@ ## SHOULD NOT BE NEEDED + +%.ml: FF=$(notdir $(subst .checked.lax,,$<)) +%.ml: MM=$(basename $(FF)) +%.ml: + $(call msg, "EXTRACT", $(notdir $@)) + @# HACK we use notdir to get the module name since we need to pass in the + @# fst (not the checked file), but we don't know where it is, so this is + @# relying on F* looking in its include path. sigh. + $(FSTAR) $(FF) \ + --codegen $(CODEGEN) \ + --extract_module $(MM) + @touch -c $@ ## SHOULD NOT BE NEEDED + +# -------------------------------------------------------------------- +# Dependency analysis for bootstrapping +# -------------------------------------------------------------------- + +# List here the files that define plugins in the library, +# so we make sure to also extract them and link them into F*. 
+# MUST BE NON EMPTY OR WE WILL EXTRACT THE ENTIRE LIBRARY
+ROOTS += ../ulib/FStar.Tactics.Effect.fsti
+ROOTS += ../ulib/FStar.Order.fst
+ROOTS += ../ulib/FStar.Reflection.TermEq.fsti
+ROOTS += ../ulib/FStar.Reflection.TermEq.Simple.fsti
+ROOTS += ../ulib/FStar.Reflection.V2.Compare.fsti
+ROOTS += ../ulib/FStar.Reflection.V2.Formula.fst
+ROOTS += ../ulib/FStar.Tactics.BV.fsti
+ROOTS += ../ulib/FStar.Tactics.CanonCommMonoidSimple.Equiv.fst
+ROOTS += ../ulib/FStar.Tactics.Canon.fst
+ROOTS += ../ulib/FStar.Tactics.Canon.fsti
+ROOTS += ../ulib/FStar.Tactics.CheckLN.fsti
+ROOTS += ../ulib/FStar.Tactics.MApply0.fsti
+ROOTS += ../ulib/FStar.Tactics.MkProjectors.fsti
+ROOTS += ../ulib/FStar.Tactics.NamedView.fsti
+ROOTS += ../ulib/FStar.Tactics.Names.fsti
+ROOTS += ../ulib/FStar.Tactics.Parametricity.fsti
+ROOTS += ../ulib/FStar.Tactics.Print.fsti
+ROOTS += ../ulib/FStar.Tactics.SMT.fsti
+ROOTS += ../ulib/FStar.Tactics.Typeclasses.fsti
+ROOTS += ../ulib/FStar.Tactics.TypeRepr.fsti
+ROOTS += ../ulib/FStar.Tactics.V1.Logic.fsti
+ROOTS += ../ulib/FStar.Tactics.V2.Logic.fsti
+ROOTS += ../ulib/FStar.Tactics.V2.SyntaxHelpers.fst
+ROOTS += ../ulib/FStar.Tactics.Visit.fst
+
+$(CACHE_DIR)/.depend$(TAG):
+	$(call msg, "DEPEND")
+	$(FSTAR) --dep full $(ROOTS) $(EXTRACT) --output_deps_to $@
+	mkdir -p $(CACHE_DIR)
+
+depend: $(CACHE_DIR)/.depend$(TAG)
+include $(CACHE_DIR)/.depend$(TAG)
+
+all-ml: $(ALL_ML_FILES)
diff --git a/mk/src_package_mk.mk b/mk/src_package_mk.mk
new file mode 100644
index 00000000000..d8608811053
--- /dev/null
+++ b/mk/src_package_mk.mk
@@ -0,0 +1,52 @@
+# This makefile is for OCaml source distributions and is
+# modeled after stage{1,2}/Makefile, but
+# 1- is standalone, does not use common.mk or others
+# 2- does not install the library as it will not be there on
+#    OCaml source distributions
+#
+# FSTAR_DUNE_OPTIONS += --no-print-directory
+# FSTAR_DUNE_OPTIONS += --display=quiet
+
+FSTAR_DUNE_BUILD_OPTIONS := $(FSTAR_DUNE_OPTIONS)
+
+.NOTPARALLEL:
+# Sorry, but dune seems to get confused when its OCAMLPATH is changing
+
+.DEFAULT_GOAL:= all
+
+.PHONY: _force
+_force:
+
+build:
+	dune build --root=dune $(FSTAR_DUNE_BUILD_OPTIONS)
+
+install_bin: build
+	dune install --root=dune --prefix=$(CURDIR)/out
+
+check_lib: install_bin
+	env \
+	  SRC=ulib/ \
+	  FSTAR_EXE=out/bin/fstar.exe \
+	  CACHE_DIR=ulib.checked \
+	  TAG=lib \
+	  CODEGEN=none \
+	  OUTPUT_DIR=none \
+	  $(MAKE) -f mk/lib.mk verify
+
+install_lib: check_lib
+	@# Install get_fstar_z3 script
+	cp get_fstar_z3.sh $(CURDIR)/out/bin
+	@# Install library
+	cp -H -p -r ulib out/lib/fstar/ulib
+	echo 'ulib' >> out/lib/fstar/fstar.include
+	rm -f out/lib/fstar/ulib/*.config.json
+	@# Install checked files for the library
+	mkdir -p out/lib/fstar/ulib/.checked
+	cp -p ulib.checked/* out/lib/fstar/ulib/.checked/
+	echo '.checked' >> out/lib/fstar/ulib/fstar.include
+
+clean: _force
+	dune clean $(FSTAR_DUNE_OPTIONS) --root=dune
+	rm -rf $(CURDIR)/out
+
+all: install_lib
diff --git a/mk/stage.mk b/mk/stage.mk
new file mode 100644
index 00000000000..16a21e481e0
--- /dev/null
+++ b/mk/stage.mk
@@ -0,0 +1,54 @@
+FSTAR_ROOT ?= ..
+include $(FSTAR_ROOT)/mk/common.mk + +ifeq ($(V),) +FSTAR_DUNE_OPTIONS += --no-print-directory +FSTAR_DUNE_OPTIONS += --display=quiet +endif + +FSTAR_DUNE_BUILD_OPTIONS := $(FSTAR_DUNE_OPTIONS) +ifeq ($(FSTAR_DUNE_RELEASE),1) +FSTAR_DUNE_BUILD_OPTIONS += --release +endif + +.NOTPARALLEL: +# Sorry, but dune seems to get confused when its OCAMLPATH is changing + +.PHONY: _force +_force: + +fstarc-bare: _force + cd dune && dune build $(FSTAR_DUNE_BUILD_OPTIONS) fstarc-bare + +fstarc-full: _force + cd dune && dune build $(FSTAR_DUNE_BUILD_OPTIONS) fstarc-full + +libapp: _force + cd dune && dune build $(FSTAR_DUNE_BUILD_OPTIONS) libapp + +libplugin: _force + cd dune && dune build $(FSTAR_DUNE_BUILD_OPTIONS) libplugin + +clean: _force + dune clean $(FSTAR_DUNE_OPTIONS) --root=dune + rm -rf out + +install: fstarc-bare fstarc-full libapp libplugin + @# Seems to need one final build? + cd dune && dune build $(FSTAR_DUNE_BUILD_OPTIONS) + cd dune && dune install $(FSTAR_DUNE_OPTIONS) --prefix=$(abspath $(CURDIR)/out) + @# Install library + cp -H -p -r ulib out/lib/fstar/ulib + echo 'ulib' > out/lib/fstar/fstar.include + rm -f out/lib/fstar/ulib/*.config.json + @# Install checked files for the library + mkdir -p out/lib/fstar/ulib/.checked + cp -p ulib.checked/* out/lib/fstar/ulib/.checked/ + echo '.checked' >> out/lib/fstar/ulib/fstar.include + @# Install get_fstar_z3 script + cp ../.scripts/get_fstar_z3.sh $(CURDIR)/out/bin + @# License and extra files + cp ../LICENSE* $(CURDIR)/out/ + cp ../README.md $(CURDIR)/out/ + cp ../INSTALL.md $(CURDIR)/out/ + cp ../version.txt $(CURDIR)/out/ diff --git a/mk/stage0.mk b/mk/stage0.mk new file mode 100644 index 00000000000..ea9e676f35f --- /dev/null +++ b/mk/stage0.mk @@ -0,0 +1,25 @@ +# This file is used (or created) by the bring-stage0 rule in the toplevel Makefile + +include $(FSTAR_ROOT)/mk/common.mk + +.PHONY: force +_force: + +FSTAR_DUNE_BUILD_OPTIONS += --no-print-directory +FSTAR_DUNE_BUILD_OPTIONS += --display=quiet + +.DEFAULT_GOAL := fstar + +.PHONY: fstar +fstar: + @echo " DUNE BUILD" + $(Q)dune build $(FSTAR_DUNE_BUILD_OPTIONS) + @echo " DUNE INSTALL" + $(Q)dune install --prefix=. + +.PHONY: clean +clean: + dune clean + +trim: _force + dune clean $(FSTAR_DUNE_OPTIONS) diff --git a/mk/test.mk b/mk/test.mk index 2db05dcbb76..f7862946e04 100644 --- a/mk/test.mk +++ b/mk/test.mk @@ -23,7 +23,7 @@ include $(FSTAR_ROOT)/mk/common.mk .DEFAULT_GOAL := all # Set a default FSTAR_EXE for most clients. 
-FSTAR_EXE ?= $(FSTAR_ROOT)/bin/fstar.exe +FSTAR_EXE ?= $(FSTAR_ROOT)/out/bin/fstar.exe FSTAR_EXE := $(abspath $(FSTAR_EXE)) export FSTAR_EXE @@ -49,7 +49,7 @@ FSTAR = $(FSTAR_EXE) $(SIL) \ $(if $(NO_WRITE_CHECKED),,--cache_checked_modules) \ --odir $(OUTPUT_DIR) \ --cache_dir $(CACHE_DIR) \ - --already_cached Prims,FStar \ + --already_cached Prims,FStar,LowStar \ $(OTHERFLAGS) $(MAYBE_ADMIT) $(HINTS_ENABLED) ifneq ($(MAKECMDGOALS),clean) diff --git a/src/.gitignore b/src/.gitignore deleted file mode 100644 index dd882bda3cd..00000000000 --- a/src/.gitignore +++ /dev/null @@ -1,16 +0,0 @@ -ocaml.ml -fstarml.cm[io] -fsharp-output -/backend/ -u_boot_fsts -u_ocaml-output - -dep.graph -dep_simpl.graph -depgraph.pdf - -[Bb]in/ -[Oo]bj/ - -Makefile.local -*.bak diff --git a/src/FStarCompiler.fst.config.json b/src/FStarCompiler.fst.config.json index d59e9c396f6..029fe16c03e 100644 --- a/src/FStarCompiler.fst.config.json +++ b/src/FStarCompiler.fst.config.json @@ -1,31 +1,14 @@ { - "fstar_exe": "../bin/fstar.exe", + "fstar_exe": "../stage0/bin/fstar.exe", "options": [ "--MLish", "--MLish_effect", "FStarC.Compiler.Effect", "--lax", - "--cache_dir", - ".cache.boot", - "--no_location_info", + "--cache_dir", "../stage1/fstarc.checked", "--warn_error", "-271-272-241-319-274" ], "include_dirs": [ - "../ulib", - "basic", - "class", - "data", - "extraction", - "fstar", - "parser", - "prettyprint", - "reflection", - "smtencoding", - "syntax", - "syntax/print", - "tactics", - "tosyntax", - "typechecker", - "tests" + "." ] } diff --git a/src/Makefile b/src/Makefile deleted file mode 100644 index 6166f754a8d..00000000000 --- a/src/Makefile +++ /dev/null @@ -1,74 +0,0 @@ -# -*- Makefile -*- -include Makefile.config -export FSTAR_HOME # because of the recursive calls to `make` - -# -------------------------------------------------------------------- -.SUFFIXES: -MAKEFLAGS += --no-builtin-rules - -.PHONY: clean boot ocaml - -all: - $(error src/Makefile: Need to specify a rule) - -clean: clean-ocaml - -# -------------------------------------------------------------------- -# Bootstrapping in OCaml: The main logic is in Makefile.boot. 
-# -------------------------------------------------------------------- - -clean_boot: - $(Q)rm -rf .cache.boot - $(Q)rm -f ._depend - $(Q)rm -f .depend - -# -------------------------------------------------------------------------------- -# Now we have some make targets wrap calls to other makefiles, -# Notably, Makefile.boot, to extract ocaml from the compiler sources -# And ocaml-output/Makefile, to actually build the compiler in OCaml -# -------------------------------------------------------------------------------- -ocaml: - $(Q)+$(MAKE) -f Makefile.boot all-ml - -clean-ocaml: clean_boot - +$(Q)$(MAKE) -C ocaml-output clean - -# -------------------------------------------------------------------- -# Testing -# -------------------------------------------------------------------- - -.PHONY: ocaml-unit-tests -ocaml-unit-tests: - $(BIN)/fstar_tests.exe - -.PHONY: ulib-in-fsharp -ulib-in-fsharp: - $(MAKE) -C ../ulib ulib-in-fsharp - -.PHONY: uregressions -uregressions: tutorial utests uexamples - -.PHONY: tutorial -tutorial: book-code tutorial-old - -.PHONY: book-code -book-code: - +$(MAKE) -C ../doc/book/code - -.PHONY: tutorial-old -tutorial-old: - +$(MAKE) -C ../doc/old/tutorial regressions - -.PHONY: utests -utests: - +$(MAKE) -C ../tests all - -.PHONY: uexamples -uexamples: examples-all - -.PHONY: examples-all -examples-all: - +$(MAKE) -C ../examples all - -ctags: - ctags --exclude=boot_fsts --exclude=boot_fstis --exclude=ocaml-output -R . diff --git a/src/Makefile.boot b/src/Makefile.boot deleted file mode 100644 index ac97ba48390..00000000000 --- a/src/Makefile.boot +++ /dev/null @@ -1,111 +0,0 @@ -include Makefile.config - -FSTAR_HOME ?= .. - -# Provides variables INCLUDE_PATHS, FSTAR_BOOT_OPTIONS, -# and CACHE_DIR, shared with interactive mode targets -include Makefile.boot.common - -# This variable can be defined to the path of a different F* binary for -# bootstrapping (and only bootstrapping: the library will be checked -# with the newly-compiled F*). This is useful when developing some -# breaking changes that may not bootstrap. It can be passed as an -# argument to make or via the environment. -FSTAR_BOOT ?= $(FSTAR) - -# FSTAR_C: This is the way in which we invoke F* for boostrapping -# -- we use automatic dependence analysis based on files in ulib, src/{basic, ...} and boot -# -- MLish and lax tune type-inference for use with unverified ML programs -DUNE_SNAPSHOT ?= $(call maybe_cygwin_path,$(FSTAR_HOME)/ocaml) -OUTPUT_DIRECTORY = $(FSTAR_HOME)/src/ocaml-output/fstarc - -FSTAR_BOOT_OPTIONS += --MLish_effect FStarC.Compiler.Effect - -FSTAR_C=$(RUNLIM) $(FSTAR_BOOT) $(SIL) $(FSTAR_BOOT_OPTIONS) --cache_checked_modules - -# Tests.* goes to fstar-tests, the rest to fstar-lib -OUTPUT_DIRECTORY_FOR = $(if $(findstring FStarC_Tests_,$(1)),$(DUNE_SNAPSHOT)/fstar-tests/generated,$(OUTPUT_DIRECTORY)) - -EXTRACT_NAMESPACES=FStarC # It's that easy! - -# Except some files that want to extract are not within a particularly -# specific namespace. So, we mention extracting those explicitly. -# TODO: Do we really need this anymore? Which (implementation) modules -# from src/basic are *not* extracted? 
-EXTRACT_MODULES=FStar.Pervasives FStar.Order - -# And there are a few specific files that should not be extracted at -# all, despite being in one of the EXTRACT_NAMESPACES -NO_EXTRACT=FStarC.Tactics.Native FStarC.Tactics.Load \ - FStarC.Extraction.ML.PrintML FStarC.Compiler.List - -EXTRACT = $(addprefix --extract_module , $(EXTRACT_MODULES)) \ - $(addprefix --extract_namespace , $(EXTRACT_NAMESPACES)) \ - $(addprefix --no_extract , $(NO_EXTRACT)) - -# We first lax type-check each file, producing a .checked.lax file -# We touch the file, because if F* determined that the .checked.lax -# file was already up to date, it doesn't touch it. Touching it here -# ensures that if this rule is successful then %.checked.lax is more -# recent than its dependences. -%.checked.lax: - $(call msg, "LAXCHECK", $(basename $(basename $(notdir $@)))) - $(Q)$(BENCHMARK_PRE) $(FSTAR_C) --already_cached '*,'-$(basename $(notdir $<)) \ - $(if $(findstring /ulib/,$<),,--MLish) \ - $< - $(Q)@touch -c $@ - -# And then, in a separate invocation, from each .checked.lax we -# extract an .ml file -%.ml: - $(call msg, "EXTRACT", $(notdir $@)) - $(Q)$(BENCHMARK_PRE) $(FSTAR_C) $(notdir $(subst .checked.lax,,$<)) \ - --odir "$(call OUTPUT_DIRECTORY_FOR,"$@")" \ - $(if $(findstring /ulib/,$<),,--MLish) \ - --codegen Plugin \ - --extract_module $(basename $(notdir $(subst .checked.lax,,$<))) - -# -------------------------------------------------------------------- -# Dependency analysis for bootstrapping -# -------------------------------------------------------------------- - -# The dependence analysis starts from the main file and the unit-tests -# file as the roots, mentioning the the modules that are to be -# extracted. This emits dependences for each of the ML files we want -# to produce. -# -# We do an indirection via ._depend so we don't write an empty file if -# the dependency analysis failed. - -.depend: - $(call msg, "DEPEND") - $(Q)$(FSTAR_C) --dep full \ - fstar/FStarC.Main.fst \ - tests/FStarC.Tests.Test.fst \ - --odir $(OUTPUT_DIRECTORY) \ - $(EXTRACT) \ - --output_deps_to ._depend - @# We've generated deps for everything into fstar-lib/generated. - @# Here we fix up the .depend file to move tests out of the library. 
- $(Q)$(SED) 's,src/ocaml-output/fstarc/FStarC_Test,ocaml/fstar-tests/generated/FStarC_Test,g' <._depend >.depend - $(Q)mkdir -p $(CACHE_DIR) - -.PHONY: dep.graph -dep.graph: - $(call msg, "DEPEND") - $(Q)$(FSTAR_C) --dep graph \ - fstar/FStarC.Main.fst \ - tests/FStarC.Tests.Test.fst \ - $(EXTRACT) \ - --output_deps_to dep.graph - -depgraph.pdf: dep.graph - $(Q)$(FSTAR_HOME)/.scripts/simpl_graph.py dep.graph > dep_simpl.graph - $(call msg, "DOT", $@) - $(Q)dot -Tpdf -o $@ dep_simpl.graph - -depend: .depend - -include .depend - -all-ml: $(ALL_ML_FILES) diff --git a/src/Makefile.boot.common b/src/Makefile.boot.common deleted file mode 100644 index 40a8fcf8085..00000000000 --- a/src/Makefile.boot.common +++ /dev/null @@ -1,29 +0,0 @@ -# Makefiles in the following subdirectories include this file for the interactive mode targets %.fs-in and %.fsi-in -# Makefile.boot includes it too for bootstrapping -# Makefiles that include it should define FSTAR_HOME before the include - -INCLUDE_PATHS = \ - basic \ - class \ - data \ - extraction \ - fstar \ - parser \ - prettyprint \ - reflection \ - smtencoding \ - syntax \ - syntax/print \ - tactics \ - tosyntax \ - typechecker \ - tests - -CACHE_DIR?=$(FSTAR_HOME)/src/.cache.boot - -# 274, else we get a warning for shadowing parse.fsi, when opening FStar.Parser namespace - -FSTAR_BOOT_OPTIONS=$(OTHERFLAGS) --lax --no_location_info --warn_error -271-272-241-319-274 --cache_dir $(CACHE_DIR) $(addprefix --include , $(addprefix $(FSTAR_HOME)/src/,$(INCLUDE_PATHS))) - -%.fsti-in %.fst-in: - @echo $(FSTAR_BOOT_OPTIONS) diff --git a/src/Makefile.config b/src/Makefile.config deleted file mode 100644 index fd22c0227da..00000000000 --- a/src/Makefile.config +++ /dev/null @@ -1,19 +0,0 @@ -FSTAR_HOME=.. -include $(FSTAR_HOME)/.common.mk -include $(FSTAR_HOME)/ulib/gmake/z3.mk # This pins $(Z3) ... -include $(FSTAR_HOME)/ulib/gmake/fstar.mk # and $(FSTAR) for all sub-make calls - -# -------------------------------------------------------------------- -BIN=../bin - -# -------------------------------------------------------------------- -# Configuration of some platform-specific tools; eventually we will want a configure script - -DOS2UNIX=$(shell which dos2unix >/dev/null 2>&1 && echo dos2unix || echo true) - -# Use options compatible between BSD and GNU versions, on macOS and Linux -HEAD=head -SED=sed -FIND=find - -# -------------------------------------------------------------------- diff --git a/src/basic/FStarC.Compiler.Plugins.fst b/src/basic/FStarC.Compiler.Plugins.fst index 4eedcaf8221..35d7d8cfbba 100644 --- a/src/basic/FStarC.Compiler.Plugins.fst +++ b/src/basic/FStarC.Compiler.Plugins.fst @@ -27,51 +27,60 @@ module O = FStarC.Options open FStarC.Class.Show let loaded : ref (list string) = BU.mk_ref [] +let loaded_plugin_lib : ref bool = BU.mk_ref false let pout s = if Debug.any () then BU.print_string s let pout1 s x = if Debug.any () then BU.print1 s x let perr s = if Debug.any () then BU.print_error s let perr1 s x = if Debug.any () then BU.print1_error s x +let do_dynlink (fname:string) : unit = + try + dynlink_loadfile fname + with DynlinkError e -> + E.log_issue0 E.Error_PluginDynlink [ + E.text (BU.format1 "Failed to load plugin file %s" fname); + Pprint.prefix 2 1 (E.text "Reason:") (E.text e); + E.text (BU.format1 "Remove the `--load` option or use `--warn_error -%s` to ignore and continue." 
+ (show (E.errno E.Error_PluginDynlink))) + ]; + (* If we weren't ignoring this error, just stop now *) + E.stop_if_err () + let dynlink (fname:string) : unit = if List.mem fname !loaded then ( pout1 "Plugin %s already loaded, skipping\n" fname ) else ( pout ("Attempting to load " ^ fname ^ "\n"); - begin try - dynlink_loadfile fname - with DynlinkError e -> - E.log_issue0 E.Error_PluginDynlink [ - E.text (BU.format1 "Failed to load plugin file %s" fname); - Pprint.prefix 2 1 (E.text "Reason:") (E.text e); - E.text (BU.format1 "Remove the `--load` option or use `--warn_error -%s` to ignore and continue." - (show (E.errno E.Error_PluginDynlink))) - ]; - (* If we weren't ignoring this error, just stop now *) - E.stop_if_err () - end; + do_dynlink fname; loaded := fname :: !loaded; pout1 "Loaded %s\n" fname; () ) let load_plugin tac = + if not (!loaded_plugin_lib) then ( + pout "Loading fstar.pluginlib before first plugin\n"; + do_dynlink (BU.normalize_file_path <| BU.get_exec_dir () ^ "/../lib/fstar/pluginlib/fstar_pluginlib.cmxs"); + pout "Loaded fstar.pluginlib OK\n"; + loaded_plugin_lib := true + ); dynlink tac let load_plugins tacs = - List.iter load_plugin tacs + List.iter load_plugin tacs let load_plugins_dir dir = - (* Dynlink all .cmxs files in the given directory *) - (* fixme: confusion between FStarC.Compiler.String and FStar.String *) - BU.readdir dir - |> List.filter (fun s -> String.length s >= 5 && FStar.String.sub s (String.length s - 5) 5 = ".cmxs") - |> List.map (fun s -> dir ^ "/" ^ s) - |> load_plugins + (* Dynlink all .cmxs files in the given directory *) + (* fixme: confusion between FStarC.Compiler.String and FStar.String *) + BU.readdir dir + |> List.filter (fun s -> String.length s >= 5 && FStar.String.sub s (String.length s - 5) 5 = ".cmxs") + |> List.map (fun s -> dir ^ "/" ^ s) + |> load_plugins let compile_modules dir ms = let compile m = - let packages = [ "fstar.lib" ] in + let packages = [ "fstar.pluginlib" ] in let pkg pname = "-package " ^ pname in let args = ["ocamlopt"; "-shared"] (* FIXME shell injection *) @ ["-I"; dir] @@ -88,11 +97,11 @@ let compile_modules dir ms = | Some s -> s | None -> "" in - let env_setter = BU.format5 "env OCAMLPATH=\"%s/../lib/%s%s/%s%s\"" - Find.fstar_bin_directory - ocamlpath_sep - Find.fstar_bin_directory + let env_setter = BU.format3 "env OCAMLPATH=\"%s%s%s\"" + (Find.locate_ocaml ()) ocamlpath_sep + // Options.fstar_bin_directory // needed? + // ocamlpath_sep old_ocamlpath in let cmd = String.concat " " (env_setter :: "ocamlfind" :: args) in diff --git a/src/basic/FStarC.Find.fst b/src/basic/FStarC.Find.fst index e4bc5d9a172..f41f4858c44 100644 --- a/src/basic/FStarC.Find.fst +++ b/src/basic/FStarC.Find.fst @@ -59,9 +59,7 @@ let lib_root () : option string = | None -> (* Otherwise, try to find the library in the default locations. It's ulib/ in the repository, and lib/fstar/ in the binary package. 
*) - if Util.file_exists (fstar_bin_directory ^ "/../ulib") - then Some (fstar_bin_directory ^ "/../ulib") - else if Util.file_exists (fstar_bin_directory ^ "/../lib/fstar") + if Util.file_exists (fstar_bin_directory ^ "/../lib/fstar") then Some (fstar_bin_directory ^ "/../lib/fstar") else None diff --git a/src/basic/FStarC.Options.fst b/src/basic/FStarC.Options.fst index 71649bfc83d..ca75d797e06 100644 --- a/src/basic/FStarC.Options.fst +++ b/src/basic/FStarC.Options.fst @@ -843,7 +843,7 @@ let rec specs_with_types warn_unsafe : list (char & string & opt_type & Pprint.d ( noshort, "codegen", - EnumStr ["OCaml"; "FSharp"; "krml"; "Plugin"; "Extension"], + EnumStr ["OCaml"; "FSharp"; "krml"; "Plugin"; "PluginNoLib"; "Extension"], text "Generate code for further compilation to executable code, or build a compiler plugin"); ( noshort, @@ -956,7 +956,7 @@ let rec specs_with_types warn_unsafe : list (char & string & opt_type & Pprint.d "extract", Accumulated (SimpleStr "One or more semicolon separated occurrences of '[TargetName:]ModuleSelector'"), text "Extract only those modules whose names or namespaces match the provided options. \ - 'TargetName' ranges over {OCaml, krml, FSharp, Plugin, Extension}. \ + 'TargetName' ranges over {OCaml, krml, FSharp, Plugin, PluginNoLib, Extension}. \ A 'ModuleSelector' is a space or comma-separated list of '[+|-]( * | namespace | module)'. \ For example --extract 'OCaml:A -A.B' --extract 'krml:A -A.C' --extract '*' means \ for OCaml, extract everything in the A namespace only except A.B; \ @@ -1932,6 +1932,7 @@ let parse_codegen = | "FSharp" -> Some FSharp | "krml" -> Some Krml | "Plugin" -> Some Plugin + | "PluginNoLib" -> Some PluginNoLib | "Extension" -> Some Extension | _ -> None @@ -1941,6 +1942,7 @@ let print_codegen = | FSharp -> "FSharp" | Krml -> "krml" | Plugin -> "Plugin" + | PluginNoLib -> "PluginNoLib" | Extension -> "Extension" let codegen () = @@ -2196,7 +2198,7 @@ let extract_settings | Some x -> [tgt,x] in { - target_specific_settings = List.collect merge_target [OCaml;FSharp;Krml;Plugin;Extension]; + target_specific_settings = List.collect merge_target [OCaml;FSharp;Krml;Plugin;PluginNoLib;Extension]; default_settings = merge_setting p0.default_settings p1.default_settings } in diff --git a/src/basic/FStarC.Options.fsti b/src/basic/FStarC.Options.fsti index 6408a57f5bc..6057da6504d 100644 --- a/src/basic/FStarC.Options.fsti +++ b/src/basic/FStarC.Options.fsti @@ -26,6 +26,7 @@ type codegen_t = | FSharp | Krml | Plugin + | PluginNoLib | Extension //let __test_norm_all = Util.mk_ref false diff --git a/src/basic/Makefile b/src/basic/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/basic/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/src/default.nix b/src/default.nix deleted file mode 100644 index 5bda511e7cd..00000000000 --- a/src/default.nix +++ /dev/null @@ -1,28 +0,0 @@ -{ fstar, lib, ocamlPackages, stdenv, version }: - -stdenv.mkDerivation { - pname = "fstar-ocaml-snapshot"; - inherit version; - - src = lib.cleanSourceWith { - src = ./..; - filter = path: _: - let relPath = lib.removePrefix (toString ./.. 
+ "/") (toString path); - in lib.any (lib.flip lib.hasPrefix relPath) [ "src" "ulib" ] - || (lib.hasPrefix "ocaml" relPath && !(lib.hasInfix "/generated/" relPath) - && !(lib.hasInfix "/dynamic/" relPath)) - || lib.hasSuffix ".common.mk" relPath; - }; - - preConfigure = '' - mkdir bin - cp ${fstar}/bin/fstar.exe bin - cd src/ocaml-output - ''; - - nativeBuildInputs = with ocamlPackages; [ ocaml menhir ]; - - enableParallelBuilding = true; - - installPhase = "mv ../../ocaml $out"; -} diff --git a/src/extraction/FStarC.Extraction.ML.Code.fst b/src/extraction/FStarC.Extraction.ML.Code.fst index 300e487bdb5..7e6f62a1e21 100644 --- a/src/extraction/FStarC.Extraction.ML.Code.fst +++ b/src/extraction/FStarC.Extraction.ML.Code.fst @@ -182,7 +182,7 @@ let prim_constructors = [ (* -------------------------------------------------------------------- *) let is_prims_ns (ns : list mlsymbol) = - ns = ["Prims"] || ns = ["Prims"] + ns = ["Prims"] || ns = ["Fstarcompiler.Prims"] (* -------------------------------------------------------------------- *) let as_bin_op ((ns, x) : mlpath) = diff --git a/src/extraction/FStarC.Extraction.ML.Modul.fst b/src/extraction/FStarC.Extraction.ML.Modul.fst index d9b7d2e79b0..1a96f34e481 100644 --- a/src/extraction/FStarC.Extraction.ML.Modul.fst +++ b/src/extraction/FStarC.Extraction.ML.Modul.fst @@ -1052,7 +1052,7 @@ let rec extract_sig (g:env_t) (se:sigelt) : env_t & list mlmodule1 = (* Ignore tactics whenever we're not extracting plugins *) | Sig_let {lbs=(_, lbs)} - when Options.codegen () <> Some (Options.Plugin) && + when not <| List.mem (Options.codegen ()) [Some Options.Plugin; Some Options.PluginNoLib] && List.for_all (lb_is_tactic g) lbs -> g, [] diff --git a/src/extraction/FStarC.Extraction.ML.RegEmb.fst b/src/extraction/FStarC.Extraction.ML.RegEmb.fst index 04ebd3ddd7e..f320973195e 100644 --- a/src/extraction/FStarC.Extraction.ML.RegEmb.fst +++ b/src/extraction/FStarC.Extraction.ML.RegEmb.fst @@ -84,27 +84,27 @@ let ml_none : mlexpr = mk (MLE_Name (["FStar"; "Pervasives"; "Native"], "None")) let ml_some : mlexpr = mk (MLE_Name (["FStar"; "Pervasives"; "Native"], "Some")) let s_tdataconstr = - mk (MLE_Name (splitlast ["FStarC"; "Syntax"; "Syntax"; "tdataconstr"])) + mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Syntax"; "Syntax"; "tdataconstr"])) let mk_app = - mk (MLE_Name (splitlast ["FStarC"; "Syntax"; "Util"; "mk_app"])) + mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Syntax"; "Util"; "mk_app"])) let tm_fvar = - mk (MLE_Name (splitlast ["FStarC"; "Syntax"; "Syntax"; "Tm_fvar"])) + mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Syntax"; "Syntax"; "Tm_fvar"])) let fv_eq_lid = - mk (MLE_Name (splitlast ["FStarC"; "Syntax"; "Syntax"; "fv_eq_lid"])) + mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Syntax"; "Syntax"; "fv_eq_lid"])) let lid_of_str = - mk (MLE_Name (splitlast ["FStarC"; "Ident"; "lid_of_str"])) + mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Ident"; "lid_of_str"])) let nil_lid = Ident.lid_of_str "Prims.Nil" let cons_lid = Ident.lid_of_str "Prims.Cons" -let embed = mk (MLE_Name (splitlast ["FStarC"; "Syntax"; "Embeddings"; "Base"; "extracted_embed"])) -let unembed = mk (MLE_Name (splitlast ["FStarC"; "Syntax"; "Embeddings"; "Base"; "extracted_unembed"])) -let bind_opt = mk (MLE_Name (splitlast ["FStarC"; "Compiler"; "Util"; "bind_opt"])) +let embed = mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Syntax"; "Embeddings"; "Base"; "extracted_embed"])) +let unembed = mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Syntax"; 
"Embeddings"; "Base"; "extracted_unembed"])) +let bind_opt = mk (MLE_Name (splitlast ["Fstarcompiler.FStarC"; "Compiler"; "Util"; "bind_opt"])) let ml_nbe_unsupported : mlexpr = (* extraction thunks this definition *) - let hd = mk (MLE_Name (["FStarC"; "TypeChecker"; "NBETerm"], "e_unsupported")) in + let hd = mk (MLE_Name (["Fstarcompiler.FStarC"; "TypeChecker"; "NBETerm"], "e_unsupported")) in mk (MLE_App (hd, [ml_unit])) let ml_magic : mlexpr = @@ -153,10 +153,10 @@ type embedding_data = { (*** List of registered embeddings ***) let builtin_embeddings : list (Ident.lident & embedding_data) = - let syn_emb_lid s = Ident.lid_of_path ["FStarC"; "Syntax"; "Embeddings"; s] Range.dummyRange in - let nbe_emb_lid s = Ident.lid_of_path ["FStarC"; "TypeChecker"; "NBETerm"; s] Range.dummyRange in - let refl_emb_lid s = Ident.lid_of_path ["FStarC"; "Reflection"; "V2"; "Embeddings"; s] Range.dummyRange in - let nbe_refl_emb_lid s = Ident.lid_of_path ["FStarC"; "Reflection"; "V2"; "NBEEmbeddings"; s] Range.dummyRange in + let syn_emb_lid s = Ident.lid_of_path ["Fstarcompiler.FStarC"; "Syntax"; "Embeddings"; s] Range.dummyRange in + let nbe_emb_lid s = Ident.lid_of_path ["Fstarcompiler.FStarC"; "TypeChecker"; "NBETerm"; s] Range.dummyRange in + let refl_emb_lid s = Ident.lid_of_path ["Fstarcompiler.FStarC"; "Reflection"; "V2"; "Embeddings"; s] Range.dummyRange in + let nbe_refl_emb_lid s = Ident.lid_of_path ["Fstarcompiler.FStarC"; "Reflection"; "V2"; "NBEEmbeddings"; s] Range.dummyRange in [ (PC.int_lid, {arity=0; syn_emb=syn_emb_lid "e_int"; nbe_emb=Some(nbe_emb_lid "e_int")}); (PC.bool_lid, {arity=0; syn_emb=syn_emb_lid "e_bool"; nbe_emb=Some(nbe_emb_lid "e_bool")}); @@ -248,8 +248,8 @@ let rec embedding_for let emb_arrow e1 e2 = let comb = match k with - | SyntaxTerm -> mk <| MLE_Name (["FStarC"; "Syntax"; "Embeddings"], "e_arrow") - | NBETerm -> mk <| MLE_Name (["FStarC"; "TypeChecker"; "NBETerm"], "e_arrow") + | SyntaxTerm -> mk <| MLE_Name (["Fstarcompiler.FStarC"; "Syntax"; "Embeddings"], "e_arrow") + | NBETerm -> mk <| MLE_Name (["Fstarcompiler.FStarC"; "TypeChecker"; "NBETerm"], "e_arrow") in mk (MLE_App (comb, [e1; e2])) in @@ -267,8 +267,8 @@ let rec embedding_for | Tm_name bv when BU.for_some (find_env_entry bv) env -> let comb = match k with - | SyntaxTerm -> mk <| MLE_Name (["FStarC"; "Syntax"; "Embeddings"], "mk_any_emb") - | NBETerm -> mk <| MLE_Name (["FStarC"; "TypeChecker"; "NBETerm"], "mk_any_emb") + | SyntaxTerm -> mk <| MLE_Name (["Fstarcompiler.FStarC"; "Syntax"; "Embeddings"], "mk_any_emb") + | NBETerm -> mk <| MLE_Name (["Fstarcompiler.FStarC"; "TypeChecker"; "NBETerm"], "mk_any_emb") in let s = snd (BU.must (BU.find_opt (find_env_entry bv) env)) in mk <| MLE_App(comb, [str_to_name s]) @@ -368,7 +368,7 @@ let interpret_plugin_as_term_fun (env:UEnv.uenv) (fv:fv) (t:typ) (arity_opt:opti let str_to_name s = as_name ([], s) in let fv_lid_embedded = with_ty MLTY_Top <| - MLE_App (as_name (["FStarC_Ident"],"lid_of_str"), + MLE_App (as_name (["Fstarcompiler.FStarC_Ident"],"lid_of_str"), [with_ty MLTY_Top <| MLE_Const (MLC_String (Ident.string_of_lid fv_lid))]) in let mk_tactic_interpretation l arity = @@ -380,7 +380,7 @@ let interpret_plugin_as_term_fun (env:UEnv.uenv) (fv:fv) (t:typ) (arity_opt:opti | SyntaxTerm -> "mk_tactic_interpretation_" | NBETerm -> "mk_nbe_tactic_interpretation_" in - as_name (["FStarC_Tactics_InterpFuns"], idroot^string_of_int arity) + as_name (["Fstarcompiler.FStarC_Tactics_InterpFuns"], idroot^string_of_int arity) in let mk_from_tactic l arity = let 
idroot = @@ -388,13 +388,13 @@ let interpret_plugin_as_term_fun (env:UEnv.uenv) (fv:fv) (t:typ) (arity_opt:opti | SyntaxTerm -> "from_tactic_" | NBETerm -> "from_nbe_tactic_" in - as_name (["FStarC_Tactics_Native"], idroot^string_of_int arity) + as_name (["Fstarcompiler.FStarC_Tactics_Native"], idroot^string_of_int arity) in let mk_arrow_as_prim_step k (arity: int) : mlexpr = let modul = match k with - | SyntaxTerm -> ["FStarC"; "Syntax"; "Embeddings"] - | NBETerm -> ["FStarC"; "TypeChecker"; "NBETerm"] + | SyntaxTerm -> ["Fstarcompiler.FStarC"; "Syntax"; "Embeddings"] + | NBETerm -> ["Fstarcompiler.FStarC"; "TypeChecker"; "NBETerm"] in as_name (modul, "arrow_as_prim_step_" ^ string_of_int arity) in @@ -414,7 +414,7 @@ let interpret_plugin_as_term_fun (env:UEnv.uenv) (fv:fv) (t:typ) (arity_opt:opti match tvar_names with | [] -> let body = - mk <| MLE_App(as_name (["FStarC_Syntax_Embeddings"], "debug_wrap"), + mk <| MLE_App(as_name (["Fstarcompiler.FStarC_Syntax_Embeddings"], "debug_wrap"), [with_ty MLTY_Top <| MLE_Const (MLC_String (Ident.string_of_lid fv_lid)); ml_lam "_" (mk <| MLE_App(body, [str_to_name "args"]))]) in @@ -448,7 +448,7 @@ let interpret_plugin_as_term_fun (env:UEnv.uenv) (fv:fv) (t:typ) (arity_opt:opti mk <| MLE_Match(as_name ([], "args"), [branch; default_branch]) in let body = - mk <| MLE_App(as_name (["FStarC_Syntax_Embeddings"], "debug_wrap"), + mk <| MLE_App(as_name (["Fstarcompiler.FStarC_Syntax_Embeddings"], "debug_wrap"), [with_ty MLTY_Top <| MLE_Const (MLC_String (Ident.string_of_lid fv_lid)); ml_lam "_" body]) in @@ -701,8 +701,8 @@ let __do_handle_plugin (g: uenv) (arity_opt: option int) (se: sigelt) : list mlm | Some (interp, nbe_interp, arity, plugin) -> let register, args = if plugin - then (["FStarC_Tactics_Native"], "register_plugin"), [interp; nbe_interp] - else (["FStarC_Tactics_Native"], "register_tactic"), [interp] + then (["Fstarcompiler.FStarC_Tactics_Native"], "register_plugin"), [interp; nbe_interp] + else (["Fstarcompiler.FStarC_Tactics_Native"], "register_tactic"), [interp] in let h = with_ty MLTY_Top <| MLE_Name register in let arity = MLE_Const (MLC_Int(string_of_int arity, None)) in @@ -741,7 +741,7 @@ let __do_handle_plugin (g: uenv) (arity_opt: option int) (se: sigelt) : list mlm let tcenv = tcenv_of_uenv g in let ml_unembed = mk_unembed tcenv mutual_lids record_fields ctors in let ml_embed = mk_embed tcenv mutual_lids record_fields ctors in - let def = mk (MLE_App (mk (MLE_Name (["FStarC"; "Syntax"; "Embeddings"; "Base"], "mk_extracted_embedding")), [ + let def = mk (MLE_App (mk (MLE_Name (["Fstarcompiler.FStarC"; "Syntax"; "Embeddings"; "Base"], "mk_extracted_embedding")), [ ml_name; ml_unembed; ml_embed])) @@ -828,9 +828,11 @@ let maybe_register_plugin (g:uenv) (se:sigelt) : list mlmodule1 = | _ -> Some None ) in - if Options.codegen() <> Some Options.Plugin then + if not <| List.mem (Options.codegen()) [Some Options.Plugin; Some Options.PluginNoLib] + then [] - else match plugin_with_arity se.sigattrs with + else + match plugin_with_arity se.sigattrs with | None -> [] (* ignore projectors and discriminators, they get a @@plugin attribute inherited from the type, but we should not do anything for them. 
*) diff --git a/src/extraction/FStarC.Extraction.ML.UEnv.fst b/src/extraction/FStarC.Extraction.ML.UEnv.fst index 83db90868cf..650e83228ea 100644 --- a/src/extraction/FStarC.Extraction.ML.UEnv.fst +++ b/src/extraction/FStarC.Extraction.ML.UEnv.fst @@ -53,6 +53,10 @@ module Const = FStarC.Parser.Const open FStarC.Class.Show +let plug () = Options.codegen () = Some Options.Plugin + || Options.codegen () = Some Options.PluginNoLib +let plug_no_lib () = Options.codegen () = Some Options.PluginNoLib + (**** Type definitions *) (** A top-level F* type definition, i.e., a type abbreviation, @@ -249,11 +253,22 @@ let is_fv_type g fv = g.tydefs |> BU.for_some (fun tydef -> fv_eq fv tydef.tydef_fv) let no_fstar_stubs_ns (ns : list mlsymbol) : list mlsymbol = - let pl = Options.codegen () = Some Options.Plugin in match ns with - | "Prims" :: [] when pl -> "Prims" :: [] - | "FStar"::"Stubs"::rest when pl -> "FStarC"::rest - | "FStar"::"Stubs"::rest -> "FStar"::rest // unclear + | "FStar"::"Stubs"::rest when plug_no_lib () && Options.Ext.get "__guts" <> "" -> "FStarC"::rest + + (* These 3 modules are special, and are not in the guts. They live in src/ml/full and + are visible at the ambient namespace when building the plugin lib. *) + | "FStar"::"Stubs"::"Tactics"::"V1"::"Builtins"::[] when plug () -> + "FStarC"::"Tactics"::"V1"::"Builtins"::[] + | "FStar"::"Stubs"::"Tactics"::"V2"::"Builtins"::[] when plug () -> + "FStarC"::"Tactics"::"V2"::"Builtins"::[] + | "FStar"::"Stubs"::"Tactics"::"Unseal"::[] when plug () -> + "FStarC"::"Tactics"::"Unseal"::[] + + | "FStar"::"Stubs"::rest when plug () -> "Fstarcompiler.FStarC"::rest // review, but I think it's right + + | "FStar"::"Stubs"::rest -> "FStar"::rest // review, wrong + | _ -> ns let no_fstar_stubs (p : mlpath) : mlpath = @@ -278,9 +293,8 @@ let lookup_record_field_name g (type_name, fn) = | None -> failwith ("Field name not found: " ^ string_of_lid key) | Some mlp -> let ns, id = mlp in - if Options.codegen () = Some Options.Plugin - then List.filter (fun s -> s <> "Stubs") ns, id - else ns, id + let ns = no_fstar_stubs_ns ns in + ns, id (**** Naming conventions and freshness (internal) *) @@ -302,7 +316,9 @@ let initial_mlident_map = (match Options.codegen() with | Some Options.FSharp -> fsharpkeywords | Some Options.OCaml - | Some Options.Plugin -> ocamlkeywords + | Some Options.Plugin + | Some Options.PluginNoLib -> + ocamlkeywords | Some Options.Krml -> krml_keywords | Some Options.Extension -> [] // TODO | None -> []) @@ -412,6 +428,42 @@ let new_mlpath_of_lident (g:uenv) (x : lident) : mlpath & uenv = let g = { g with env_mlident_map = map } in (mlns_of_lid x, name), g in + let guts (p::ps, l) = ("Fstarcompiler."^p) :: ps, l in + let mlp = + match string_of_lid x with + (* This sucks, but these are the types in the interface + to tactic primitives. Tuples/lists are not here since they + get extracted to the OCaml native ones. 
*) + | "Prims.dtuple2" + | "Prims.Mkdtuple2" + | "FStar.Pervasives.either" + | "FStar.Pervasives.Inl" + | "FStar.Pervasives.Inr" + + | "FStar.Pervasives.norm_step" + | "FStar.Pervasives.norm_debug" + | "FStar.Pervasives.simplify" + | "FStar.Pervasives.weak" + | "FStar.Pervasives.hnf" + | "FStar.Pervasives.primops" + | "FStar.Pervasives.delta" + | "FStar.Pervasives.norm_debug" + | "FStar.Pervasives.zeta" + | "FStar.Pervasives.zeta_full" + | "FStar.Pervasives.iota" + | "FStar.Pervasives.nbe" + | "FStar.Pervasives.reify_" + | "FStar.Pervasives.delta_only" + | "FStar.Pervasives.delta_fully" + | "FStar.Pervasives.delta_attr" + | "FStar.Pervasives.delta_qualifier" + | "FStar.Pervasives.delta_namespace" + | "FStar.Pervasives.unmeta" + | "FStar.Pervasives.unascribe" + when plug () + -> guts mlp + | _ -> mlp + in let g = { g with mlpath_of_lid = BU.psmap_add g.mlpath_of_lid (string_of_lid x) mlp } in diff --git a/src/extraction/FStarC.Extraction.ML.Util.fst b/src/extraction/FStarC.Extraction.ML.Util.fst index a98e141af47..784133c9034 100644 --- a/src/extraction/FStarC.Extraction.ML.Util.fst +++ b/src/extraction/FStarC.Extraction.ML.Util.fst @@ -395,9 +395,9 @@ let rec uncurry_mlty_fun t = let list_elements (e:mlexpr) : option (list mlexpr) = let rec list_elements acc e = match e.expr with - | MLE_CTor (([ "Prims" ], "Cons" ), [ hd; tl ]) -> + | MLE_CTor (([ "Fstarcompiler.Prims" ], "Cons" ), [ hd; tl ]) -> list_elements (hd :: acc) tl - | MLE_CTor (([ "Prims" ], "Nil" ), []) -> + | MLE_CTor (([ "Fstarcompiler.Prims" ], "Nil" ), []) -> List.rev acc |> Some | MLE_CTor (([ "Prims" ], "Cons" ), [ hd; tl ]) -> list_elements (hd :: acc) tl diff --git a/src/extraction/Makefile b/src/extraction/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/extraction/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/src/fstar/FStarC.CheckedFiles.fst b/src/fstar/FStarC.CheckedFiles.fst index 479a6b43362..5a9b6a2ba03 100644 --- a/src/fstar/FStarC.CheckedFiles.fst +++ b/src/fstar/FStarC.CheckedFiles.fst @@ -206,8 +206,13 @@ let load_checked_file (fn:string) (checked_fn:string) :cache_t = if !dbg then BU.print1 "Trying to load checked file result %s\n" checked_fn; let elt = checked_fn |> BU.smap_try_find mcache in - if elt |> is_some then elt |> must //already loaded - else + if elt |> is_some + then ( + //already loaded + if !dbg then + BU.print1 "Already loaded checked file %s\n" checked_fn; + elt |> must + ) else let add_and_return elt = BU.smap_add mcache checked_fn elt; elt in if not (BU.file_exists checked_fn) then let msg = BU.format1 "checked file %s does not exist" checked_fn in diff --git a/src/fstar/FStarC.OCaml.fst b/src/fstar/FStarC.OCaml.fst index ce765823dd4..82aa41b4dc3 100644 --- a/src/fstar/FStarC.OCaml.fst +++ b/src/fstar/FStarC.OCaml.fst @@ -49,7 +49,7 @@ let exec_in_ocamlenv #a (cmd : string) (args : list string) : a = | Inr _ -> exit 1 let app_lib = "fstar.lib" -let plugin_lib = "fstar.lib" +let plugin_lib = "fstar.pluginlib" (* OCaml Warning 8: this pattern-matching is not exhaustive. This is usually benign as we check for exhaustivenss via SMT. *) diff --git a/src/fstar/FStarC.OCaml.fsti b/src/fstar/FStarC.OCaml.fsti index 9b095824b3a..5f41b0cf2d1 100644 --- a/src/fstar/FStarC.OCaml.fsti +++ b/src/fstar/FStarC.OCaml.fsti @@ -41,6 +41,6 @@ arguments. *) val exec_ocamlopt #a (args : list string) : a (* Run ocamlc passing appropriate flags to generate an F* plugin, -using fstar_plugin_lib. 
Expects the source file and further options as +using fstar.pluginlib. Expects the source file and further options as arguments. *) val exec_ocamlopt_plugin #a (args : list string) : a diff --git a/src/fstar/FStarC.Universal.fst b/src/fstar/FStarC.Universal.fst index 7c0049e480b..6927900beac 100644 --- a/src/fstar/FStarC.Universal.fst +++ b/src/fstar/FStarC.Universal.fst @@ -304,13 +304,14 @@ let emit dep_graph (mllibs:list (uenv & MLSyntax.mllib)) = let ext = match opt with | Some Options.FSharp -> ".fs" | Some Options.OCaml - | Some Options.Plugin -> ".ml" + | Some Options.Plugin + | Some Options.PluginNoLib -> ".ml" | Some Options.Krml -> ".krml" | Some Options.Extension -> ".ast" | _ -> fail () in match opt with - | Some Options.FSharp | Some Options.OCaml | Some Options.Plugin -> + | Some Options.FSharp | Some Options.OCaml | Some Options.Plugin | Some Options.PluginNoLib -> (* When bootstrapped in F#, this will use the old printer in FStarC.Extraction.ML.Code for both OCaml and F# extraction. When bootstarpped in OCaml, this will use the old printer diff --git a/src/fstar/Makefile b/src/fstar/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/fstar/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/ocaml/fstar-lib/FStarC_BaseTypes.ml b/src/ml/bare/FStarC_BaseTypes.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_BaseTypes.ml rename to src/ml/bare/FStarC_BaseTypes.ml diff --git a/ocaml/fstar-lib/FStarC_BigInt.ml b/src/ml/bare/FStarC_BigInt.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_BigInt.ml rename to src/ml/bare/FStarC_BigInt.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_Bytes.ml b/src/ml/bare/FStarC_Compiler_Bytes.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_Bytes.ml rename to src/ml/bare/FStarC_Compiler_Bytes.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_Effect.ml b/src/ml/bare/FStarC_Compiler_Effect.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_Effect.ml rename to src/ml/bare/FStarC_Compiler_Effect.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_Hints.ml b/src/ml/bare/FStarC_Compiler_Hints.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_Hints.ml rename to src/ml/bare/FStarC_Compiler_Hints.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_List.ml b/src/ml/bare/FStarC_Compiler_List.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_List.ml rename to src/ml/bare/FStarC_Compiler_List.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_Plugins_Base.ml b/src/ml/bare/FStarC_Compiler_Plugins_Base.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_Plugins_Base.ml rename to src/ml/bare/FStarC_Compiler_Plugins_Base.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_Range.ml b/src/ml/bare/FStarC_Compiler_Range.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_Range.ml rename to src/ml/bare/FStarC_Compiler_Range.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_String.ml b/src/ml/bare/FStarC_Compiler_String.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_String.ml rename to src/ml/bare/FStarC_Compiler_String.ml diff --git a/src/ml/bare/FStarC_Compiler_Util.ml b/src/ml/bare/FStarC_Compiler_Util.ml new file mode 100644 index 00000000000..fe3529d385c --- /dev/null +++ b/src/ml/bare/FStarC_Compiler_Util.ml @@ -0,0 +1,1190 @@ +open FStarC_Json + +let max_int = Z.of_int max_int +let is_letter c = if c > 255 then false else BatChar.is_letter (BatChar.chr c) 
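+(* Note: the character-classification helpers in this file take an integer
+   code point and only consult the Latin-1 range; any code point above 255
+   is conservatively classified as false (not a letter/digit/symbol). *)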
+let is_digit c = if c > 255 then false else BatChar.is_digit (BatChar.chr c) +let is_letter_or_digit c = is_letter c || is_digit c +let is_symbol c = if c > 255 then false else BatChar.is_symbol (BatChar.chr c) + +(* Modeled after: Char.IsPunctuation in .NET + (http://www.dotnetperls.com/char-ispunctuation) +*) +let is_punctuation c = List.mem c [33; 34; 35; 37; 38; 39; 40; 41; 42; 44; 45; 46; 47; 58; 59; 63; 64; 91; 92; 93; 95; 123; 125] +(*'!','"','#','%','&','\'','(',')','*',',','-','.','/',':',';','?','@','[','\\',']','_','{','}'*) + +let return_all x = x + +type time_ns = int64 +let now_ns () = Mtime_clock.now_ns() +let time_diff_ns t1 t2 = + Z.of_int (Int64.to_int (Int64.sub t2 t1)) +let time_diff_ms t1 t2 = Z.div (time_diff_ns t1 t2) (Z.of_int 1000000) +let record_time_ns f = + let start = now_ns () in + let res = f () in + let elapsed = time_diff_ns start (now_ns()) in + res, elapsed +let record_time_ms f = + let res, ns = record_time_ns f in + res, Z.div ns (Z.of_int 1000000) + +type time_of_day = float +let get_time_of_day () = BatUnix.gettimeofday() +let get_time_of_day_ms () = Z.of_int (int_of_float (get_time_of_day () *. 1000.0)) +let get_file_last_modification_time f = (BatUnix.stat f).BatUnix.st_mtime +let is_before t1 t2 = compare t1 t2 < 0 +let string_of_time_of_day = string_of_float + +exception Impos + +let cur_sigint_handler : Sys.signal_behavior ref = + ref Sys.Signal_default + +exception SigInt +type sigint_handler = Sys.signal_behavior + +let sigint_handler_f f = Sys.Signal_handle f + +let sigint_ignore: sigint_handler = + Sys.Signal_ignore + +let sigint_delay = ref 0 +let sigint_pending = ref false + +let raise_sigint _ = + sigint_pending := false; + raise SigInt + +let raise_sigint_maybe_delay _ = + (* This function should not do anything complicated, lest it cause deadlocks. + * Calling print_string, for example, can cause a deadlock (print_string → + * caml_flush → process_pending_signals → caml_execute_signal → raise_sigint → + * print_string → caml_io_mutex_lock ⇒ deadlock) *) + if !sigint_delay = 0 + then raise_sigint () + else sigint_pending := true + +let sigint_raise: sigint_handler = + Sys.Signal_handle raise_sigint_maybe_delay + +let get_sigint_handler () = + !cur_sigint_handler + +let set_sigint_handler sigint_handler = + cur_sigint_handler := sigint_handler; + Sys.set_signal Sys.sigint !cur_sigint_handler + +let with_sigint_handler handler f = + let original_handler = !cur_sigint_handler in + BatPervasives.finally + (fun () -> Sys.set_signal Sys.sigint original_handler) + (fun () -> set_sigint_handler handler; f ()) + () + +(* Re export this type, it's mentioned in the interface for this module. 
*) +type out_channel = Stdlib.out_channel + +let stderr = Stdlib.stderr +let stdout = Stdlib.stdout + +let open_file_for_writing (fn : string) = Stdlib.open_out_bin fn +let open_file_for_appending (fn : string) = Stdlib.open_out_gen [Open_append; Open_wronly; Open_creat; Open_binary] 0o644 fn +let close_out_channel (c : out_channel) = Stdlib.close_out c + +let flush (c:out_channel) : unit = Stdlib.flush c + +let append_to_file (c:out_channel) s = Printf.fprintf c "%s\n" s; flush c + +type proc = + {pid: int; + inc : in_channel; (* in == where we read from, so the process's stdout *) + errc : in_channel; (* the process's stderr *) + outc : out_channel; (* the process's stdin *) + mutable killed : bool; + stop_marker: (string -> bool) option; + id : string; + prog : string; + start_time : time_of_day} + +let all_procs : (proc list) ref = ref [] + +let lock () = () +let release () = () +let sleep n = Thread.delay ((Z.to_float n) /. 1000.) + +let mlock = Mutex.create () + +let monitor_enter _ = Mutex.lock mlock +let monitor_exit _ = Mutex.unlock mlock +let monitor_wait _ = () +let monitor_pulse _ = () +let current_tid _ = Z.zero + +let atomically f = (* This function only protects against signals *) + let finalizer () = + decr sigint_delay; + if !sigint_pending && !sigint_delay = 0 then + raise_sigint () in + let body f = + incr sigint_delay; f () in + BatPervasives.finally finalizer body f + +let with_monitor _ f x = atomically (fun () -> + monitor_enter (); + BatPervasives.finally monitor_exit f x) + +let spawn f = + let _ = Thread.create f () in () + +let stack_dump () = Printexc.raw_backtrace_to_string (Printexc.get_callstack 1000) + +(* On the OCaml side it would make more sense to take stop_marker in + ask_process, but the F# side isn't built that way *) +let start_process' + (id: string) (prog: string) (args: string list) + (stop_marker: (string -> bool) option) : proc = + let (stdin_r, stdin_w) = Unix.pipe () in + let (stdout_r, stdout_w) = Unix.pipe () in + let (stderr_r, stderr_w) = Unix.pipe () in + Unix.set_close_on_exec stdin_w; + Unix.set_close_on_exec stdout_r; + Unix.set_close_on_exec stderr_r; + let pid = Unix.create_process prog (Array.of_list (prog :: args)) stdin_r stdout_w stderr_w in + Unix.close stdin_r; + Unix.close stdout_w; + Unix.close stderr_w; + let proc = { pid = pid; + id = prog ^ ":" ^ id; + prog = prog; + inc = Unix.in_channel_of_descr stdout_r; + errc = Unix.in_channel_of_descr stderr_r; + outc = Unix.out_channel_of_descr stdin_w; + stop_marker = stop_marker; + killed = false; + start_time = get_time_of_day()} in + (* print_string ("Started process " ^ proc.id ^ "\n" ^ (stack_dump())); *) + all_procs := proc :: !all_procs; + proc + +let start_process + (id: string) (prog: string) (args: string list) + (stop_marker: string -> bool) : proc = + start_process' id prog args (Some stop_marker) + +let rec waitpid_ignore_signals pid = + try ignore (Unix.waitpid [] pid) + with Unix.Unix_error (Unix.EINTR, _, _) -> + waitpid_ignore_signals pid + +let kill_process (p: proc) = + if not p.killed then begin + (* Close the fds directly: close_in and close_out both call `flush`, + potentially forcing us to wait until p starts reading again. They + might have been closed already (e.g. `run_process`), so we + just `attempt` it. 
*) + let attempt f = + try f () with | _ -> () + in + attempt (fun () -> Unix.close (Unix.descr_of_in_channel p.inc)); + attempt (fun () -> Unix.close (Unix.descr_of_in_channel p.errc)); + attempt (fun () -> Unix.close (Unix.descr_of_out_channel p.outc)); + (try Unix.kill p.pid Sys.sigkill + with Unix.Unix_error (Unix.ESRCH, _, _) -> ()); + (* Avoid zombie processes (Unix.close_process does the same thing. *) + waitpid_ignore_signals p.pid; + (* print_string ("Killed process " ^ p.id ^ "\n" ^ (stack_dump())); *) + p.killed <- true + end + +let kill_all () = + BatList.iter kill_process !all_procs + +let proc_prog (p:proc) : string = p.prog + +let process_read_all_output (p: proc) = + (* Pass cleanup:false because kill_process closes both fds already. *) + BatIO.read_all (BatIO.input_channel ~autoclose:true ~cleanup:false p.inc) + + +let channel_read_all_nonblock (c: in_channel) : string = + let buffer = Bytes.create 8192 in + let fd = Unix.descr_of_in_channel c in + let rec aux (idx:int) (rem:int) = + if rem <= 0 then idx + else ( + let rd, _, _ = Unix.select [fd] [] [] 0.0 in + if rd = [] then idx + else ( + let n = Unix.read fd buffer idx rem in + if n <= 0 + then idx + else aux (idx+n) (rem-n) + ) + ) + in + let len = aux 0 1024 in + Bytes.sub_string buffer 0 len + +(** Feed `stdin` to `p`, and call `reader_fn` in a separate thread to read the + response. + + Signal handling makes this function fairly hairy. The usual design is to + launch a reader thread, then write to the process on the main thread and use + `Thread.join` to wait for the reader to complete. + + When we get a signal, Caml routes it to either of the threads. If it + reaches the reader thread, we're good: the reader thread is most likely + waiting in input_line at that point, and input_line polls for signals fairly + frequently. If the signal reaches the writer (main) thread, on the other + hand, we're toast: `Thread.join` isn't interruptible, so Caml will save the + signal until the child thread exits and `join` returns, and at that point the + Z3 query is complete and the signal is useless. + + There are three possible solutions to this problem: + 1. Use an interruptible version of Thread.join written in C + 2. Ensure that signals are always delivered to the reader thread + 3. Use a different synchronization mechanism between the reader and the writer. + + Option 1 is bad because building F* doesn't currently require a C compiler. + Option 2 is easy to implement with `Unix.sigprocmask`, but that isn't + available on Windows. Option 3 is what the code below does: it uses a pipe + and a 1-byte write as a way for the writer thread to wait on the reader + thread. That's why `reader_fn` is passed a `signal_exit` function. + + If a SIGINT reaches the reader, it should still call `signal_exit`. If + a SIGINT reaches the writer, it should make sure that the reader exits. + These two things are the responsibility of the caller of this function. 
**) + +let process_read_async p stdin reader_fn = + let fd_r, fd_w = Unix.pipe () in + BatPervasives.finally (fun () -> Unix.close fd_w; Unix.close fd_r) + (fun () -> + let wait_for_exit () = + ignore (Unix.read fd_r (Bytes.create 1) 0 1) in + let signal_exit () = + try ignore (Unix.write fd_w (Bytes.create 1) 0 1) + with (* ‘write’ will fail if called after the finalizer above *) + | Unix.Unix_error (Unix.EBADF, _, _) -> () in + + let write_input = function + | Some str -> output_string p.outc str; flush p.outc + | None -> () in + + (* In the following we can get a signal at any point; it's the caller's + responsibility to ensure that reader_fn will exit in that case *) + let t = Thread.create reader_fn signal_exit in + write_input stdin; + wait_for_exit (); + Thread.join t) () + +let run_process (id: string) (prog: string) (args: string list) (stdin: string option): string = + let p = start_process' id prog args None in + (match stdin with + | None -> () + | Some str -> + try output_string p.outc str with + | Sys_error _ -> () (* FIXME: check for "Broken pipe". In that case this is fine, process must have finished without reading input *) + | e -> raise e + ); + (try flush p.outc with | _ -> ()); (* only _attempt_ to flush, so we don't get an exception if the process is finished *) + (try close_out p.outc with | _ -> ()); (* idem *) + let s = process_read_all_output p in + kill_process p; + s + +let system_run (cmd:string) : Z.t = Z.of_int (Sys.command cmd) + +type read_result = EOF | SIGINT + +let handle_stderr (p:proc) (h : string -> unit) = + (* Read stderr and call the handler if anything is in there. *) + let se = channel_read_all_nonblock p.errc in + if se <> "" then + h (BatString.trim se) + +let ask_process + (p: proc) (stdin: string) + (exn_handler: unit -> string) + (stderr_handler : string -> unit) + : string = + let result = ref None in + let out = Buffer.create 16 in + let stop_marker = BatOption.default (fun s -> false) p.stop_marker in + + let reader_fn signal_fn = + let rec loop p out = + let line = BatString.trim (input_line p.inc) in (* raises EOF *) + if not (stop_marker line) then + (Buffer.add_string out (line ^ "\n"); loop p out) in + (try loop p out + with | SigInt -> result := Some SIGINT + | End_of_file -> result := Some EOF); + signal_fn () in + + try + (* Check stderr both before and after asking. Note: this does + * not handle the case when the process prints something to stderr + * and then hangs. We will stay in the process_read_async call without + * ever handling the output. To properly handle that, we could + * use a separate thread, but then all stderr_handler functions need + * to take locks. Since this is not a problem for now, we just avoid + * this complexity. 
*) + handle_stderr p stderr_handler; + process_read_async p (Some stdin) reader_fn; + handle_stderr p stderr_handler; + (match !result with + | Some EOF -> kill_process p; Buffer.add_string out (exn_handler ()) + | Some SIGINT -> raise SigInt + | None -> ()); + Buffer.contents out + with e -> (* Ensure that reader_fn gets an EOF and exits *) + kill_process p; raise e + +let get_file_extension (fn:string) : string = snd (BatString.rsplit fn ".") +let is_path_absolute path_str = + let open Batteries.Incubator in + let open BatPathGen.OfString in + let path_str' = of_string path_str in + is_absolute path_str' +let join_paths path_str0 path_str1 = + let open Batteries.Incubator in + let open BatPathGen.OfString in + let open BatPathGen.OfString.Operators in + to_string ((of_string path_str0) //@ (of_string path_str1)) + +let normalize_file_path (path_str:string) = + let open Batteries.Incubator in + let open BatPathGen.OfString in + let open BatPathGen.OfString.Operators in + to_string + (normalize_in_tree + (let path = of_string path_str in + if is_absolute path then + path + else + let pwd = of_string (BatSys.getcwd ()) in + pwd //@ path)) + +type stream_reader = BatIO.input +let open_stdin () = BatIO.stdin +let read_line s = + try + Some (BatIO.read_line s) + with + _ -> None +let nread (s:stream_reader) (n:Z.t) = + try + Some (BatIO.nread s (Z.to_int n)) + with + _ -> None + +let poll_stdin (f:float) = + try + let ready_fds, _, _ = Unix.select [Unix.stdin] [] [] f in + match ready_fds with + | [] -> false + | _ -> true + with + | _ -> false + +type string_builder = BatBuffer.t +let new_string_builder () = BatBuffer.create 256 +let clear_string_builder b = BatBuffer.clear b +let string_of_string_builder b = BatBuffer.contents b +let string_builder_append b s = BatBuffer.add_string b s + +let message_of_exn (e:exn) = Printexc.to_string e +let trace_of_exn (e:exn) = Printexc.get_backtrace () + +module StringOps = + struct + type t = string + let equal (x:t) (y:t) = x=y + let compare (x:t) (y:t) = BatString.compare x y + let hash (x:t) = BatHashtbl.hash x + end + +module StringHashtbl = BatHashtbl.Make(StringOps) +module StringMap = BatMap.Make(StringOps) + +type 'value smap = 'value StringHashtbl.t +let smap_create (i:Z.t) : 'value smap = StringHashtbl.create (Z.to_int i) +let smap_clear (s:('value smap)) = StringHashtbl.clear s +let smap_add (m:'value smap) k (v:'value) = StringHashtbl.replace m k v +let smap_of_list (l: (string * 'value) list) = + let s = StringHashtbl.create (BatList.length l) in + FStar_List.iter (fun (x,y) -> smap_add s x y) l; + s +let smap_try_find (m:'value smap) k = StringHashtbl.find_option m k +let smap_fold (m:'value smap) f a = StringHashtbl.fold f m a +let smap_remove (m:'value smap) k = StringHashtbl.remove m k +let smap_keys (m:'value smap) = smap_fold m (fun k _ acc -> k::acc) [] +let smap_copy (m:'value smap) = StringHashtbl.copy m +let smap_size (m:'value smap) = StringHashtbl.length m +let smap_iter (m:'value smap) f = StringHashtbl.iter f m + +exception PSMap_Found +type 'value psmap = 'value StringMap.t +let psmap_empty (_: unit) : 'value psmap = StringMap.empty +let psmap_add (map: 'value psmap) (key: string) (value: 'value) = StringMap.add key value map +let psmap_find_default (map: 'value psmap) (key: string) (dflt: 'value) = + StringMap.find_default dflt key map +let psmap_try_find (map: 'value psmap) (key: string) = + StringMap.Exceptionless.find key map +let psmap_fold (m:'value psmap) f a = StringMap.fold f m a +let psmap_find_map (m:'value psmap) 
f = + let res = ref None in + let upd k v = + let r = f k v in + if r <> None then (res := r; raise PSMap_Found) in + (try StringMap.iter upd m with PSMap_Found -> ()); + !res +let psmap_modify (m: 'value psmap) (k: string) (upd: 'value option -> 'value) = + StringMap.modify_opt k (fun vopt -> Some (upd vopt)) m + +let psmap_merge (m1: 'value psmap) (m2: 'value psmap) : 'value psmap = + psmap_fold m1 (fun k v m -> psmap_add m k v) m2 + +let psmap_remove (m: 'value psmap) (key:string) + : 'value psmap = StringMap.remove key m + +module ZHashtbl = BatHashtbl.Make(Z) +module ZMap = BatMap.Make(Z) + +type 'value imap = 'value ZHashtbl.t +let imap_create (i:Z.t) : 'value imap = ZHashtbl.create (Z.to_int i) +let imap_clear (s:('value imap)) = ZHashtbl.clear s +let imap_add (m:'value imap) k (v:'value) = ZHashtbl.replace m k v +let imap_of_list (l: (Z.t * 'value) list) = + let s = ZHashtbl.create (BatList.length l) in + FStar_List.iter (fun (x,y) -> imap_add s x y) l; + s +let imap_try_find (m:'value imap) k = ZHashtbl.find_option m k +let imap_fold (m:'value imap) f a = ZHashtbl.fold f m a +let imap_remove (m:'value imap) k = ZHashtbl.remove m k +let imap_keys (m:'value imap) = imap_fold m (fun k _ acc -> k::acc) [] +let imap_copy (m:'value imap) = ZHashtbl.copy m + +type 'value pimap = 'value ZMap.t +let pimap_empty (_: unit) : 'value pimap = ZMap.empty +let pimap_add (map: 'value pimap) (key: Z.t) (value: 'value) = ZMap.add key value map +let pimap_find_default (map: 'value pimap) (key: Z.t) (dflt: 'value) = + ZMap.find_default dflt key map +let pimap_try_find (map: 'value pimap) (key: Z.t) = + ZMap.Exceptionless.find key map +let pimap_fold (m:'value pimap) f a = ZMap.fold f m a +let pimap_remove (m:'value pimap) k = ZMap.remove k m + +(* restore pre-2.11 BatString.nsplit behavior, + see https://github.com/ocaml-batteries-team/batteries-included/issues/845 *) +let batstring_nsplit s t = + if s = "" then [] else BatString.split_on_string t s + +let format (fmt:string) (args:string list) = + let frags = batstring_nsplit fmt "%s" in + if BatList.length frags <> BatList.length args + 1 then + failwith ("Not enough arguments to format string " ^fmt^ " : expected " ^ (Stdlib.string_of_int (BatList.length frags)) ^ " got [" ^ (BatString.concat ", " args) ^ "] frags are [" ^ (BatString.concat ", " frags) ^ "]") + else + let sbldr = new_string_builder () in + string_builder_append sbldr (List.hd frags); + BatList.iter2 + (fun frag arg -> string_builder_append sbldr arg; + string_builder_append sbldr frag) + (List.tl frags) args; + string_of_string_builder sbldr + +let format1 f a = format f [a] +let format2 f a b = format f [a;b] +let format3 f a b c = format f [a;b;c] +let format4 f a b c d = format f [a;b;c;d] +let format5 f a b c d e = format f [a;b;c;d;e] +let format6 f a b c d e g = format f [a;b;c;d;e;g] + +let flush_stdout () = flush stdout + +let stdout_isatty () = Some (Unix.isatty Unix.stdout) + +(* NOTE: this is deciding whether or not to color by looking + at stdout_isatty(), which may be a wrong choice if + we're instead outputting to stderr. e.g. + fstar.exe Blah.fst 2>errlog + will colorize the errors in the file if stdout is not + also redirected. 
+*) +let colorize s colors = + match colors with + | (c1,c2) -> + match stdout_isatty () with + | Some true -> format3 "%s%s%s" c1 s c2 + | _ -> s + +let colorize_bold s = + match stdout_isatty () with + | Some true -> format3 "%s%s%s" "\x1b[39;1m" s "\x1b[0m" + | _ -> s + +let colorize_red s = + match stdout_isatty () with + | Some true -> format3 "%s%s%s" "\x1b[31;1m" s "\x1b[0m" + | _ -> s + +let colorize_yellow s = + match stdout_isatty () with + | Some true -> format3 "%s%s%s" "\x1b[33;1m" s "\x1b[0m" + | _ -> s + +let colorize_cyan s = + match stdout_isatty () with + | Some true -> format3 "%s%s%s" "\x1b[36;1m" s "\x1b[0m" + | _ -> s + +let colorize_green s = + match stdout_isatty () with + | Some true -> format3 "%s%s%s" "\x1b[32;1m" s "\x1b[0m" + | _ -> s + +let colorize_magenta s = + match stdout_isatty () with + | Some true -> format3 "%s%s%s" "\x1b[35;1m" s "\x1b[0m" + | _ -> s + +let pr = Printf.printf +let spr = Printf.sprintf +let fpr = Printf.fprintf + +type printer = { + printer_prinfo: string -> unit; + printer_prwarning: string -> unit; + printer_prerror: string -> unit; + printer_prgeneric: string -> (unit -> string) -> (unit -> json) -> unit +} + +let default_printer = + { printer_prinfo = (fun s -> pr "%s" s; flush stdout); + printer_prwarning = (fun s -> fpr stderr "%s" (colorize_yellow s); flush stdout; flush stderr); + printer_prerror = (fun s -> fpr stderr "%s" (colorize_red s); flush stdout; flush stderr); + printer_prgeneric = fun label get_string get_json -> pr "%s: %s" label (get_string ())} + +let current_printer = ref default_printer +let set_printer printer = current_printer := printer + +let print_raw s = set_binary_mode_out stdout true; pr "%s" s; flush stdout +let print_string s = (!current_printer).printer_prinfo s +let print_generic label to_string to_json a = (!current_printer).printer_prgeneric label (fun () -> to_string a) (fun () -> to_json a) +let print_any s = (!current_printer).printer_prinfo (Marshal.to_string s []) +let strcat s1 s2 = s1 ^ s2 +let concat_l sep (l:string list) = BatString.concat sep l + +let string_of_unicode (bytes:int array) = + BatArray.fold_left (fun acc b -> acc^(BatUTF8.init 1 (fun _ -> BatUChar.of_int b))) "" bytes +let unicode_of_string (string:string) = + let n = BatUTF8.length string in + let t = Array.make n 0 in + let i = ref 0 in + BatUTF8.iter (fun c -> t.(!i) <- BatUChar.code c; incr i) string; + t +let base64_encode s = BatBase64.str_encode s +let base64_decode s = BatBase64.str_decode s +let char_of_int i = Z.to_int i +let int_of_string = Z.of_string +let safe_int_of_string x = + if x = "" then None else + try Some (int_of_string x) with Invalid_argument _ -> None +let int_of_char x = Z.of_int x +let int_of_byte x = x +let int_of_uint8 x = Z.of_int (Char.code x) +let uint16_of_int i = Z.to_int i +let byte_of_char c = c + +let float_of_string s = float_of_string s +let float_of_byte b = float_of_int (Char.code b) +let float_of_int32 = float_of_int +let float_of_int64 = BatInt64.to_float + +let int_of_int32 i = i +let int32_of_int i = BatInt32.of_int i + +let string_of_int = Z.to_string +let string_of_bool = string_of_bool +let string_of_int32 = BatInt32.to_string +let string_of_int64 = BatInt64.to_string +let string_of_float = string_of_float +let string_of_char i = BatUTF8.init 1 (fun _ -> BatUChar.chr i) +let hex_string_of_byte (i:int) = + let hs = spr "%x" i in + if (String.length hs = 1) then "0" ^ hs + else hs +let string_of_bytes = string_of_unicode +let bytes_of_string = unicode_of_string +let starts_with 
= BatString.starts_with +let trim_string = BatString.trim +let ends_with = BatString.ends_with +let char_at s index = BatUChar.code (BatUTF8.get s (Z.to_int index)) +let is_upper c = 65 <= c && c <= 90 +let contains (s1:string) (s2:string) = BatString.exists s1 s2 +let substring_from s index = BatString.tail s (Z.to_int index) +let substring s i j = BatString.sub s (Z.to_int i) (Z.to_int j) +let replace_char (s:string) c1 c2 = + let c1, c2 = BatUChar.chr c1, BatUChar.chr c2 in + BatUTF8.map (fun x -> if x = c1 then c2 else x) s +let replace_chars (s:string) c (by:string) = + BatString.replace_chars (fun x -> if x = Char.chr c then by else BatString.of_char x) s +let hashcode s = Z.of_int (StringOps.hash s) +let compare s1 s2 = Z.of_int (BatString.compare s1 s2) +let split s sep = BatString.split_on_string sep s +let splitlines s = split s "\n" + +let iof = int_of_float +let foi = float_of_int + +let print1 a b = print_string (format1 a b) +let print2 a b c = print_string (format2 a b c) +let print3 a b c d = print_string (format3 a b c d) +let print4 a b c d e = print_string (format4 a b c d e) +let print5 a b c d e f = print_string (format5 a b c d e f) +let print6 a b c d e f g = print_string (format6 a b c d e f g) +let print fmt args = print_string (format fmt args) + +let print_error s = (!current_printer).printer_prerror s +let print1_error a b = print_error (format1 a b) +let print2_error a b c = print_error (format2 a b c) +let print3_error a b c d = print_error (format3 a b c d) + +let print_warning s = (!current_printer).printer_prwarning s +let print1_warning a b = print_warning (format1 a b) +let print2_warning a b c = print_warning (format2 a b c) +let print3_warning a b c d = print_warning (format3 a b c d) + +let fprint (oc:out_channel) fmt args : unit = Printf.fprintf oc "%s" (format fmt args) + +[@@deriving yojson,show] + +let is_left = function + | FStar_Pervasives.Inl _ -> true + | _ -> false + +let is_right = function + | FStar_Pervasives.Inr _ -> true + | _ -> false + +let left = function + | FStar_Pervasives.Inl x -> x + | _ -> failwith "Not in left" +let right = function + | FStar_Pervasives.Inr x -> x + | _ -> failwith "Not in right" + +let (-<-) f g x = f (g x) + +let find_dup f l = + let rec aux = function + | hd::tl -> + let hds, tl' = BatList.partition (f hd) tl in + (match hds with + | [] -> aux tl' + | _ -> Some hd) + | _ -> None in + aux l + +let nodups f l = match find_dup f l with | None -> true | _ -> false + +let remove_dups f l = + let rec aux out = function + | hd::tl -> let _, tl' = BatList.partition (f hd) tl in aux (hd::out) tl' + | _ -> out in + aux [] l + +let is_none = function + | None -> true + | Some _ -> false + +let is_some = function + | None -> false + | Some _ -> true + +let must = function + | Some x -> x + | None -> failwith "Empty option" + +let dflt x = function + | None -> x + | Some x -> x + +let find_opt f l = + let rec aux = function + | [] -> None + | hd::tl -> if f hd then Some hd else aux tl in + aux l + +(* JP: why so many duplicates? 
:'( *) +let sort_with = FStar_List.sortWith + +let bind_opt opt f = + match opt with + | None -> None + | Some x -> f x + +let catch_opt opt f = + match opt with + | Some x -> opt + | None -> f () + +let map_opt opt f = + match opt with + | None -> None + | Some x -> Some (f x) + +let iter_opt opt f = + ignore (map_opt opt f) + +let rec find_map l f = + match l with + | [] -> None + | x::tl -> + match f x with + | None -> find_map tl f + | y -> y + +let try_find f l = BatList.find_opt f l + +let try_find_index f l = + let rec aux i = function + | [] -> None + | hd::tl -> if f hd then Some (Z.of_int i) else aux (i+1) tl in + aux 0 l + +let fold_map f state s = + let fold (state, acc) x = + let state, v = f state x in (state, v :: acc) in + let (state, rs) = BatList.fold_left fold (state, []) s in + (state, BatList.rev rs) + +let choose_map f state s = + let fold (state, acc) x = + match f state x with + | state, None -> (state, acc) + | state, Some v -> (state, v :: acc) in + let (state, rs) = BatList.fold_left fold (state, []) s in + (state, BatList.rev rs) + +let for_all f l = BatList.for_all f l +let for_some f l = BatList.exists f l +let forall_exists rel l1 l2 = + for_all (fun x -> for_some (rel x) l2) l1 +let multiset_equiv rel l1 l2 = + BatList.length l1 = BatList.length l2 && forall_exists rel l1 l2 +let take p l = + let rec take_aux acc = function + | [] -> l, [] + | x::xs when p x -> take_aux (x::acc) xs + | x::xs -> List.rev acc, x::xs + in take_aux [] l + +let rec fold_flatten f acc l = + match l with + | [] -> acc + | x :: xs -> let acc, xs' = f acc x in fold_flatten f acc (xs' @ xs) + +let add_unique f x l = + if for_some (f x) l then + l + else + x::l + +let first_N n l = + let n = Z.to_int n in + let rec f acc i l = + if i = n then BatList.rev acc,l else + match l with + | h::tl -> f (h::acc) (i+1) tl + | _ -> failwith "firstN" + in + f [] 0 l + +let nth_tail n l = + let rec aux n l = + if n=0 then l else aux (n - 1) (BatList.tl l) + in + aux (Z.to_int n) l + +let prefix l = + match BatList.rev l with + | hd::tl -> BatList.rev tl, hd + | _ -> failwith "impossible" + +let prefix_until f l = + let rec aux prefix = function + | [] -> None + | hd::tl -> + if f hd then Some (BatList.rev prefix, hd, tl) + else aux (hd::prefix) tl in + aux [] l + +let string_to_ascii_bytes (s:string) : char array = + BatArray.of_list (BatString.explode s) +let ascii_bytes_to_string (b:char array) : string = + BatString.implode (BatArray.to_list b) + +let mk_ref a = ref a + +let write_file (fn:string) s = + let fh = open_file_for_writing fn in + append_to_file fh s; + close_out_channel fh + +let copy_file input_name output_name = + (* see https://ocaml.github.io/ocamlunix/ocamlunix.html#sec33 *) + let open Unix in + let buffer_size = 8192 in + let buffer = Bytes.create buffer_size in + let fd_in = openfile input_name [O_RDONLY] 0 in + let fd_out = openfile output_name [O_WRONLY; O_CREAT; O_TRUNC] 0o666 in + let rec copy_loop () = + match read fd_in buffer 0 buffer_size with + | 0 -> () + | r -> ignore (write fd_out buffer 0 r); copy_loop () + in + copy_loop (); + close fd_in; + close fd_out +let delete_file (fn:string) = Sys.remove fn +let file_get_contents f = + let ic = open_in_bin f in + let l = in_channel_length ic in + let s = really_input_string ic l in + close_in ic; + s +let file_get_lines f = + let ic = open_in f in + let rec aux accu = + let l = + try + Some (input_line ic) + with + | End_of_file -> None + in + match l with + | None -> accu + | Some l -> aux (l::accu) + in + let l = aux [] 
in + close_in ic; + List.rev l +let concat_dir_filename d f = Filename.concat d f + +let slash_code : int = + BatUChar.code (BatUChar.of_char '/') + +let rec dropWhile f xs = + match xs with + | [] -> [] + | x::xs -> + if f x + then dropWhile f xs + else x::xs + +let path_parent (fn : string) : string = + let cs = FStar_String.split [slash_code] fn in + (* ^ Components of the path *) + let cs = cs |> List.rev |> dropWhile (fun s -> s = "") |> List.rev in + (* ^ Remove empty trailing components, so we interpret a/b/c/ as a/b/c *) + (* Remove last component to get parent and concat. *) + FStar_String.concat "/" (FStar_List.init cs) + +let rec __mkdir clean mkparents nm = + let remove_all_in_dir nm = + let open Sys in + Array.iter remove (Array.map (concat_dir_filename nm) (readdir nm)) in + let open Unix in + (match Sys.os_type with + | "Unix" -> ignore (umask 0o002) + | _ -> (* unimplemented*) ()); + try Unix.mkdir nm 0o777 + with + | Unix_error (EEXIST, _, _) -> + if clean then remove_all_in_dir nm + + (* failed due to nonexisting directory, mkparents is true, and nm has a slash: + attempt to recursively create parent and retry. *) + | Unix_error (ENOENT, _, _) when mkparents && FStar_String.index_of nm slash_code <> (Z.of_int (-1)) -> + __mkdir false true (path_parent nm); + Unix.mkdir nm 0o777 + +let mkdir = __mkdir + +let for_range lo hi f = + for i = Z.to_int lo to Z.to_int hi do + f (Z.of_int i) + done + + +let incr r = r := Z.(!r + one) +let decr r = r := Z.(!r - one) +let geq (i:int) (j:int) = i >= j + +let exec_name = Sys.executable_name +let get_exec_dir () = Filename.dirname (Sys.executable_name) +let get_cmd_args () = Array.to_list Sys.argv +let expand_environment_variable x = try Some (Sys.getenv x) with Not_found -> None + +let physical_equality (x:'a) (y:'a) = x == y +let check_sharing a b msg = if physical_equality a b then print1 "Sharing OK: %s\n" msg else print1 "Sharing broken in %s\n" msg + +type oWriter = { + write_byte: char -> unit; + write_bool: bool -> unit; + write_int: int -> unit; + write_int32: int32 -> unit; + write_int64: int64 -> unit; + write_char: char -> unit; + write_double: float -> unit; + write_bytearray: char array -> unit; + write_string: string -> unit; + + close: unit -> unit +} + +type oReader = { + read_byte: unit -> char; + read_bool: unit -> bool; + read_int: unit -> int; + read_int32: unit -> int32; + read_int64: unit -> int64; + read_char: unit -> char; + read_double: unit -> float; + read_bytearray: unit -> char array; + read_string: unit -> string; + + close: unit -> unit +} + +module MkoReader = struct + let read_byte r x = r.read_byte x + let read_bool r x = r.read_bool x + let read_int r x = r.read_int32 x + let read_int32 r x = r.read_int32 x + let read_int64 r x = r.read_int64 x + let read_char r x = r.read_char x + let read_double r x = r.read_double x + let read_bytearray r x = r.read_bytearray x + let read_string r x = r.read_string x + + let close r x = r.close x +end + +module MkoWriter = struct + let write_byte w x = w.write_byte x + let write_bool w x = w.write_bool x + let write_int w x = w.write_int32 x + let write_int32 w x = w.write_int32 x + let write_int64 w x = w.write_int64 x + let write_char w x = w.write_char x + let write_double w x = w.write_double x + let write_bytearray w x = w.write_bytearray x + let write_string w x = w.write_string x + + let close w x = w.close x +end + +(* + * TODO: these functions need to be filled in + *) +let get_owriter (filename:string) : oWriter = { + write_byte = (fun _ -> ()); + 
write_bool = (fun _ -> ()); + write_int = (fun _ -> ()); + write_int32 = (fun _ -> ()); + write_int64 = (fun _ -> ()); + write_char = (fun _ -> ()); + write_double = (fun _ -> ()); + write_bytearray = (fun _ -> ()); + write_string = (fun _ -> ()); + + close = (fun _ -> ()); +} + +let get_oreader (filename:string) : oReader = { + read_byte = (fun _ -> 'a'); + read_bool = (fun _ -> true); + read_int = (fun _ -> 0); + read_int32 = (fun _ -> failwith "NYI"); + read_int64 = (fun _ -> 0L); + read_char = (fun _ -> 'a'); + read_double = (fun _ -> 0.0); + read_bytearray = (fun _ -> [||]); + read_string = (fun _ -> ""); + + close = (fun _ -> ()); +} + +let getcwd = Sys.getcwd + +let readdir dir = "." :: ".." :: Array.to_list (Sys.readdir dir) + +let paths_to_same_file f g = + let open Unix in + let { st_dev = i; st_ino = j } = stat f in + let { st_dev = i'; st_ino = j' } = stat g in + (i,j) = (i',j') + +let file_exists = Sys.file_exists +(* Sys.is_directory raises Sys_error if the path does not exist *) +let is_directory f = Sys.file_exists f && Sys.is_directory f + + +let basename = Filename.basename +let dirname = Filename.dirname +let print_endline = print_endline + +let map_option f opt = BatOption.map f opt + +let save_value_to_file (fname:string) value = + (* BatFile.with_file_out uses Unix.openfile (which isn't available in + js_of_ocaml) instead of Pervasives.open_out, so we don't use it here. *) + let channel = open_out_bin fname in + BatPervasives.finally + (fun () -> close_out channel) + (fun channel -> output_value channel value) + channel + +let load_value_from_file (fname:string) = + (* BatFile.with_file_in uses Unix.openfile (which isn't available in + js_of_ocaml) instead of Pervasives.open_in, so we don't use it here. *) + try + let channel = open_in_bin fname in + BatPervasives.finally + (fun () -> close_in channel) + (fun channel -> Some (input_value channel)) + channel + with | _ -> None + +let save_2values_to_file (fname:string) value1 value2 = + try + let channel = open_out_bin fname in + BatPervasives.finally + (fun () -> close_out channel) + (fun channel -> + output_value channel value1; + output_value channel value2) + channel + with + | e -> delete_file fname; + raise e + +let load_2values_from_file (fname:string) = + try + let channel = open_in_bin fname in + BatPervasives.finally + (fun () -> close_in channel) + (fun channel -> + let v1 = input_value channel in + let v2 = input_value channel in + Some (v1, v2)) + channel + with | _ -> None + +let print_exn e = + Printexc.to_string e + +let digest_of_file = + let cache = smap_create (Z.of_int 101) in + fun (fname:string) -> + match smap_try_find cache fname with + | Some dig -> dig + | None -> + let dig = BatDigest.file fname in + smap_add cache fname dig; + dig + +let digest_of_string (s:string) = + BatDigest.to_hex (BatDigest.string s) + +(* Precondition: file exists *) +let touch_file (fname:string) : unit = + (* Sets access and modification times to current time *) + Unix.utimes fname 0.0 0.0 + +let ensure_decimal s = Z.to_string (Z.of_string s) + +let measure_execution_time tag f = + let t = Sys.time () in + let retv = f () in + print2 "Execution time of %s: %s ms\n" tag (string_of_float (1000.0 *. (Sys.time() -. t))); + retv + +let return_execution_time f = + let t1 = Sys.time () in + let retv = f () in + let t2 = Sys.time () in + (retv, 1000.0 *. (t2 -. 
t1)) + +(* Outside of this file the reference to FStar_Util.ref must use the following combinators *) +(* Export it at the end of the file so that we don't break other internal uses of ref *) +(* type 'a ref = 'a ref *) + +let read r = !r +let write r v = r := v +let (!) = read +let (:=) = write + +let marshal (x:'a) : string = Marshal.to_string x [] +let unmarshal (x:string) : 'a = Marshal.from_string x 0 + +type signedness = | Unsigned | Signed +type width = | Int8 | Int16 | Int32 | Int64 + +let rec z_pow2 n = + if n = Z.zero then Z.one + else Z.mul (Z.of_string "2") (z_pow2 (Z.sub n Z.one)) + +let bounds signedness width = + let n = + match width with + | Int8 -> Z.of_string "8" + | Int16 -> Z.of_string "16" + | Int32 -> Z.of_string "32" + | Int64 -> Z.of_string "64" + in + let lower, upper = + match signedness with + | Unsigned -> + Z.zero, Z.sub (z_pow2 n) Z.one + | Signed -> + let upper = z_pow2 (Z.sub n Z.one) in + Z.neg upper, Z.sub upper Z.one + in + lower, upper + +let within_bounds repr signedness width = + let lower, upper = bounds signedness width in + let value = Z.of_string (ensure_decimal repr) in + Z.leq lower value && Z.leq value upper + +let print_array (f: 'a -> string) + (s: 'a array) + : string + = let ls = Array.fold_left (fun out a -> f a :: out) [] s in + format1 "[| %s |]" (String.concat "; " (List.rev ls)) + +let array_of_list (l:'a list) = FStar_ImmutableArray_Base.of_list l + +let array_length (l:'a FStar_ImmutableArray_Base.t) = FStar_ImmutableArray_Base.length l + +let array_index (l:'a FStar_ImmutableArray_Base.t) (i:Z.t) = FStar_ImmutableArray_Base.index l i + +let putenv k v = Unix.putenv k v +let create_process (prog:string) (args:string list) : Z.t = + let pid = Unix.create_process prog (Array.of_list args) Unix.stdin Unix.stdout Unix.stderr in + Z.of_int pid + +let waitpid (pid:Z.t) : (Z.t, Z.t) FStar_Pervasives.either = + let pid, s = Unix.waitpid [] (Z.to_int pid) in + match s with + | WEXITED rc -> FStar_Pervasives.Inl (Z.of_int rc) + | WSIGNALED rc -> FStar_Pervasives.Inr (Z.of_int rc) + | WSTOPPED _ -> failwith "waitpid: unexpected WSTOPPED, should not happen with empty flags" + +let exn_is_enoent (e:exn) : bool = + match e with + | Unix.Unix_error (Unix.ENOENT, _, _) -> true + | _ -> false diff --git a/ocaml/fstar-lib/FStarC_Dyn.ml b/src/ml/bare/FStarC_Dyn.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Dyn.ml rename to src/ml/bare/FStarC_Dyn.ml diff --git a/src/ml/bare/FStarC_Extraction_ML_PrintML.ml b/src/ml/bare/FStarC_Extraction_ML_PrintML.ml new file mode 100644 index 00000000000..a777ff5a37d --- /dev/null +++ b/src/ml/bare/FStarC_Extraction_ML_PrintML.ml @@ -0,0 +1,566 @@ +open List +open Lexing +open Ppxlib_ast +open Astlib.Ast_500.Parsetree +open Location +open Pprintast +open Ast_helper +open Astlib.Ast_500.Asttypes +open Longident + +open FStarC_Extraction_ML_Syntax + +(* Global state used for the name of the ML module being pprinted. + current_module is only set once in build_ast and read once in + path_to_ident. This is done in order to avoid clutter. 
*) +let current_module = ref "" + + +let flatmap f l = map f l |> List.flatten +let opt_to_list = function Some x -> [x] | None -> [] + + +let no_position : Lexing.position = + {pos_fname = ""; pos_lnum = 0; pos_bol = 0; pos_cnum = 0} + +let no_location : Location.t = + {loc_start = no_position; loc_end = no_position; loc_ghost = false} + +let no_attrs: attributes = [] + + +(* functions for generating names and paths *) +let mk_sym s: string Location.loc = {txt=s; loc=no_location} + +let mk_sym_lident s: Longident.t Location.loc = {txt=s; loc=no_location} + +let mk_lident name = Lident name |> mk_sym_lident + +let mk_typ_name s = + (* remove an apostrophe from beginning of type name *) + match (BatString.sub s 0 1) with + | "'" -> BatString.tail s 1 + | _ -> s + +let rec path_to_string ((l, sym): mlpath): string = + match l with + | [] -> sym + | (hd::tl) -> BatString.concat "_" [hd; path_to_string (tl, sym)] + +let split_path (l1: string list) (l2: string list): (string list * string list) option = + let rec split_aux l1 l2 = + match l2 with + | [] -> Some l1 + | hd2::tl2 when BatString.equal hd2 (hd l1) -> split_aux (tl l1) tl2 + | _ -> None + in + if (length l1 >= length l2) then + match split_aux l1 l2 with + | None -> None + | Some l1' -> Some (l1', l2) + else None + +let path_to_ident ((l, sym): mlpath): Longident.t Asttypes.loc = + let codegen_libs = FStarC_Options.codegen_libs() in + match l with + | [] -> mk_lident sym + | hd::tl -> + let m_name = !current_module in + let suffix, prefix = + try BatList.find_map (split_path l) codegen_libs with + | Not_found -> l, [] + in + let path_abbrev = BatString.concat "_" suffix in + if (prefix = [] && BatString.equal m_name path_abbrev) then + (* remove circular references *) + mk_lident sym + else + match prefix with + | [] -> Ldot(Lident path_abbrev, sym) |> mk_sym_lident + | p_hd::p_tl -> + let q = fold_left (fun x y -> Ldot (x,y)) (Lident p_hd) p_tl in + (match path_abbrev with + | "" -> Ldot(q, sym) |> mk_sym_lident + | _ -> Ldot(Ldot(q, path_abbrev), sym) |> mk_sym_lident) + +let mk_top_mllb (e: mlexpr): mllb = + {mllb_name="_"; + mllb_tysc=None; + mllb_add_unit=false; + mllb_def=e; + mllb_meta=[]; + mllb_attrs=[]; + print_typ=false } + +(* Find the try_with in the default effect module. For instance this can be +FStar.All.try_with (for most users) or FStarC.Compiler.Effect.try_with (during +bootstrapping with "--MLish --MLish_effect FStarC.Compiler.Effect"). *) +let try_with_ident () = + let lid = FStarC_Parser_Const.try_with_lid () in + let ns = FStarC_Ident.ns_of_lid lid in + let id = FStarC_Ident.ident_of_lid lid in + path_to_ident (List.map FStarC_Ident.string_of_id ns, FStarC_Ident.string_of_id id) + +(* For integer constants (not 0/1) in this range we will use Prims.of_int + * Outside this range we will use string parsing to allow arbitrary sized + * integers. + * Using int_zero/int_one removes int processing to create the Z.t + * Using of_int removes string processing to create the Z.t + *) +let max_of_int_const = Z.of_int 65535 +let min_of_int_const = Z.of_int (-65536) + +let maybe_guts (s:string) : string = + if FStarC_Options.codegen () = Some FStarC_Options.Plugin + then "Fstarcompiler." 
^ s + else s + +(* mapping functions from F* ML AST to Parsetree *) +let build_constant (c: mlconstant): Parsetree.constant = + let stdint_module (s:FStarC_Const.signedness) (w:FStarC_Const.width) : string = + let sign = match s with + | FStarC_Const.Signed -> "Int" + | FStarC_Const.Unsigned -> "Uint" in + let with_w ws = BatString.concat "" ["Stdint."; sign; ws] in + match w with + | FStarC_Const.Int8 -> with_w "8" + | FStarC_Const.Int16 -> with_w "16" + | FStarC_Const.Int32 -> with_w "32" + | FStarC_Const.Int64 -> with_w "64" + | FStarC_Const.Sizet -> with_w "64" in + match c with + | MLC_Int (v, None) -> + let s = match Z.of_string v with + | x when x = Z.zero -> + maybe_guts "Prims.int_zero" + | x when x = Z.one -> + maybe_guts "Prims.int_one" + | x when (min_of_int_const < x) && (x < max_of_int_const) -> + BatString.concat v ["(Prims.of_int ("; "))"] + | x -> + BatString.concat v ["(Prims.parse_int \""; "\")"] in + Const.integer s + (* Special case for UInt8, as it's realized as OCaml built-in int type *) + | MLC_Int (v, Some (FStarC_Const.Unsigned, FStarC_Const.Int8)) -> + Const.integer v + | MLC_Int (v, Some (s, w)) -> + let s = match Z.of_string v with + | x when x = Z.zero -> + BatString.concat "" [stdint_module s w; ".zero"] + | x when x = Z.one -> + BatString.concat "" [stdint_module s w; ".one"] + | x when (min_of_int_const < x) && (x < max_of_int_const) -> + BatString.concat "" ["("; stdint_module s w; ".of_int ("; v; "))"] + | x -> + BatString.concat "" ["("; stdint_module s w; ".of_string \""; v; "\")"] in + Const.integer s + | MLC_Float v -> Const.float (string_of_float v) + | MLC_Char v -> Const.int v + | MLC_String v -> Const.string v + | MLC_Bytes _ -> failwith "Case not handled" (* do we need this? *) + | _ -> failwith "Case not handled" + +let build_constant_expr (c: mlconstant): expression = + match c with + | MLC_Unit -> Exp.construct (mk_lident "()") None + | MLC_Bool b -> + let id = if b then "true" else "false" in + Exp.construct (mk_lident id) None + | _ -> Exp.constant (build_constant c) + +let build_constant_pat (c: mlconstant): pattern_desc = + match c with + | MLC_Unit -> Ppat_construct (mk_lident "()", None) + | MLC_Bool b -> + let id = if b then "true" else "false" in + Ppat_construct (mk_lident id, None) + | _ -> Ppat_constant (build_constant c) + +let rec build_pattern (p: mlpattern): pattern = + match p with + | MLP_Wild -> Pat.any () + | MLP_Const c -> build_constant_pat c |> Pat.mk + | MLP_Var sym -> Pat.var (mk_sym sym) + | MLP_CTor args -> build_constructor_pat args |> Pat.mk + | MLP_Branch l -> + (match l with + | [pat] -> build_pattern pat + | (pat1::tl) -> Pat.or_ (build_pattern pat1) (build_pattern (MLP_Branch tl)) + | [] -> failwith "Empty branch shouldn't happen") + | MLP_Record (path, l) -> + let fs = map (fun (x,y) -> (path_to_ident (path, x), build_pattern y)) l in + Pat.record fs Open (* does the closed flag matter? 
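   Open prints a trailing "; _" in the record pattern, so OCaml does not
   warn when not every field of the record is matched; Closed would list
   only the given fields and trigger a missing-field warning if any were
   omitted.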
*) + | MLP_Tuple l -> Pat.tuple (map build_pattern l) + +and build_constructor_pat ((path, sym), p) = + let (path', name) = + (* resugaring the Cons and Nil from Prims *) + (match path with + | ["Prims"] + | ["Fstarcompiler.Prims"] -> + (match sym with + | "Cons" -> ([], "::") + | "Nil" -> ([], "[]") + | x -> (path, x)) + | _ -> (path, sym)) in + match p with + | [] -> + Ppat_construct (path_to_ident (path', name), None) + | [pat] -> + Ppat_construct (path_to_ident (path', name), Some ([], build_pattern pat)) + | pats -> + let inner = Pat.tuple (map build_pattern pats) in + Ppat_construct (path_to_ident(path', name), Some ([], inner)) + +let rec build_core_type ?(annots = []) (ty: mlty): core_type = + let t = + match ty with + | MLTY_Var sym -> Typ.mk (Ptyp_var (mk_typ_name sym)) + | MLTY_Fun (ty1, tag, ty2) -> + let c_ty1 = build_core_type ty1 in + let c_ty2 = build_core_type ty2 in + let label = Nolabel in + Typ.mk (Ptyp_arrow (label,c_ty1,c_ty2)) + | MLTY_Named (tys, (path, sym)) -> + let c_tys = map build_core_type tys in + let p = path_to_ident (path, sym) in + let ty = Typ.mk (Ptyp_constr (p, c_tys)) in + (match path with + | ["Fstarcompiler.FStar"; "Pervasives"; "Native"] + | ["FStar"; "Pervasives"; "Native"] -> + (* A special case for tuples, so they are displayed as + * ('a * 'b) instead of ('a,'b) FStar_Pervasives_Native.tuple2 + * VD: Should other types named "tupleXX" where XX does not represent + * the arity of the tuple be added to FStar.Pervasives.Native, + * the condition below might need to be more specific. *) + if BatString.starts_with sym "tuple" then + Typ.mk (Ptyp_tuple (map build_core_type tys)) + else + ty + | _ -> ty) + | MLTY_Tuple tys -> Typ.mk (Ptyp_tuple (map build_core_type tys)) + | MLTY_Top -> Typ.mk (Ptyp_constr (mk_lident "Obj.t", [])) + | MLTY_Erased -> Typ.mk (Ptyp_constr (mk_lident "unit", [])) + in + if annots = [] + then t + else Typ.mk (Ptyp_poly (annots, t)) + +let build_binding_pattern ?ty (sym : mlident) : pattern = + let p = Pat.mk (Ppat_var (mk_sym sym)) in + match ty with + | None -> p + | Some ty -> Pat.mk (Ppat_constraint (p, ty)) + +let resugar_prims_ops path: expression = + (match path with + | (["Prims"], "op_Addition") -> mk_lident "+" + | (["Prims"], "op_Subtraction") -> mk_lident "-" + | (["Prims"], "op_Multiply") -> mk_lident "*" + | (["Prims"], "op_Division") -> mk_lident "/" + | (["Prims"], "op_Equality") -> mk_lident "=" + | (["Prims"], "op_Colon_Equals") -> mk_lident ":=" + | (["Prims"], "op_disEquality") -> mk_lident "<>" + | (["Prims"], "op_AmpAmp") -> mk_lident "&&" + | (["Prims"], "op_BarBar") -> mk_lident "||" + | (["Prims"], "op_LessThanOrEqual") -> mk_lident "<=" + | (["Prims"], "op_GreaterThanOrEqual") -> mk_lident ">=" + | (["Prims"], "op_LessThan") -> mk_lident "<" + | (["Prims"], "op_GreaterThan") -> mk_lident ">" + | (["Prims"], "op_Modulus") -> mk_lident "mod" + | (["Prims"], "op_Minus") -> mk_lident "~-" + | path -> path_to_ident path) + |> Exp.ident + +let resugar_if_stmts ep cases = + if List.length cases = 2 then + let case1 = List.hd cases in + let case2 = BatList.last cases in + (match case1.pc_lhs.ppat_desc with + | Ppat_construct({txt=Lident "true"}, None) -> + Exp.ifthenelse ep case1.pc_rhs (Some case2.pc_rhs) + | _ -> Exp.match_ ep cases) + else + Exp.match_ ep cases + +let rec build_expr (e: mlexpr): expression = + match e.expr with + | MLE_Const c -> build_constant_expr c + | MLE_Var sym -> Exp.ident (mk_lident sym) + | MLE_Name path -> + (match path with + | (["Prims"], op) -> resugar_prims_ops path + | _ 
-> Exp.ident (path_to_ident path)) + | MLE_Let ((flavour, lbs), expr) -> + let recf = match flavour with + | Rec -> Recursive + | NonRec -> Nonrecursive in + let val_bindings = map (build_binding false) lbs in + Exp.let_ recf val_bindings (build_expr expr) + | MLE_App (e, es) -> + let args = map (fun x -> (Nolabel, build_expr x)) es in + let f = build_expr e in + resugar_app f args es + | MLE_TApp (e, ts) -> + build_expr e + | MLE_Fun (l, e) -> build_fun l e + | MLE_Match (e, branches) -> + let ep = build_expr e in + let cases = map build_case branches in + resugar_if_stmts ep cases + | MLE_Coerce (e, _, _) -> + let r = Exp.ident (mk_lident "Obj.magic") in + Exp.apply r [(Nolabel, build_expr e)] + | MLE_CTor args -> build_constructor_expr args + | MLE_Seq args -> build_seq args + | MLE_Tuple l -> Exp.tuple (map build_expr l) + | MLE_Record (path, _, l) -> + let fields = map (fun (x,y) -> (path_to_ident(path, x), build_expr y)) l in + Exp.record fields None + | MLE_Proj (e, path) -> + Exp.field (build_expr e) (path_to_ident (path)) + (* MLE_If always desugared to match? *) + | MLE_If (e, e1, e2) -> + Exp.ifthenelse (build_expr e) (build_expr e1) (BatOption.map build_expr e2) + | MLE_Raise (path, es) -> + let r = Exp.ident (mk_lident "raise") in + let args = map (fun x -> (Nolabel, build_expr x)) es in + Exp.apply r args + | MLE_Try (e, cs) -> + Exp.try_ (build_expr e) (map build_case cs) + +and resugar_app f args es: expression = + match f.pexp_desc with + | Pexp_ident x when x = try_with_ident () -> + (* resugar try_with to a try...with + try_with : (unit -> ML 'a) -> (exn -> ML 'a) -> ML 'a *) + assert (length es == 2); + let s, cs = BatList.first es, BatList.last es in + (* We have FStar.All.try_with s cs, with s : unit -> ML 'a + * and cs : exn -> ML 'a + * + * We need to create an OCaml try..with, with a body and a + * set of cases for catching the exception. + * + * For the body, we simply translate `s ()` and we're done. + * + * For the cases, we can't a similar trick, so we try to reverse-engineer + * the shape of the term in order to obtain a proper set. See get_variants. 
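   *
   * As a rough, hypothetical illustration (Failure, msg, f and g are
   * placeholder names): a source application such as
   *   FStar.All.try_with (fun () -> f x) (fun e -> match e with | Failure msg -> g msg)
   * is printed, more or less, as
   *   try (fun () -> f x) () with | Failure msg -> g msg
   * i.e. the thunked body gets applied to () and the handler's match arms
   * become the with-cases.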
*) + + let body = Exp.apply (build_expr s) [(Nolabel, build_expr ml_unit)] in + let variants = get_variants cs in + Exp.try_ body variants + + | _ -> Exp.apply f args + +and get_variants (e : mlexpr) : Parsetree.case list = + match e.expr with + | MLE_Fun ([{mlbinder_name=id}], e) -> + (match e.expr with + | MLE_Match ({expr = MLE_Var id'}, branches) when id = id' -> + map build_case branches + | _ -> + [build_case (MLP_Var id, None, e)] + ) + | _ -> failwith "Cannot resugar FStar.All.try_with (3)" + +and build_seq args = + match args with + | [hd] -> build_expr hd + | hd::tl -> Exp.sequence (build_expr hd) (build_seq tl) + | [] -> failwith "Empty sequence should never happen" + +and build_constructor_expr ((path, sym), exp): expression = + let path', name = + (match path, sym with + | ["Prims"], "Cons" -> ([], "::") + | ["Prims"], "Nil" -> ([], "[]") + | ["Fstarcompiler.Prims"], "Cons" -> ([], "::") + | ["Fstarcompiler.Prims"], "Nil" -> ([], "[]") + | path, x -> (path, x)) in + match exp with + | [] -> Exp.construct (path_to_ident(path', name)) None + | [e] -> + Exp.construct (path_to_ident(path', name)) (Some (build_expr e)) + | es -> + let inner = Exp.tuple (map build_expr es) in + Exp.construct (path_to_ident(path', name)) (Some inner) + +and build_fun l e = + match l with + | ({mlbinder_name=id; mlbinder_ty=ty}::tl) -> + let p = build_binding_pattern id in + Exp.fun_ Nolabel None p (build_fun tl e) + | [] -> build_expr e + +and build_case ((lhs, guard, rhs): mlbranch): case = + {pc_lhs = (build_pattern lhs); + pc_guard = BatOption.map build_expr guard; + pc_rhs = (build_expr rhs)} + +and build_binding (toplevel: bool) (lb: mllb): value_binding = + (* Add a constraint on the binding (ie. an annotation) for top-level lets *) + let mk1 s = mkloc (String.sub s 1 (String.length s - 1)) none in + let ty = + match lb.mllb_tysc with + | None -> None + | Some ts -> + if lb.print_typ && toplevel + then let vars = List.map mk1 (ty_param_names (fst ts)) in + let ty = snd ts in + Some (build_core_type ~annots:vars ty) + else None + in + let e = build_expr lb.mllb_def in + let p = build_binding_pattern ?ty:ty lb.mllb_name in + (Vb.mk p e) + +let build_label_decl (sym, ty): label_declaration = + Type.field (mk_sym sym) (build_core_type ty) + +let build_constructor_decl (sym, tys): constructor_declaration = + let tys = List.map snd tys in + let args = if BatList.is_empty tys then None else + Some (Pcstr_tuple (map build_core_type tys)) in + Type.constructor ?args:args (mk_sym sym) + +let build_ty_kind (b: mltybody): type_kind = + match b with + | MLTD_Abbrev ty -> Ptype_abstract + | MLTD_Record l -> Ptype_record (map build_label_decl l) + | MLTD_DType l -> Ptype_variant (map build_constructor_decl l) + +let build_ty_manifest (b: mltybody): core_type option= + match b with + | MLTD_Abbrev ty -> Some (build_core_type ty) + | MLTD_Record l -> None + | MLTD_DType l -> None + + +let skip_type_defn (current_module:string) (type_name:string) :bool = + current_module = "FStar_Pervasives" && type_name = "option" + +let type_metadata (md : metadata): attributes option = + let deriving = BatList.filter_map (function + | PpxDerivingShow | PpxDerivingShowConstant _ -> Some "show" + | PpxDerivingYoJson -> Some "yojson" + | _ -> None + ) md in + if List.length deriving > 0 then + let str = String.concat "," deriving in + Some [ { + attr_name = mk_sym "deriving"; + attr_payload = PStr [Str.eval (Exp.ident (mk_lident str))]; + attr_loc = no_location } + ] + else + None + +let add_deriving_const (md: metadata) 
(ptype_manifest: core_type option): core_type option = + match List.filter (function PpxDerivingShowConstant _ -> true | _ -> false) md with + | [PpxDerivingShowConstant s] -> + let e = Exp.apply (Exp.ident (path_to_ident (["Format"], "pp_print_string"))) [(Nolabel, Exp.ident (mk_lident "fmt")); (Nolabel, Exp.constant (Const.string s))] in + let deriving_const = { + attr_name = mk_sym "printer"; + attr_payload = PStr [Str.eval (Exp.fun_ Nolabel None (build_binding_pattern "fmt") (Exp.fun_ Nolabel None (Pat.any ()) e))]; + attr_loc = no_location } in + BatOption.map (fun x -> {x with ptyp_attributes=[deriving_const]}) ptype_manifest + | _ -> ptype_manifest + +let build_one_tydecl ({tydecl_name=x; + tydecl_ignored=mangle_opt; + tydecl_parameters=tparams; + tydecl_meta=attrs; + tydecl_defn=body}: one_mltydecl): type_declaration = + let ptype_name = match mangle_opt with + | Some y -> mk_sym y + | None -> mk_sym x in + let ptype_params = Some (map (fun sym -> Typ.mk (Ptyp_var (mk_typ_name sym)), (NoVariance, NoInjectivity)) (ty_param_names tparams)) in + let (ptype_manifest: core_type option) = + BatOption.map_default build_ty_manifest None body |> add_deriving_const attrs in + let ptype_kind = Some (BatOption.map_default build_ty_kind Ptype_abstract body) in + let ptype_attrs = type_metadata attrs in + Type.mk ?params:ptype_params ?kind:ptype_kind ?manifest:ptype_manifest ?attrs:ptype_attrs ptype_name + +let build_tydecl (td: mltydecl): structure_item_desc option = + let recf = Recursive in + let type_declarations = map build_one_tydecl td in + if type_declarations = [] then None else Some (Pstr_type (recf, type_declarations)) + +let build_exn (sym, tys): type_exception = + let tys = List.map snd tys in + let name = mk_sym sym in + let args = Some (Pcstr_tuple (map build_core_type tys)) in + let ctor = Te.decl ?args:args name in + Te.mk_exception ctor + +let build_module1 path (m1: mlmodule1): structure_item option = + match m1.mlmodule1_m with + | MLM_Ty tydecl -> + (match build_tydecl tydecl with + | Some t -> Some (Str.mk t) + | None -> None) + | MLM_Let (flav, mllbs) -> + let recf = match flav with | Rec -> Recursive | NonRec -> Nonrecursive in + let bindings = map (build_binding true) mllbs in + Some (Str.value recf bindings) + | MLM_Exn exn -> Some (Str.exception_ (build_exn exn)) + | MLM_Top expr -> + let lb = mk_top_mllb expr in + let binding = build_binding true lb in + Some (Str.value Nonrecursive [binding]) + | MLM_Loc (p, f) -> None + +let build_m path (md: (mlsig * mlmodule) option) : structure = + match md with + | Some(s, m) -> + let open_plugin_lib = + if FStarC_Options.codegen () = Some FStarC_Options.Plugin (* NB: PluginNoLib does not open the library *) + then [Str.open_ (Opn.mk ?override:(Some Fresh) (Mod.ident (mk_lident "Fstar_pluginlib")))] + else [] + in + let open_guts = + if FStarC_Options.codegen () = Some FStarC_Options.PluginNoLib + then [Str.open_ (Opn.mk ?override:(Some Fresh) (Mod.ident (mk_lident "Fstarcompiler")))] + else [] + in + let open_prims = + [Str.open_ (Opn.mk ?override:(Some Fresh) (Mod.ident (mk_lident "Prims")))] + in + open_plugin_lib @ open_guts @ open_prims @ (map (build_module1 path) m |> flatmap opt_to_list) + | None -> [] + +let build_ast (out_dir: string option) (ext: string) (ml: mllib) = + match ml with + | MLLib l -> + map (fun (p, md, _) -> + let m = path_to_string p in + current_module := m; + let name = BatString.concat "" [m; ext] in + let path = (match out_dir with + | Some out -> BatString.concat "/" [out; name] + | None -> name) 
in + (path, build_m path md)) l + + +(* printing the AST to the correct path *) +let print_module ((path, m): string * structure) = + Format.set_formatter_out_channel (open_out_bin path); + structure Format.std_formatter m; + Format.pp_print_flush Format.std_formatter () + +let print (out_dir: string option) (ext: string) (ml: mllib) = + match ext with + | ".ml" -> + (* Use this printer for OCaml extraction *) + let ast = build_ast out_dir ext ml in + iter print_module ast + | ".fs" -> + (* Use the old printer for F# extraction *) + let new_doc = FStarC_Extraction_ML_Code.doc_of_mllib ml in + iter (fun (n, d) -> + FStarC_Compiler_Util.write_file + (FStarC_Find.prepend_output_dir (BatString.concat "" [n;ext])) + (FStarC_Extraction_ML_Code.pretty (Prims.parse_int "120") d) + ) new_doc + | _ -> failwith "Unrecognized extension" diff --git a/ocaml/fstar-lib/FStarC_Getopt.ml b/src/ml/bare/FStarC_Getopt.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Getopt.ml rename to src/ml/bare/FStarC_Getopt.ml diff --git a/ocaml/fstar-lib/FStarC_Hash.ml b/src/ml/bare/FStarC_Hash.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Hash.ml rename to src/ml/bare/FStarC_Hash.ml diff --git a/ocaml/fstar-lib/FStarC_Json.ml b/src/ml/bare/FStarC_Json.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Json.ml rename to src/ml/bare/FStarC_Json.ml diff --git a/ocaml/fstar-lib/FStarC_Parser_LexFStar.ml b/src/ml/bare/FStarC_Parser_LexFStar.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Parser_LexFStar.ml rename to src/ml/bare/FStarC_Parser_LexFStar.ml diff --git a/ocaml/fstar-lib/FStarC_Parser_Parse.mly b/src/ml/bare/FStarC_Parser_Parse.mly similarity index 100% rename from ocaml/fstar-lib/FStarC_Parser_Parse.mly rename to src/ml/bare/FStarC_Parser_Parse.mly diff --git a/ocaml/fstar-lib/FStarC_Parser_ParseIt.ml b/src/ml/bare/FStarC_Parser_ParseIt.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Parser_ParseIt.ml rename to src/ml/bare/FStarC_Parser_ParseIt.ml diff --git a/ocaml/fstar-lib/FStarC_Parser_Utf8.ml b/src/ml/bare/FStarC_Parser_Utf8.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Parser_Utf8.ml rename to src/ml/bare/FStarC_Parser_Utf8.ml diff --git a/ocaml/fstar-lib/FStarC_Parser_Util.ml b/src/ml/bare/FStarC_Parser_Util.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Parser_Util.ml rename to src/ml/bare/FStarC_Parser_Util.ml diff --git a/ocaml/fstar-lib/FStarC_Platform.ml b/src/ml/bare/FStarC_Platform.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Platform.ml rename to src/ml/bare/FStarC_Platform.ml diff --git a/ocaml/fstar-lib/FStarC_Pprint.ml b/src/ml/bare/FStarC_Pprint.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Pprint.ml rename to src/ml/bare/FStarC_Pprint.ml diff --git a/ocaml/fstar-lib/FStarC_Reflection_Types.ml b/src/ml/bare/FStarC_Reflection_Types.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Reflection_Types.ml rename to src/ml/bare/FStarC_Reflection_Types.ml diff --git a/ocaml/fstar-lib/FStarC_Sedlexing.ml b/src/ml/bare/FStarC_Sedlexing.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Sedlexing.ml rename to src/ml/bare/FStarC_Sedlexing.ml diff --git a/ocaml/fstar-lib/FStarC_StringBuffer.ml b/src/ml/bare/FStarC_StringBuffer.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_StringBuffer.ml rename to src/ml/bare/FStarC_StringBuffer.ml diff --git a/ocaml/fstar-lib/FStarC_Syntax_TermHashTable.ml b/src/ml/bare/FStarC_Syntax_TermHashTable.ml similarity index 100% rename 
from ocaml/fstar-lib/FStarC_Syntax_TermHashTable.ml rename to src/ml/bare/FStarC_Syntax_TermHashTable.ml diff --git a/ocaml/fstar-lib/FStarC_Tactics_Native.ml b/src/ml/bare/FStarC_Tactics_Native.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Tactics_Native.ml rename to src/ml/bare/FStarC_Tactics_Native.ml diff --git a/ocaml/fstar-lib/FStarC_Unionfind.ml b/src/ml/bare/FStarC_Unionfind.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Unionfind.ml rename to src/ml/bare/FStarC_Unionfind.ml diff --git a/src/ml/full/FStarC_Tactics_Unseal.ml b/src/ml/full/FStarC_Tactics_Unseal.ml new file mode 100644 index 00000000000..62339692c70 --- /dev/null +++ b/src/ml/full/FStarC_Tactics_Unseal.ml @@ -0,0 +1,7 @@ +open Fstarcompiler +open FStarC_Tactics_Result +open FStarC_Tactics_Types + +let tac_return x = fun ps -> Success (x, ps) + +let unseal x = tac_return x diff --git a/src/ml/full/FStarC_Tactics_V1_Builtins.ml b/src/ml/full/FStarC_Tactics_V1_Builtins.ml new file mode 100644 index 00000000000..ef0bc1b2ca7 --- /dev/null +++ b/src/ml/full/FStarC_Tactics_V1_Builtins.ml @@ -0,0 +1,143 @@ +open Fstarcompiler +open Prims +open FStar_Pervasives_Native +open FStar_Pervasives +open FStarC_Tactics_Result +open FStarC_Tactics_Types + +module N = FStarC_TypeChecker_Normalize +module B = FStarC_Tactics_V1_Basic +module TM = FStarC_Tactics_Monad +module CTRW = FStarC_Tactics_CtrlRewrite +module RD = FStarC_Reflection_V1_Data +module EMB = FStarC_Syntax_Embeddings +module EMBBase = FStarC_Syntax_Embeddings_Base +module NBET = FStarC_TypeChecker_NBETerm + +type ('a,'wp) tac_repr = proofstate -> 'a __result +type 'a __tac = ('a, unit) tac_repr + +let interpret_tac (t: 'a TM.tac) (ps: proofstate): 'a __result = + TM.run t ps + +let uninterpret_tac (t: 'a __tac) (ps: proofstate): 'a __result = + t ps + +let to_tac_0 (t: 'a __tac): 'a TM.tac = + (fun (ps: proofstate) -> + uninterpret_tac t ps) |> TM.mk_tac + +let to_tac_1 (t: 'b -> 'a __tac): 'b -> 'a TM.tac = fun x -> + (fun (ps: proofstate) -> + uninterpret_tac (t x) ps) |> TM.mk_tac + +let from_tac_1 (t: 'a -> 'b TM.tac): 'a -> 'b __tac = + fun (x: 'a) -> + fun (ps: proofstate) -> + let m = t x in + interpret_tac m ps + +let from_tac_2 (t: 'a -> 'b -> 'c TM.tac): 'a -> 'b -> 'c __tac = + fun (x: 'a) -> + fun (y: 'b) -> + fun (ps: proofstate) -> + let m = t x y in + interpret_tac m ps + +let from_tac_3 (t: 'a -> 'b -> 'c -> 'd TM.tac): 'a -> 'b -> 'c -> 'd __tac = + fun (x: 'a) -> + fun (y: 'b) -> + fun (z: 'c) -> + fun (ps: proofstate) -> + let m = t x y z in + interpret_tac m ps + +let from_tac_4 (t: 'a -> 'b -> 'c -> 'd -> 'e TM.tac): 'a -> 'b -> 'c -> 'd -> 'e __tac = + fun (x: 'a) -> + fun (y: 'b) -> + fun (z: 'c) -> + fun (w: 'd) -> + fun (ps: proofstate) -> + let m = t x y z w in + interpret_tac m ps + +(* Pointing to the internal primitives *) +let set_goals = from_tac_1 TM.set_goals +let set_smt_goals = from_tac_1 TM.set_smt_goals +let top_env = from_tac_1 B.top_env +let fresh = from_tac_1 B.fresh +let refine_intro = from_tac_1 B.refine_intro +let tc = from_tac_2 B.tc +let tcc = from_tac_2 B.tcc +let unshelve = from_tac_1 B.unshelve +let unquote = fun t -> failwith "Sorry, unquote does not work in compiled tactics" +let norm = fun s -> from_tac_1 B.norm s +let norm_term_env = fun e s -> from_tac_3 B.norm_term_env e s +let norm_binder_type = fun s -> from_tac_2 B.norm_binder_type s +let intro = from_tac_1 B.intro +let intro_rec = from_tac_1 B.intro_rec +let rename_to = from_tac_2 B.rename_to +let revert = from_tac_1 B.revert +let 
binder_retype = from_tac_1 B.binder_retype +let clear_top = from_tac_1 B.clear_top +let clear = from_tac_1 B.clear +let rewrite = from_tac_1 B.rewrite +let t_exact = from_tac_3 B.t_exact +let t_apply = from_tac_4 B.t_apply +let t_apply_lemma = from_tac_3 B.t_apply_lemma +let print = from_tac_1 B.print +let debugging = from_tac_1 B.debugging +let dump = from_tac_1 B.dump +let dump_all = from_tac_2 B.dump_all +let dump_uvars_of = from_tac_2 B.dump_uvars_of +let t_trefl = from_tac_1 B.t_trefl +let dup = from_tac_1 B.dup +let prune = from_tac_1 B.prune +let addns = from_tac_1 B.addns +let t_destruct = from_tac_1 B.t_destruct +let set_options = from_tac_1 B.set_options +let uvar_env = from_tac_2 B.uvar_env +let ghost_uvar_env = from_tac_2 B.ghost_uvar_env +let unify_env = from_tac_3 B.unify_env +let unify_guard_env = from_tac_3 B.unify_guard_env +let match_env = from_tac_3 B.match_env +let launch_process = from_tac_3 B.launch_process +let fresh_bv_named = from_tac_1 B.fresh_bv_named +let change = from_tac_1 B.change +let get_guard_policy = from_tac_1 B.get_guard_policy +let set_guard_policy = from_tac_1 B.set_guard_policy +let lax_on = from_tac_1 B.lax_on +let tadmit_t = from_tac_1 B.tadmit_t +let join = from_tac_1 B.join +let inspect = from_tac_1 B.inspect +let pack = from_tac_1 B.pack +let pack_curried = from_tac_1 B.pack_curried +let curms = from_tac_1 B.curms +let set_urgency = from_tac_1 B.set_urgency +let t_commute_applied_match = from_tac_1 B.t_commute_applied_match +let gather_or_solve_explicit_guards_for_resolved_goals = from_tac_1 B.gather_explicit_guards_for_resolved_goals +let string_to_term = from_tac_2 B.string_to_term +let push_bv_dsenv = from_tac_2 B.push_bv_dsenv +let term_to_string = from_tac_1 B.term_to_string +let comp_to_string = from_tac_1 B.comp_to_string +let range_to_string = from_tac_1 B.range_to_string +let term_eq_old = from_tac_2 B.term_eq_old + +let with_compat_pre_core (n:Prims.int) (f: unit -> 'a __tac) : 'a __tac = + from_tac_2 B.with_compat_pre_core n (to_tac_0 (f ())) + +let get_vconfig = from_tac_1 B.get_vconfig +let set_vconfig = from_tac_1 B.set_vconfig +let t_smt_sync = from_tac_1 B.t_smt_sync +let free_uvars = from_tac_1 B.free_uvars + +(* The handlers need to "embed" their argument. 
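   That is: catch and recover receive a thunk of the plugin-side type
   unit -> 'a __tac, so we force it and re-wrap the result with to_tac_0
   before handing it to TM.catch / TM.recover, which expect an 'a TM.tac.
   A hypothetical handler-style primitive would be wired up the same way,
   e.g. let my_handler t = from_tac_1 TM.my_handler (to_tac_0 (t ())),
   assuming some TM.my_handler with a matching type existed.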
*) +let catch (t: unit -> 'a __tac): ((exn, 'a) either) __tac = from_tac_1 TM.catch (to_tac_0 (t ())) +let recover (t: unit -> 'a __tac): ((exn, 'a) either) __tac = from_tac_1 TM.recover (to_tac_0 (t ())) + +let ctrl_rewrite + (d : direction) + (t1 : FStarC_Syntax_Syntax.term -> (bool * ctrl_flag) __tac) + (t2 : unit -> unit __tac) + : unit __tac + = from_tac_3 CTRW.ctrl_rewrite d (to_tac_1 t1) (to_tac_0 (t2 ())) diff --git a/src/ml/full/FStarC_Tactics_V2_Builtins.ml b/src/ml/full/FStarC_Tactics_V2_Builtins.ml new file mode 100644 index 00000000000..9829fffd4b7 --- /dev/null +++ b/src/ml/full/FStarC_Tactics_V2_Builtins.ml @@ -0,0 +1,183 @@ +open Fstarcompiler +open Prims +open FStar_Pervasives_Native +open FStar_Pervasives +open FStarC_Tactics_Result +open FStarC_Tactics_Types + +module N = FStarC_TypeChecker_Normalize +module B = FStarC_Tactics_V2_Basic +module TM = FStarC_Tactics_Monad +module CTRW = FStarC_Tactics_CtrlRewrite +module RT = FStarC_Reflection_Types +module RD = FStarC_Reflection_V1_Data +module EMB = FStarC_Syntax_Embeddings +module EMBBase = FStarC_Syntax_Embeddings_Base +module NBET = FStarC_TypeChecker_NBETerm + +type ('a,'wp) tac_repr = proofstate -> 'a __result +type 'a __tac = ('a, unit) tac_repr + +let interpret_tac (s:string) (t: 'a TM.tac) (ps: proofstate): 'a __result = + FStarC_Errors.with_ctx + ("While running primitive " ^ s ^ " (called from within a plugin)") + (fun () -> TM.run t ps) + +let uninterpret_tac (t: 'a __tac) (ps: proofstate): 'a __result = + t ps + +let to_tac_0 (t: 'a __tac): 'a TM.tac = + (fun (ps: proofstate) -> + uninterpret_tac t ps) |> TM.mk_tac + +let to_tac_1 (t: 'b -> 'a __tac): 'b -> 'a TM.tac = fun x -> + (fun (ps: proofstate) -> + uninterpret_tac (t x) ps) |> TM.mk_tac + +let from_tac_1 s (t: 'a -> 'r TM.tac): 'a -> 'r __tac = + fun (xa: 'a) (ps : proofstate) -> + let m = t xa in + interpret_tac s m ps + +let from_tac_2 s (t: 'a -> 'b -> 'r TM.tac): 'a -> 'b -> 'r __tac = + fun (xa: 'a) (xb: 'b) (ps : proofstate) -> + let m = t xa xb in + interpret_tac s m ps + +let from_tac_3 s (t: 'a -> 'b -> 'c -> 'r TM.tac): 'a -> 'b -> 'c -> 'r __tac = + fun (xa: 'a) (xb: 'b) (xc: 'c) (ps : proofstate) -> + let m = t xa xb xc in + interpret_tac s m ps + +let from_tac_4 s (t: 'a -> 'b -> 'c -> 'd -> 'r TM.tac): 'a -> 'b -> 'c -> 'd -> 'r __tac = + fun (xa: 'a) (xb: 'b) (xc: 'c) (xd: 'd) (ps : proofstate) -> + let m = t xa xb xc xd in + interpret_tac s m ps + +let from_tac_5 s (t: 'a -> 'b -> 'c -> 'd -> 'e -> 'r TM.tac): 'a -> 'b -> 'c -> 'd -> 'e -> 'r __tac = + fun (xa: 'a) (xb: 'b) (xc: 'c) (xd: 'd) (xe: 'e) (ps : proofstate) -> + let m = t xa xb xc xd xe in + interpret_tac s m ps + + +(* Pointing to the internal primitives *) +let compress = from_tac_1 "B.compress" B.compress +let set_goals = from_tac_1 "TM.set_goals" TM.set_goals +let set_smt_goals = from_tac_1 "TM.set_smt_goals" TM.set_smt_goals +let top_env = from_tac_1 "B.top_env" B.top_env +let fresh = from_tac_1 "B.fresh" B.fresh +let refine_intro = from_tac_1 "B.refine_intro" B.refine_intro +let tc = from_tac_2 "B.tc" B.tc +let tcc = from_tac_2 "B.tcc" B.tcc +let unshelve = from_tac_1 "B.unshelve" B.unshelve +let unquote = fun t -> failwith "Sorry, unquote does not work in compiled tactics" +let norm = fun s -> from_tac_1 "B.norm" B.norm s +let norm_term_env = fun e s -> from_tac_3 "B.norm_term_env" B.norm_term_env e s +let norm_binding_type = fun s -> from_tac_2 "B.norm_binding_type" B.norm_binding_type s +let intro = from_tac_1 "B.intro" B.intro +let intros = from_tac_1 "B.intros" 
B.intros +let intro_rec = from_tac_1 "B.intro_rec" B.intro_rec +let rename_to = from_tac_2 "B.rename_to" B.rename_to +let revert = from_tac_1 "B.revert" B.revert +let var_retype = from_tac_1 "B.var_retype" B.var_retype +let clear_top = from_tac_1 "B.clear_top" B.clear_top +let clear = from_tac_1 "B.clear" B.clear +let rewrite = from_tac_1 "B.rewrite" B.rewrite +let grewrite = from_tac_2 "B.grewrite" B.grewrite +let t_exact = from_tac_3 "B.t_exact" B.t_exact +let t_apply = from_tac_4 "B.t_apply" B.t_apply +let t_apply_lemma = from_tac_3 "B.t_apply_lemma" B.t_apply_lemma +let print = from_tac_1 "B.print" B.print +let debugging = from_tac_1 "B.debugging" B.debugging +let ide = from_tac_1 "B.ide" B.ide +let dump = from_tac_1 "B.dump" B.dump +let dump_all = from_tac_2 "B.dump_all" B.dump_all +let dump_uvars_of = from_tac_2 "B.dump_uvars_of" B.dump_uvars_of +let t_trefl = from_tac_1 "B.t_trefl" B.t_trefl +let dup = from_tac_1 "B.dup" B.dup +let prune = from_tac_1 "B.prune" B.prune +let addns = from_tac_1 "B.addns" B.addns +let t_destruct = from_tac_1 "B.t_destruct" B.t_destruct +let set_options = from_tac_1 "B.set_options" B.set_options +let uvar_env = from_tac_2 "B.uvar_env" B.uvar_env +let ghost_uvar_env = from_tac_2 "B.ghost_uvar_env" B.ghost_uvar_env +let unify_env = from_tac_3 "B.unify_env" B.unify_env +let unify_guard_env = from_tac_3 "B.unify_guard_env" B.unify_guard_env +let match_env = from_tac_3 "B.match_env" B.match_env +let launch_process = from_tac_3 "B.launch_process" B.launch_process +let fresh_bv_named = from_tac_1 "B.fresh_bv_named" B.fresh_bv_named +let change = from_tac_1 "B.change" B.change +let get_guard_policy = from_tac_1 "B.get_guard_policy" B.get_guard_policy +let set_guard_policy = from_tac_1 "B.set_guard_policy" B.set_guard_policy +let lax_on = from_tac_1 "B.lax_on" B.lax_on +let tadmit_t = from_tac_1 "B.tadmit_t" B.tadmit_t +let join = from_tac_1 "B.join" B.join +let curms = from_tac_1 "B.curms" B.curms +let set_urgency = from_tac_1 "B.set_urgency" B.set_urgency +let set_dump_on_failure = from_tac_1 "B.set_dump_on_failure" B.set_dump_on_failure +let t_commute_applied_match = from_tac_1 "B.t_commute_applied_match" B.t_commute_applied_match +let gather_or_solve_explicit_guards_for_resolved_goals = from_tac_1 "B.gather_explicit_guards_for_resolved_goals" B.gather_explicit_guards_for_resolved_goals +let string_to_term = from_tac_2 "B.string_to_term" B.string_to_term +let push_bv_dsenv = from_tac_2 "B.push_bv_dsenv" B.push_bv_dsenv +let term_to_string = from_tac_1 "B.term_to_string" B.term_to_string +let comp_to_string = from_tac_1 "B.comp_to_string" B.comp_to_string +let term_to_doc = from_tac_1 "B.term_to_doc" B.term_to_doc +let comp_to_doc = from_tac_1 "B.comp_to_doc" B.comp_to_doc +let range_to_string = from_tac_1 "B.range_to_string" B.range_to_string +let term_eq_old = from_tac_2 "B.term_eq_old" B.term_eq_old + +let with_compat_pre_core (n:Prims.int) (f: unit -> 'a __tac) : 'a __tac = + from_tac_2 "B.with_compat_pre_core" B.with_compat_pre_core n (to_tac_0 (f ())) + +let get_vconfig = from_tac_1 "B.get_vconfig" B.get_vconfig +let set_vconfig = from_tac_1 "B.set_vconfig" B.set_vconfig +let t_smt_sync = from_tac_1 "B.t_smt_sync" B.t_smt_sync +let free_uvars = from_tac_1 "B.free_uvars" B.free_uvars +let all_ext_options = from_tac_1 "B.all_ext_options" B.all_ext_options +let ext_getv = from_tac_1 "B.ext_getv" B.ext_getv +let ext_getns = from_tac_1 "B.ext_getns" B.ext_getns + +let alloc x = from_tac_1 "B.alloc" B.alloc x +let read r = from_tac_1 "B.read" B.read r +let 
write r x = from_tac_2 "B.write" B.write r x + +type ('env, 't) prop_validity_token = unit +type ('env, 'sc, 't, 'pats, 'bnds) match_complete_token = unit + +let is_non_informative = from_tac_2 "B.refl_is_non_informative" B.refl_is_non_informative +let check_subtyping = from_tac_3 "B.refl_check_subtyping" B.refl_check_subtyping +let t_check_equiv = from_tac_5 "B.t_refl_check_equiv" B.t_refl_check_equiv +let core_compute_term_type = from_tac_2 "B.refl_core_compute_term_type" B.refl_core_compute_term_type +let core_check_term = from_tac_4 "B.refl_core_check_term" B.refl_core_check_term +let core_check_term_at_type = from_tac_3 "B.refl_core_check_term_at_type" B.refl_core_check_term_at_type +let check_match_complete = from_tac_4 "B.refl_check_match_complete" B.refl_check_match_complete +let tc_term = from_tac_2 "B.refl_tc_term" B.refl_tc_term +let universe_of = from_tac_2 "B.refl_universe_of" B.refl_universe_of +let check_prop_validity = from_tac_2 "B.refl_check_prop_validity" B.refl_check_prop_validity +let instantiate_implicits = from_tac_3 "B.refl_instantiate_implicits" B.refl_instantiate_implicits +let try_unify = from_tac_4 "B.refl_try_unify" B.refl_try_unify +let maybe_relate_after_unfolding = from_tac_3 "B.refl_maybe_relate_after_unfolding" B.refl_maybe_relate_after_unfolding +let maybe_unfold_head = from_tac_2 "B.refl_maybe_unfold_head" B.refl_maybe_unfold_head +let norm_well_typed_term = from_tac_3 "B.norm_well_typed_term" B.refl_norm_well_typed_term + +let push_open_namespace = from_tac_2 "B.push_open_namespace" B.push_open_namespace +let push_module_abbrev = from_tac_3 "B.push_module_abbrev" B.push_module_abbrev +let resolve_name = from_tac_2 "B.resolve_name" B.resolve_name +let log_issues = from_tac_1 "B.log_issues" B.log_issues + +(* The handlers need to "embed" their argument. *) +let catch (t: unit -> 'a __tac): ((exn, 'a) either) __tac = from_tac_1 "TM.catch" TM.catch (to_tac_0 (t ())) +let recover (t: unit -> 'a __tac): ((exn, 'a) either) __tac = from_tac_1 "TM.recover" TM.recover (to_tac_0 (t ())) + +let ctrl_rewrite + (d : direction) + (t1 : FStarC_Syntax_Syntax.term -> (bool * ctrl_flag) __tac) + (t2 : unit -> unit __tac) + : unit __tac + = from_tac_3 "ctrl_rewrite" CTRW.ctrl_rewrite d (to_tac_1 t1) (to_tac_0 (t2 ())) + +let call_subtac g (t : unit -> unit __tac) u ty = + let t = to_tac_1 t () in + from_tac_4 "B.call_subtac" B.call_subtac g t u ty + +let call_subtac_tm = from_tac_4 "B.call_subtac_tm" B.call_subtac_tm diff --git a/src/ocaml-output/.gitignore b/src/ocaml-output/.gitignore deleted file mode 100644 index 2d5f063d671..00000000000 --- a/src/ocaml-output/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -yac-log -.depend -*.cm* -*.o -*.exe -fstar/ -*.tar.gz - -# A touch file -install-compiler-lib - -main.ml - -fstarc/ -fstarlib/ diff --git a/src/ocaml-output/Makefile b/src/ocaml-output/Makefile deleted file mode 100644 index d3ce6ad14d9..00000000000 --- a/src/ocaml-output/Makefile +++ /dev/null @@ -1,156 +0,0 @@ -include ../../.common.mk - -FSTAR_HOME = ../.. -DUNE_SNAPSHOT ?= $(call maybe_cygwin_path,$(realpath $(FSTAR_HOME)/ocaml)) -export DUNE_SNAPSHOT - -# The string "Madoko" if madoko is installed, something else otherwise. 
-MADOKO = $(shell madoko --version 2>/dev/null | cut -c -6) -DOS2UNIX=$(shell which dos2unix >/dev/null 2>&1 && echo dos2unix || echo true) - -# Detect the GNU utilities -INSTALL_EXEC := $(shell ginstall --version 2>/dev/null | cut -c -8 | head -n 1) -FIND=$(shell which gfind >/dev/null 2>&1 && echo gfind || echo find) -ifdef INSTALL_EXEC - INSTALL_EXEC := ginstall -else - INSTALL_EXEC := install -endif -export INSTALL_EXEC - -all: dune-snapshot - -.PHONY: dune-stdlib-snapshot dune-snapshot dune-fstar-snapshot dune-verify-ulib source-files - -# FIXME: dune-fstar-snapshot should also depend on dune-verify-ulib, -# once we no longer lax-typecheck -dune-fstar-snapshot: - +$(MAKE) -C .. ocaml - -dune-verify-ulib: - +$(MAKE) -C ../../ulib core - -dune-stdlib-snapshot: dune-verify-ulib - $(MAKE) -C ../../ulib -f Makefile.extract dune-snapshot - -dune-snapshot: dune-fstar-snapshot dune-stdlib-snapshot - +$(Q)$(MAKE) overlay-snapshots - -overlay-snapshots: - @# These copies MUST be done in this order to make sure - @# we avoid races and always end in a consistent state. - $(call msg, "OVERLAY SNAPSHOTS") - $(Q)install -m0644 -t $(DUNE_SNAPSHOT)/fstar-lib/generated/ fstarc/*.ml - $(Q)install -m0644 -t $(DUNE_SNAPSHOT)/fstar-lib/generated/ fstarlib/*.ml - -# ------------------------------------------------------------------------------ -# Preparing a release... these targets are not optimized and the Makefile is -# actually used for scripting a bunch of stuff. -# ------------------------------------------------------------------------------ - -# Copy the contents of $(1) into $(PREFIX)/$(2) while setting the right file -# permissions and creating directories on the fly as needed. -# (JP: the package version of this command is based on git but for OPAM -# installs we cannot assume the user has git installed.) -install_dir = cd ../../$(1) && find . -type f -exec $(INSTALL_EXEC) -m 644 -D {} $(PREFIX)/$(2)/{} \; - -# Install FStar into $(PREFIX) using the standard Unix directory -# structure. - -# On Cygwin, the `--prefix` option to dune only -# supports Windows paths. -FSTAR_PREFIX=$(call maybe_cygwin_path,$(PREFIX)) - -FSTAR_BUILD_PROFILE ?= release - -install: - @# Rebuild everything - +$(MAKE) -C $(FSTAR_HOME) - @# Install the binary and the binary library - cd $(DUNE_SNAPSHOT) && dune install --profile=$(FSTAR_BUILD_PROFILE) --prefix=$(FSTAR_PREFIX) - @# Then the standard library sources and checked files - +$(MAKE) -C $(FSTAR_HOME)/ulib install - @# Then the rest - +$(MAKE) install-sides - -# The `install-sides` rule is intended to be run only by the nix flake. 
-# Indeed, nix needs to patch binaries created by OCaml (here fstar.exe), -# thus nix cannot use `dune install` - -.PHONY: install-sides -install-sides: - @# Then the examples (those now work from any F* installation flavor, sources, binary package or opam) - @# contrib is needed by examples/crypto - $(call install_dir,examples,share/fstar/examples) - $(call install_dir,contrib,share/fstar/contrib) - $(call install_dir,mk,share/fstar/mk) - @echo '# This line added only for the binary package, to use the fstar.exe in the package' >> $(PREFIX)/share/fstar/mk/test.mk - @echo 'FSTAR_EXE := $$(abspath $$(FSTAR_ROOT)/../../bin/fstar.exe)' >> $(PREFIX)/share/fstar/mk/test.mk - @# Then the tutorial -ifeq ($(MADOKO),Madoko) - @# Build the tutorial first - +$(MAKE) -C ../../doc/old/tutorial -endif - $(INSTALL_EXEC) -m 644 -D ../../doc/Makefile.include $(PREFIX)/share/fstar/doc/Makefile.include - $(call install_dir,doc/old/tutorial,share/fstar/doc/old/tutorial) - -PACKAGE_NAME ?= fstar - -ifeq ($(OS),Windows_NT) - Z3_NAME=z3.exe -else - Z3_NAME=z3 -endif -Z3_DIR=$(dir $(shell which $(Z3_NAME))) -# Z3_LICENSE MUST be explicitly overridden if z3 is installed from an opam package. -# See for instance $(FSTAR_HOME)/.docker/package.Dockerfile -ifndef Z3_LICENSE - Z3_LICENSE?=$(shell if test -f $(Z3_DIR)/LICENSE.txt ; then echo $(Z3_DIR)/LICENSE.txt ; elif test -f $(Z3_DIR)/../LICENSE.txt ; then echo $(Z3_DIR)/../LICENSE.txt ; fi) -endif - -# Create a zip / tar.gz package of FStar that contains a Z3 binary and -# proper license files. - -# On Cygwin, the `--prefix` option to dune only -# supports Windows paths. -package_prefix=$(call maybe_cygwin_path,$(CURDIR)/fstar) - -package_dir = cd ../../$(1) && find . -type f -exec $(INSTALL_EXEC) -m 644 -D {} $(package_prefix)/$(2)/{} \; - -package: - if test -z "$(Z3_LICENSE)" ; then echo Please set Z3_LICENSE to the location of the license file for Z3 ; false ; fi - @# Clean previous packages. - ! [ -d "$(package_prefix)" ] - rm -f $(PACKAGE_NAME).zip $(PACKAGE_NAME).tar.gz - @# Install F* into the package - +PREFIX=$(package_prefix) $(MAKE) install - @# Make the F* ulib F# DLL (NOT the nuget package) - +PREFIX=$(package_prefix) $(MAKE) -C $(FSTAR_HOME)/ulib ulib-in-fsharp-dll - @# Then the version file. - cp ../../version.txt $(package_prefix)/ - @# Documentation and licenses - cp ../../README.md ../../INSTALL.md ../../LICENSE ../../LICENSE-fsharp.txt $(package_prefix) - cp $(Z3_LICENSE) $(package_prefix)/LICENSE-z3.txt - @# Z3 -ifeq ($(OS),Windows_NT) - cp $(shell which libgmp-10.dll) $(package_prefix)/bin - cp $(Z3_DIR)/*.exe $(Z3_DIR)/*.dll $(Z3_DIR)/*.lib $(package_prefix)/bin - chmod a+x $(package_prefix)/bin/z3.exe $(package_prefix)/bin/*.dll - zip -r -9 $(PACKAGE_NAME).zip fstar -else - cp $(Z3_DIR)/z3 $(package_prefix)/bin - tar czf $(PACKAGE_NAME).tar.gz fstar -endif - -.PHONY: clean -# Clean up all files generated by targets in _this_ Makefile -# We only remove the dynamic/ part of the OCaml snapshot. The rest -# should be removed only through the `clean-dune-snapshot` rule in the -# root Makefile. This is because `make all` no longer performs any -# bootstrapping, so `make clean` shouldn't clean up the whole -# snapshot. 
-clean: - $(call msg, "CLEAN", "src/ocaml-output") - $(Q)rm -f *.tar.gz *.zip - $(Q)rm -f version_platform.txt - $(Q)rm -rf fstar diff --git a/src/parser/Makefile b/src/parser/Makefile deleted file mode 100644 index 1bec20525c0..00000000000 --- a/src/parser/Makefile +++ /dev/null @@ -1,17 +0,0 @@ -all: ../../bin/parse.exe - -../../bin/parse.exe: ast.fs parse.fs lexhelp.fs lex.fs dsenv.fs desugar.fs parseit.fsi parseit.fs driver.fs - fsc --define:TEST -g -r ../../bin/tc.dll -r ../../bin/basic.dll -r ../../bin/absyn.dll --mlcompatibility $^ -o $@ - -parse.fs: parse.fsy - fsyacc --module FStar.Parser.Parse $^ - -lex.fs: lex.fsl - fslex --unicode $^ - -clean: - rm lex.fs parse.fs - -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/src/parser/README b/src/parser/README index be87da38e0b..449be539603 100644 --- a/src/parser/README +++ b/src/parser/README @@ -2,14 +2,14 @@ Parser ------ The parser is implemented as a menhir grammar in -FSTAR_HOME/ocaml/fstar-lib/FStar_Parser_Parse.mly +/src/ml/bare/FStarC_Parser_Parse.mly To call into the parser from F*, we have a wrapper written in OCaml with an F* interface: * FStarC.Parser.ParseIt.fsti: This is the F* interface -* FSTAR_HOME/ocaml/fstar-lib/FStar_Parser_ParseIt.ml: This is its +* src/ml/bare/FStar_Parser_ParseIt.ml: This is its implementation It provides an API that allows parsing and entire file, a fragment of @@ -21,22 +21,22 @@ Lexer ----- The lexer is written using sedlex in -FSTAR_HOME/ocaml/fstar-lib/FStar_Parser_LexFStar.ml. +src/ml/bare/FStarC_Parser_LexFStar.ml. It uses a small wrapper for the OCaml Lexing module implemeted in -FSTAR_HOME/ocaml/fstar-lib/FStar_SedLexing.ml +src/ml/bare/FStarC_SedLexing.ml If you want to modify the parser, you need a recent version of menhir (at least december 2016). Also the printer in -[src/parser/FStarC.Parser.ToDocument.fs] should be kept up to date with +[src/parser/FStarC.Parser.ToDocument.fst] should be kept up to date with the parser as much as possible since it tries to keep the same general structure as the parser. If you're adding a new token, you need to edit: -- [src/parser/ml/FStar_Parser_LexFStar.ml] to add it to the parser +- [src/ml/bare/FStarC_Parser_LexFStar.ml] to add it to the parser keyword table (OCaml) -- [FStar_Parser_Parse.mly] to expose it to the parser, possibly with +- [src/ml/bare/FStarC_Parser_Parse.mly] to expose it to the parser, possibly with an adequate precedence diff --git a/src/prettyprint/Makefile b/src/prettyprint/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/prettyprint/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. 
- -include ../Makefile.boot.common diff --git a/src/reflection/FStarC.Reflection.V1.Builtins.fsti b/src/reflection/FStarC.Reflection.V1.Builtins.fsti index 2f09bfd14f4..b9abc4e0aac 100644 --- a/src/reflection/FStarC.Reflection.V1.Builtins.fsti +++ b/src/reflection/FStarC.Reflection.V1.Builtins.fsti @@ -15,18 +15,20 @@ *) module FStarC.Reflection.V1.Builtins -open FStarC.Ident -open FStarC.Syntax.Syntax +open FStarC.Compiler +open FStarC.Compiler.Effect +open FStarC.Compiler.Order +open FStarC.Reflection.V1.Data open FStarC.Syntax.Embeddings -open FStar.Order +open FStarC.Syntax.Syntax +open FStarC.VConfig +open FStarC.Ident + +module EMB = FStarC.Syntax.Embeddings module Env = FStarC.TypeChecker.Env -open FStarC.Reflection.V1.Data -open FStarC.Compiler.Effect module O = FStarC.Options module RD = FStarC.Reflection.V1.Data -module EMB = FStarC.Syntax.Embeddings module Z = FStarC.BigInt -open FStarC.VConfig (* Primitives *) val compare_bv : bv -> bv -> order diff --git a/src/reflection/FStarC.Reflection.V2.Builtins.fsti b/src/reflection/FStarC.Reflection.V2.Builtins.fsti index 2e35aa4027e..1d32bd1b1f0 100644 --- a/src/reflection/FStarC.Reflection.V2.Builtins.fsti +++ b/src/reflection/FStarC.Reflection.V2.Builtins.fsti @@ -17,12 +17,12 @@ module FStarC.Reflection.V2.Builtins open FStarC.Compiler open FStarC.Compiler.Effect -open FStarC.Ident -open FStar.Order +open FStarC.Compiler.Order open FStarC.Reflection.V2.Data open FStarC.Syntax.Embeddings open FStarC.Syntax.Syntax open FStarC.VConfig +open FStarC.Ident module EMB = FStarC.Syntax.Embeddings module Env = FStarC.TypeChecker.Env diff --git a/src/reflection/Makefile b/src/reflection/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/reflection/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/src/smtencoding/Makefile b/src/smtencoding/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/smtencoding/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. 
- -include ../Makefile.boot.common diff --git a/src/syntax/FStarC.Syntax.Embeddings.fst b/src/syntax/FStarC.Syntax.Embeddings.fst index e3a90fbd00b..008305ef41f 100644 --- a/src/syntax/FStarC.Syntax.Embeddings.fst +++ b/src/syntax/FStarC.Syntax.Embeddings.fst @@ -1061,7 +1061,7 @@ let e_vconfig = (fun () -> ET_app (PC.vconfig_lid |> Ident.string_of_lid, [])) let e_order = - let open FStar.Order in + let open FStarC.Compiler.Order in let ord_Lt_lid = Ident.lid_of_path (["FStar"; "Order"; "Lt"]) Range.dummyRange in let ord_Eq_lid = Ident.lid_of_path (["FStar"; "Order"; "Eq"]) Range.dummyRange in let ord_Gt_lid = Ident.lid_of_path (["FStar"; "Order"; "Gt"]) Range.dummyRange in diff --git a/src/syntax/FStarC.Syntax.Embeddings.fsti b/src/syntax/FStarC.Syntax.Embeddings.fsti index b1c13049f8e..1ccf3273536 100644 --- a/src/syntax/FStarC.Syntax.Embeddings.fsti +++ b/src/syntax/FStarC.Syntax.Embeddings.fsti @@ -44,7 +44,7 @@ instance val e_string : embedding string instance val e_real : embedding Compiler.Real.real instance val e_norm_step : embedding Pervasives.norm_step instance val e_vconfig : embedding FStarC.VConfig.vconfig -instance val e_order : embedding FStar.Order.order +instance val e_order : embedding FStarC.Compiler.Order.order instance val e_option : embedding 'a -> Tot (embedding (option 'a)) instance val e_list : embedding 'a -> Tot (embedding (list 'a)) diff --git a/src/syntax/Makefile b/src/syntax/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/syntax/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/src/tactics/Makefile b/src/tactics/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/tactics/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/src/tests/Makefile b/src/tests/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/tests/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/src/tosyntax/Makefile b/src/tosyntax/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/tosyntax/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. 
- -include ../Makefile.boot.common diff --git a/src/typechecker/FStarC.TypeChecker.NBETerm.fst b/src/typechecker/FStarC.TypeChecker.NBETerm.fst index f4a23cd5402..57b3219d247 100644 --- a/src/typechecker/FStarC.TypeChecker.NBETerm.fst +++ b/src/typechecker/FStarC.TypeChecker.NBETerm.fst @@ -919,7 +919,7 @@ let e_order = let ord_Lt_fv = lid_as_fv ord_Lt_lid (Some Data_ctor) in let ord_Eq_fv = lid_as_fv ord_Eq_lid (Some Data_ctor) in let ord_Gt_fv = lid_as_fv ord_Gt_lid (Some Data_ctor) in - let open FStar.Order in + let open FStarC.Compiler.Order in let embed_order cb (o:order) : t = match o with | Lt -> mkConstruct ord_Lt_fv [] [] diff --git a/src/typechecker/FStarC.TypeChecker.NBETerm.fsti b/src/typechecker/FStarC.TypeChecker.NBETerm.fsti index 3eb2efac532..5b4d54b7c49 100644 --- a/src/typechecker/FStarC.TypeChecker.NBETerm.fsti +++ b/src/typechecker/FStarC.TypeChecker.NBETerm.fsti @@ -301,7 +301,7 @@ instance val e_string_list : embedding (list string) val e_arrow : embedding 'a -> embedding 'b -> embedding ('a -> 'b) instance val e_abstract_nbe_term : embedding abstract_nbe_term -instance val e_order : embedding FStar.Order.order +instance val e_order : embedding FStarC.Compiler.Order.order (* Unconditionally fails raising an exception when called *) val e_unsupported : #a:Type -> embedding a diff --git a/src/typechecker/FStarC.TypeChecker.Tc.fst b/src/typechecker/FStarC.TypeChecker.Tc.fst index 4ccd7d83b7d..c4900c7c30f 100644 --- a/src/typechecker/FStarC.TypeChecker.Tc.fst +++ b/src/typechecker/FStarC.TypeChecker.Tc.fst @@ -1222,7 +1222,7 @@ let load_checked_module (en:env) (m:modul) :env = module. *) (* Reset debug flags *) - if Options.should_check (string_of_lid m.name) || Options.debug_all_modules () + if Options.should_check (string_of_lid m.name) // || Options.debug_all_modules () then Debug.enable_toggles (Options.debug_keys ()) else Debug.disable_all (); diff --git a/src/typechecker/Makefile b/src/typechecker/Makefile deleted file mode 100644 index 9f6d5bc01e9..00000000000 --- a/src/typechecker/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -FSTAR_HOME ?= ../.. - -include ../Makefile.boot.common diff --git a/stage0/.gitattributes b/stage0/.gitattributes new file mode 100644 index 00000000000..3ceac9c918b --- /dev/null +++ b/stage0/.gitattributes @@ -0,0 +1,5 @@ +# Do not shows diff nor try to merge for the OCaml snapshot and hint +# files. This just takes the latest one when there's a conflict, and +# does not show diffs in the CLI nor GitHub. 
+** -diff -merge +** linguist-generated=true diff --git a/ocaml/.gitignore b/stage0/.gitignore similarity index 87% rename from ocaml/.gitignore rename to stage0/.gitignore index 9e6e65c9f82..3daf4e6bbe4 100644 --- a/ocaml/.gitignore +++ b/stage0/.gitignore @@ -1,2 +1,3 @@ # copied from the root version.txt +/lib diff --git a/stage0/Makefile b/stage0/Makefile new file mode 120000 index 00000000000..b0cd2af01e9 --- /dev/null +++ b/stage0/Makefile @@ -0,0 +1 @@ +../mk/stage0.mk \ No newline at end of file diff --git a/ocaml/default.nix b/stage0/default.nix similarity index 100% rename from ocaml/default.nix rename to stage0/default.nix diff --git a/ocaml/dune b/stage0/dune similarity index 100% rename from ocaml/dune rename to stage0/dune diff --git a/ocaml/dune-project b/stage0/dune-project similarity index 100% rename from ocaml/dune-project rename to stage0/dune-project diff --git a/ocaml/fstar-lib/.gitignore b/stage0/fstar-lib/.gitignore similarity index 100% rename from ocaml/fstar-lib/.gitignore rename to stage0/fstar-lib/.gitignore diff --git a/stage0/fstar-lib/FStarC_BaseTypes.ml b/stage0/fstar-lib/FStarC_BaseTypes.ml new file mode 100644 index 00000000000..66b018bd10c --- /dev/null +++ b/stage0/fstar-lib/FStarC_BaseTypes.ml @@ -0,0 +1,10 @@ +type char = FStar_Char.char[@@deriving yojson,show] +type float = FStar_Float.float[@@deriving yojson,show] +type double = FStar_Float.double[@@deriving yojson,show] +type byte = FStar_UInt8.byte[@@deriving yojson,show] +type int8 = FStar_Int8.int8 +type uint8 = FStar_UInt8.uint8 +type int16 = FStar_Int16.int16 +type uint16 = FStar_UInt16.uint16 +type int32 = FStar_Int32.int32 +type int64 = FStar_Int64.int64 diff --git a/stage0/fstar-lib/FStarC_BigInt.ml b/stage0/fstar-lib/FStarC_BigInt.ml new file mode 100644 index 00000000000..2314ae4ffac --- /dev/null +++ b/stage0/fstar-lib/FStarC_BigInt.ml @@ -0,0 +1,44 @@ +type bigint = Z.t +type t = bigint + +let zero = Z.zero +let one = Z.one +let two = Z.of_string "2" + +let succ_big_int = Z.succ +let pred_big_int = Z.pred +let minus_big_int = Z.neg +let abs_big_int = Z.abs + +let add_big_int = Z.add +let mult_big_int = Z.mul +let sub_big_int = Z.sub +let div_big_int = Z.ediv +let mod_big_int = Z.erem + +let eq_big_int = Z.equal +let le_big_int = Z.leq +let lt_big_int = Z.lt +let ge_big_int = Z.geq +let gt_big_int = Z.gt + +let logand_big_int = Z.logand +let logor_big_int = Z.logor +let logxor_big_int = Z.logxor +let lognot_big_int = Z.lognot + +let shift_left_big_int x y = Z.shift_left x (Z.to_int y) +let shift_right_big_int x y = Z.shift_right x (Z.to_int y) + +let sqrt_big_int = Z.sqrt + +let string_of_big_int = Z.to_string +let big_int_of_string = Z.of_string + +let of_int = Z.of_int +let to_int = Z.to_int + +let of_int_fs x = x +let to_int_fs x = x + +let of_hex x = Z.of_string ("0x" ^ x) diff --git a/stage0/fstar-lib/FStarC_Compiler_Bytes.ml b/stage0/fstar-lib/FStarC_Compiler_Bytes.ml new file mode 100644 index 00000000000..c9ed427771d --- /dev/null +++ b/stage0/fstar-lib/FStarC_Compiler_Bytes.ml @@ -0,0 +1,111 @@ +let b0 n = (n land 0xFF) +let b1 n = ((n lsr 8) land 0xFF) +let b2 n = ((n lsr 16) land 0xFF) +let b3 n = ((n lsr 24) land 0xFF) + +let dWw1 n = BatInt64.to_int (BatInt64.logand (BatInt64.shift_right n 32) 0xFFFFFFFFL) +let dWw0 n = BatInt64.to_int (BatInt64.logand n 0xFFFFFFFFL) + +type bytes = int array + +let f_encode f (b:bytes) = String.concat "" (Array.to_list (Array.map f b)) +let length (b:bytes) = BatArray.length b +let get (b:bytes) n = Z.of_int (BatArray.get b (Z.to_int n)) 
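(* Illustrative sketch, added only as an example: bytes here are plain int
   arrays, and the b0..b3 helpers above pick out the four low-order bytes of
   an int, least significant first -- the same layout that fixup_i32 and
   emit_i32 use further below. *)
let _example_i32_bytes (n:int) : bytes = [| b0 n; b1 n; b2 n; b3 n |]
(* e.g. _example_i32_bytes 0x12345678 = [| 0x78; 0x56; 0x34; 0x12 |] *)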
+let make (f : _ -> Z.t) n = BatArray.init (Z.to_int n) (fun i -> Z.to_int (f (Z.of_int i))) +let zero_create n : bytes = BatArray.make n 0 + +let sub ( b:bytes) s l = BatArray.sub b s l +let set = BatArray.set +let blit (a:bytes) b c d e = BatArray.blit a b c d e +let string_as_unicode_bytes (s:string) = FStarC_Compiler_Util.unicode_of_string s +let utf8_bytes_as_string (b:bytes) = FStarC_Compiler_Util.string_of_unicode b +let unicode_bytes_as_string (b:bytes) = FStarC_Compiler_Util.string_of_unicode b +let compare (b1:bytes) (b2:bytes) = compare b1 b2 + +let to_intarray (b:bytes) = b +let of_intarray (arr:int array) = arr + +let string_as_utf8_bytes (s:string) = FStarC_Compiler_Util.unicode_of_string s + +let append (b1: bytes) (b2:bytes) = BatArray.append b1 b2 + +type bytebuf = + { mutable bbArray: bytes; + mutable bbCurrent: int } + +module Bytebuf = struct + let create sz = + { bbArray=zero_create sz; + bbCurrent = 0; } + + let ensure_bytebuf buf new_size = + let old_buf_size = BatArray.length buf.bbArray in + if new_size > old_buf_size then ( + let old = buf.bbArray in + buf.bbArray <- zero_create (max new_size (old_buf_size * 2)); + blit old 0 buf.bbArray 0 buf.bbCurrent + ) + + let close buf = sub buf.bbArray 0 buf.bbCurrent + + let emit_int_as_byte buf i = + let new_size = buf.bbCurrent + 1 in + ensure_bytebuf buf new_size; + set buf.bbArray buf.bbCurrent i; + buf.bbCurrent <- new_size + + let emit_byte buf (b:char) = emit_int_as_byte buf (int_of_char b) + let emit_bool_as_byte buf (b:bool) = emit_int_as_byte buf (if b then 1 else 0) + + let emit_bytes buf i = + let n = length i in + let new_size = buf.bbCurrent + n in + ensure_bytebuf buf new_size; + blit i 0 buf.bbArray buf.bbCurrent n; + buf.bbCurrent <- new_size + + let emit_i32_as_u16 buf n = + let new_size = buf.bbCurrent + 2 in + ensure_bytebuf buf new_size; + set buf.bbArray buf.bbCurrent (b0 n); + set buf.bbArray (buf.bbCurrent + 1) (b1 n); + buf.bbCurrent <- new_size + + (* let emit_u16 buf (x:uint16) = emit_i32_as_u16 buf (BatInt64.to_int x) *) + + let fixup_i32 bb pos n = + set bb.bbArray pos (b0 n); + set bb.bbArray (pos + 1) (b1 n); + set bb.bbArray (pos + 2) (b2 n); + set bb.bbArray (pos + 3) (b3 n) + + let emit_i32 buf n = + let new_size = buf.bbCurrent + 4 in + ensure_bytebuf buf new_size; + fixup_i32 buf buf.bbCurrent n; + buf.bbCurrent <- new_size + + let emit_i64 buf x = + emit_i32 buf (dWw0 x); + emit_i32 buf (dWw1 x) + + let emit_intarray_as_bytes buf arr = + let n = BatArray.length arr in + let new_size = buf.bbCurrent + n in + ensure_bytebuf buf new_size; + let bbarr = buf.bbArray in + let bbbase = buf.bbCurrent in + for i= 0 to n - 1 do set bbarr (bbbase + i) (BatArray.get arr i) done; + buf.bbCurrent <- new_size + + let length bb = bb.bbCurrent + let position bb = bb.bbCurrent + +end + +let create i = Bytebuf.create i +let close t = Bytebuf.close t +let emit_int_as_byte t i = Bytebuf.emit_int_as_byte t (Z.to_int i) +let emit_bytes t b = Bytebuf.emit_bytes t b + +let length x = Z.of_int (length x) diff --git a/stage0/fstar-lib/FStarC_Compiler_Effect.ml b/stage0/fstar-lib/FStarC_Compiler_Effect.ml new file mode 100644 index 00000000000..f2f62ef3da1 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Compiler_Effect.ml @@ -0,0 +1,14 @@ +type 'a ref' = 'a ref[@@deriving yojson,show] +type 'a ref = 'a ref'[@@deriving yojson,show] + +let op_Bang (r:'a ref) = !r +let op_Colon_Equals x y = x := y +let alloc x = ref x +let raise = raise +let exit i = exit (Z.to_int i) +exception Failure = Failure (* NB: reusing OCaml's 
native Failure. *) +(* Not used: handled specially by extraction. If used, + you will get all sorts of weird failures (e.g. an incomplete match + on f2!). *) +(* let try_with f1 f2 = try f1 () with | e -> f2 e *) +(* let failwith x = raise (Failure x) *) diff --git a/stage0/fstar-lib/FStarC_Compiler_Hints.ml b/stage0/fstar-lib/FStarC_Compiler_Hints.ml new file mode 100644 index 00000000000..85f40c7f926 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Compiler_Hints.ml @@ -0,0 +1,118 @@ +open FStarC_Json + +(** Hints. *) +type hint = { + hint_name:string; + hint_index:Z.t; + fuel:Z.t; + ifuel:Z.t; + unsat_core:string list option; + query_elapsed_time:Z.t; + hash:string option +} + +type hints = hint option list + +type hints_db = { + module_digest:string; + hints: hints +} + +type hints_read_result = + | HintsOK of hints_db + | MalformedJson + | UnableToOpen + +let write_hints (filename: string) (hints: hints_db): unit = + let json = `List [ + `String hints.module_digest; + `List (List.map (function + | None -> `Null + | Some { hint_name; hint_index; fuel; ifuel; unsat_core; query_elapsed_time; hash } -> + `List [ + `String hint_name; + `Int (Z.to_int hint_index); + `Int (Z.to_int fuel); + `Int (Z.to_int ifuel); + (match unsat_core with + | None -> `Null + | Some strings -> + `List (List.map (fun s -> `String s) strings)); + `Int (Z.to_int query_elapsed_time); + `String (match hash with | Some(h) -> h | _ -> "") + ] + ) hints.hints) + ] in + let channel = open_out_bin filename in + BatPervasives.finally + (fun () -> close_out channel) + (fun channel -> Yojson.Safe.pretty_to_channel channel json) + channel + +let read_hints (filename: string) : hints_read_result = + let mk_hint nm ix fuel ifuel unsat_core time hash_opt = { + hint_name = nm; + hint_index = Z.of_int ix; + fuel = Z.of_int fuel; + ifuel = Z.of_int ifuel; + unsat_core = begin + match unsat_core with + | `Null -> + None + | `List strings -> + Some (List.map (function + | `String s -> s + | _ -> raise Exit) + strings) + | _ -> + raise Exit + end; + query_elapsed_time = Z.of_int time; + hash = hash_opt + } + in + try + let chan = open_in filename in + let json = Yojson.Safe.from_channel chan in + close_in chan; + HintsOK ( + match json with + | `List [ + `String module_digest; + `List hints + ] -> { + module_digest; + hints = List.map (function + | `Null -> None + | `List [ `String hint_name; + `Int hint_index; + `Int fuel; + `Int ifuel; + unsat_core; + `Int query_elapsed_time ] -> + (* This case is for dealing with old-style hint files + that lack a query-hashes field. 
We should remove this + case once we definitively remove support for old hints *) + Some (mk_hint hint_name hint_index fuel ifuel unsat_core query_elapsed_time None) + | `List [ `String hint_name; + `Int hint_index; + `Int fuel; + `Int ifuel; + unsat_core; + `Int query_elapsed_time; + `String hash ] -> + let hash_opt = if hash <> "" then Some(hash) else None in + Some (mk_hint hint_name hint_index fuel ifuel unsat_core query_elapsed_time hash_opt) + | _ -> + raise Exit + ) hints + } + | _ -> + raise Exit + ) + with + | Exit -> + MalformedJson + | Sys_error _ -> + UnableToOpen + diff --git a/stage0/fstar-lib/FStarC_Compiler_List.ml b/stage0/fstar-lib/FStarC_Compiler_List.ml new file mode 100644 index 00000000000..9fb1e8cf062 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Compiler_List.ml @@ -0,0 +1,3 @@ +(* We give an implementation here using OCaml's BatList, + which provides tail-recursive versions of most functions *) +include FStar_List diff --git a/stage0/fstar-lib/FStarC_Compiler_Plugins_Base.ml b/stage0/fstar-lib/FStarC_Compiler_Plugins_Base.ml new file mode 100644 index 00000000000..c089443fc2f --- /dev/null +++ b/stage0/fstar-lib/FStarC_Compiler_Plugins_Base.ml @@ -0,0 +1,9 @@ +open Dynlink + +exception DynlinkError of string + +let dynlink_loadfile (fname:string) : unit = + try + Dynlink.loadfile fname + with Dynlink.Error e -> + raise (DynlinkError (Dynlink.error_message e)) diff --git a/stage0/fstar-lib/FStarC_Compiler_Range.ml b/stage0/fstar-lib/FStarC_Compiler_Range.ml new file mode 100644 index 00000000000..7d3435eed24 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Compiler_Range.ml @@ -0,0 +1,2 @@ +include FStarC_Compiler_Range_Type +include FStarC_Compiler_Range_Ops diff --git a/ocaml/fstar-lib/FStar_String.ml b/stage0/fstar-lib/FStarC_Compiler_String.ml similarity index 100% rename from ocaml/fstar-lib/FStar_String.ml rename to stage0/fstar-lib/FStarC_Compiler_String.ml diff --git a/ocaml/fstar-lib/FStarC_Compiler_Util.ml b/stage0/fstar-lib/FStarC_Compiler_Util.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Compiler_Util.ml rename to stage0/fstar-lib/FStarC_Compiler_Util.ml diff --git a/stage0/fstar-lib/FStarC_Dyn.ml b/stage0/fstar-lib/FStarC_Dyn.ml new file mode 100644 index 00000000000..a138a7cd6c2 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Dyn.ml @@ -0,0 +1,12 @@ +type dyn = Obj.t +[@printer fun fmt _ -> Format.pp_print_string fmt ""] +[@@deriving show] + +let dyn_to_yojson _ = `Null +let dyn_of_yojson _ = failwith "cannot readback" + +let mkdyn (x:'a) : dyn = + Obj.repr x + +let undyn (d:dyn) : 'a = + Obj.obj d diff --git a/ocaml/fstar-lib/FStarC_Extraction_ML_PrintML.ml b/stage0/fstar-lib/FStarC_Extraction_ML_PrintML.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Extraction_ML_PrintML.ml rename to stage0/fstar-lib/FStarC_Extraction_ML_PrintML.ml diff --git a/stage0/fstar-lib/FStarC_Getopt.ml b/stage0/fstar-lib/FStarC_Getopt.ml new file mode 100644 index 00000000000..a60d8bdd054 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Getopt.ml @@ -0,0 +1,108 @@ +let noshort = 0 +type 'a opt_variant = + | ZeroArgs of (unit -> 'a) + | OneArg of (string -> 'a) * string +type 'a opt' = FStar_Char.char * string * 'a opt_variant +type opt = unit opt' +type parse_cmdline_res = + | Empty + | Help + | Error of string + | Success + +let bind l f = + match l with + | Help + | Error _ -> l + | Success -> f () + (* | Empty *) + (* ^ Empty does not occur internally. 
*) + +(* Returns None if this wasn't an option arg (did not start with "-") + * Otherwise, returns Some (o, s) where [s] is the trimmed option, and [o] + * is the opt we found in specs (possibly None if not present, which should + * trigger an error) *) +let find_matching_opt specs s : (opt option * string) option = + if String.length s < 2 then + None + else if String.sub s 0 2 = "--" then + (* long opts *) + let strim = String.sub s 2 ((String.length s) - 2) in + let o = FStar_List.tryFind (fun (_, option, _) -> option = strim) specs in + Some (o, strim) + else if String.sub s 0 1 = "-" then + (* short opts *) + let strim = String.sub s 1 ((String.length s) - 1) in + let o = FStar_List.tryFind (fun (shortoption, _, _) -> FStar_String.make Z.one shortoption = strim) specs in + Some (o, strim) + else + None + +(* remark: doesn't work with files starting with -- *) +let rec parse (opts:opt list) def ar ix max i : parse_cmdline_res = + if ix > max then Success + else + let arg = ar.(ix) in + let go_on () = bind (def arg) (fun _ -> parse opts def ar (ix + 1) max (i + 1)) in + match find_matching_opt opts arg with + | None -> go_on () + | Some (None, _) -> Error ("unrecognized option '" ^ arg ^ "'\n") + | Some (Some (_, _, p), argtrim) -> + begin match p with + | ZeroArgs f -> f (); parse opts def ar (ix + 1) max (i + 1) + | OneArg (f, _) -> + if ix + 1 > max + then Error ("last option '" ^ argtrim ^ "' takes an argument but has none\n") + else + let r = + try (f (ar.(ix + 1)); Success) + with _ -> Error ("wrong argument given to option `" ^ argtrim ^ "`\n") + in bind r (fun () -> parse opts def ar (ix + 2) max (i + 1)) + end + +let parse_array specs others args offset = + parse specs others args offset (Array.length args - 1) 0 + +let parse_cmdline specs others = + if Array.length Sys.argv = 1 then Empty + else parse_array specs others Sys.argv 1 + +let parse_string specs others (str:string) = + let split_spaces (str:string) = + let seps = [int_of_char ' '; int_of_char '\t'] in + FStar_List.filter (fun s -> s != "") (FStar_String.split seps str) + in + (* to match the style of the F# code in FStar.GetOpt.fs *) + let index_of str c = + try + String.index str c + with Not_found -> -1 + in + let substring_from s j = + let len = String.length s - j in + String.sub s j len + in + let rec split_quoted_fragments (str:string) = + let i = index_of str '\'' in + if i < 0 then Some (split_spaces str) + else let prefix = String.sub str 0 i in + let suffix = substring_from str (i + 1) in + let j = index_of suffix '\'' in + if j < 0 then None + else let quoted_frag = String.sub suffix 0 j in + let rest = split_quoted_fragments (substring_from suffix (j + 1)) in + match rest with + | None -> None + | Some rest -> Some (split_spaces prefix @ quoted_frag::rest) + + in + match split_quoted_fragments str with + | None -> Error("Failed to parse options; unmatched quote \"'\"") + | Some args -> + parse_array specs others (Array.of_list args) 0 + +let parse_list specs others lst = + parse_array specs others (Array.of_list lst) 0 + +let cmdline () = + Array.to_list (Sys.argv) diff --git a/stage0/fstar-lib/FStarC_Hash.ml b/stage0/fstar-lib/FStarC_Hash.ml new file mode 100644 index 00000000000..4ece1088b4a --- /dev/null +++ b/stage0/fstar-lib/FStarC_Hash.ml @@ -0,0 +1,76 @@ +module BU = FStarC_Compiler_Util +module Z = FStarC_BigInt + +type hash_code = int + +let cmp_hash (x:hash_code) (y:hash_code) : Z.t = Z.of_int (x-y) + +let to_int (i:int) = Z.of_int i + +let of_int (i:Z.t) = Z.to_int i +let of_string (s:string) 
= BatHashtbl.hash s + +(* This function is taken from Bob Jenkins' + http://burtleburtle.net/bob/hash/doobs.html + + It's defined there as a mix on 32 bit integers. + + I'm abusing it here by using it on OCaml's 63 bit + integers. + + But it seems to work well, at least in comparison + to some simpler mixes that I tried. E.g., using + this mix taken from Lean (src/runtime/hash.h) + +uint64 hash(uint64 h1, uint64 h2) { + h2 -= h1; h2 ^= (h1 << 16); + h1 -= h2; h2 ^= (h1 << 32); + h2 -= h1; h2 ^= (h1 << 20); + return h2; +} + + But, it produces many collisions, see, e.g., in + tests/FStar.Tests.Pars.test_hashes +*) +let mix (a: hash_code) (b:hash_code) = + let c = 11 in + (* a -= b; a -= c; a ^= (c >> 13); *) + let a = a - b in + let a = a - c in + (* skip this step since c lsr 13 = 0 *) + (* let a = a lxor (c lsr 13) in *) + (* b -= c; b -= a; b ^= (a << 8); *) + let b = b - c in + let b = b - a in + let b = b lxor (a lsl 8) in + (* c -= a; c -= b; c ^= (b >> 13); *) + let c = c - a in + let c = c - b in + let c = c lxor (b lsr 13) in + (* a -= b; a -= c; a ^= (c >> 12); *) + let a = a - b in + let a = a - c in + let a = a lxor (c lsr 12) in + (* b -= c; b -= a; b ^= (a << 16); *) + let b = b - c in + let b = b - a in + let b = b lxor (a lsl 16) in + (* c -= a; c -= b; c ^= (b >> 5); *) + let c = c - a in + let c = c - b in + let c = c lxor (b lsr 5) in + (* a -= b; a -= c; a ^= (c >> 3); *) + let a = a - b in + let a = a - c in + let a = a lxor (c lsr 3) in + (* b -= c; b -= a; b ^= (a << 10); *) + let b = b - c in + let b = b - a in + let b = b lxor (a lsl 10) in + (* c -= a; c -= b; c ^= (b >> 15); *) + let c = c - a in + let c = c - b in + let c = c lxor (b lsr 15) in + c + +let string_of_hash_code h = string_of_int h diff --git a/stage0/fstar-lib/FStarC_Json.ml b/stage0/fstar-lib/FStarC_Json.ml new file mode 100644 index 00000000000..120e71eefb0 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Json.ml @@ -0,0 +1,39 @@ +exception UnsupportedJson + +type json = +| JsonNull +| JsonBool of bool +| JsonInt of Z.t +| JsonStr of string +| JsonList of json list +| JsonAssoc of (string * json) list + +let json_of_yojson yjs: json option = + let rec aux yjs = + match yjs with + | `Null -> JsonNull + | `Bool b -> JsonBool b + | `Int i -> JsonInt (Z.of_int i) + | `String s -> JsonStr s + | `List l -> JsonList (List.map aux l) + | `Assoc a -> JsonAssoc (List.map (fun (k, v) -> (k, aux v)) a) + | _ -> raise UnsupportedJson in + try Some (aux yjs) with UnsupportedJson -> None + +let rec yojson_of_json js = + match js with + | JsonNull -> `Null + | JsonBool b -> `Bool b + | JsonInt i -> `Int (Z.to_int i) + | JsonStr s -> `String s + | JsonList l -> `List (List.map yojson_of_json l) + | JsonAssoc a -> `Assoc (List.map (fun (k, v) -> (k, yojson_of_json v)) a) + +let json_of_string str : json option = + let open Yojson.Basic in + try + json_of_yojson (Yojson.Basic.from_string str) + with Yojson.Json_error _ -> None + +let string_of_json json = + Yojson.Basic.to_string (yojson_of_json json) diff --git a/stage0/fstar-lib/FStarC_Parser_LexFStar.ml b/stage0/fstar-lib/FStarC_Parser_LexFStar.ml new file mode 100644 index 00000000000..25be6188616 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Parser_LexFStar.ml @@ -0,0 +1,719 @@ +open FStarC_Parser_Parse +open FStarC_Parser_Util + +module Option = BatOption +module String = BatString +module Hashtbl = BatHashtbl +module Sedlexing = FStarC_Sedlexing +module L = Sedlexing +module E = FStarC_Errors +module Codes = FStarC_Errors_Codes +module BU = FStarC_Compiler_Util + 
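+
+(* A brief sketch of how the FStarC_Hash combinators defined earlier in this
+   patch are meant to be used (the [_hash_mix_example] binding is hypothetical,
+   added purely for illustration): hash codes are ordinary OCaml ints,
+   [of_string] seeds one via BatHashtbl.hash, and [mix] folds two codes into
+   a single one. *)
+let _hash_mix_example () =
+  let h1 = FStarC_Hash.of_string "FStar" in
+  let h2 = FStarC_Hash.of_string "Syntax" in
+  print_endline (FStarC_Hash.string_of_hash_code (FStarC_Hash.mix h1 h2))
+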
+let ba_of_string s = Array.init (String.length s) (fun i -> Char.code (String.get s i)) +let array_trim_both a n m = Array.sub a n (Array.length a - n - m) +let string_trim_both s n m = BatString.sub s n (String.length s - (n+m)) +let trim_both lexbuf n m = string_trim_both (L.lexeme lexbuf) n m +let utrim_both lexbuf n m = array_trim_both (L.ulexeme lexbuf) n m +let trim_right lexbuf n = trim_both lexbuf 0 n +let trim_left lexbuf n = trim_both lexbuf n 0 + +let unescape (a:int array) : int = + match a.(0) with + | 92 (* \ *) -> + (match a.(1) with + | 48 (*0*) -> 0 + | 98 (*b*) -> 8 + | 116 (*t*) -> 9 + | 110 (*n*) -> 10 + | 118 (*v*) -> 11 + | 102 (*f*) -> 12 + | 114 (*r*) -> 13 + | 117 (*u*) -> + let s = FStarC_Parser_Utf8.from_int_array a 2 4 in + int_of_string ("0x"^s) + | 120 (*x*) -> + let s = FStarC_Parser_Utf8.from_int_array a 2 2 in + int_of_string ("0x"^s) + | c -> c) + | c -> c + +let keywords = Hashtbl.create 0 +let constructors = Hashtbl.create 0 +let operators = Hashtbl.create 0 + +let () = + Hashtbl.add keywords "attributes" ATTRIBUTES ; + Hashtbl.add keywords "noeq" NOEQUALITY ; + Hashtbl.add keywords "unopteq" UNOPTEQUALITY ; + Hashtbl.add keywords "and" AND ; + Hashtbl.add keywords "assert" ASSERT ; + Hashtbl.add keywords "assume" ASSUME ; + Hashtbl.add keywords "begin" BEGIN ; + Hashtbl.add keywords "by" BY ; + Hashtbl.add keywords "calc" CALC ; + Hashtbl.add keywords "class" CLASS ; + Hashtbl.add keywords "default" DEFAULT ; + Hashtbl.add keywords "decreases" DECREASES ; + Hashtbl.add keywords "effect" EFFECT ; + Hashtbl.add keywords "eliminate" ELIM; + Hashtbl.add keywords "else" ELSE ; + Hashtbl.add keywords "end" END ; + Hashtbl.add keywords "ensures" ENSURES ; + Hashtbl.add keywords "exception" EXCEPTION ; + Hashtbl.add keywords "exists" (EXISTS false); + Hashtbl.add keywords "false" FALSE ; + Hashtbl.add keywords "friend" FRIEND ; + Hashtbl.add keywords "forall" (FORALL false); + Hashtbl.add keywords "fun" FUN ; + Hashtbl.add keywords "λ" FUN ; + Hashtbl.add keywords "function" FUNCTION ; + Hashtbl.add keywords "if" IF ; + Hashtbl.add keywords "in" IN ; + Hashtbl.add keywords "include" INCLUDE ; + Hashtbl.add keywords "inline" INLINE ; + Hashtbl.add keywords "inline_for_extraction" INLINE_FOR_EXTRACTION ; + Hashtbl.add keywords "instance" INSTANCE ; + Hashtbl.add keywords "introduce" INTRO ; + Hashtbl.add keywords "irreducible" IRREDUCIBLE ; + Hashtbl.add keywords "let" (LET false) ; + Hashtbl.add keywords "logic" LOGIC ; + Hashtbl.add keywords "match" MATCH ; + Hashtbl.add keywords "returns" RETURNS ; + Hashtbl.add keywords "as" AS ; + Hashtbl.add keywords "module" MODULE ; + Hashtbl.add keywords "new" NEW ; + Hashtbl.add keywords "new_effect" NEW_EFFECT ; + Hashtbl.add keywords "layered_effect" LAYERED_EFFECT ; + Hashtbl.add keywords "polymonadic_bind" POLYMONADIC_BIND ; + Hashtbl.add keywords "polymonadic_subcomp" POLYMONADIC_SUBCOMP ; + Hashtbl.add keywords "noextract" NOEXTRACT ; + Hashtbl.add keywords "of" OF ; + Hashtbl.add keywords "open" OPEN ; + Hashtbl.add keywords "opaque" OPAQUE ; + Hashtbl.add keywords "private" PRIVATE ; + Hashtbl.add keywords "quote" QUOTE ; + Hashtbl.add keywords "range_of" RANGE_OF ; + Hashtbl.add keywords "rec" REC ; + Hashtbl.add keywords "reifiable" REIFIABLE ; + Hashtbl.add keywords "reify" REIFY ; + Hashtbl.add keywords "reflectable" REFLECTABLE ; + Hashtbl.add keywords "requires" REQUIRES ; + Hashtbl.add keywords "set_range_of" SET_RANGE_OF; + Hashtbl.add keywords "sub_effect" SUB_EFFECT ; + Hashtbl.add keywords "synth" 
SYNTH ; + Hashtbl.add keywords "then" THEN ; + Hashtbl.add keywords "total" TOTAL ; + Hashtbl.add keywords "true" TRUE ; + Hashtbl.add keywords "try" TRY ; + Hashtbl.add keywords "type" TYPE ; + Hashtbl.add keywords "unfold" UNFOLD ; + Hashtbl.add keywords "unfoldable" UNFOLDABLE ; + Hashtbl.add keywords "val" VAL ; + Hashtbl.add keywords "when" WHEN ; + Hashtbl.add keywords "with" WITH ; + Hashtbl.add keywords "_" UNDERSCORE ; + Hashtbl.add keywords "α" (TVAR "a") ; + Hashtbl.add keywords "β" (TVAR "b") ; + Hashtbl.add keywords "γ" (TVAR "c") ; + Hashtbl.add keywords "δ" (TVAR "d") ; + Hashtbl.add keywords "ε" (TVAR "e") ; + Hashtbl.add keywords "φ" (TVAR "f") ; + Hashtbl.add keywords "χ" (TVAR "g") ; + Hashtbl.add keywords "η" (TVAR "h") ; + Hashtbl.add keywords "ι" (TVAR "i") ; + Hashtbl.add keywords "κ" (TVAR "k") ; + Hashtbl.add keywords "μ" (TVAR "m") ; + Hashtbl.add keywords "ν" (TVAR "n") ; + Hashtbl.add keywords "π" (TVAR "p") ; + Hashtbl.add keywords "θ" (TVAR "q") ; + Hashtbl.add keywords "ρ" (TVAR "r") ; + Hashtbl.add keywords "σ" (TVAR "s") ; + Hashtbl.add keywords "τ" (TVAR "t") ; + Hashtbl.add keywords "ψ" (TVAR "u") ; + Hashtbl.add keywords "ω" (TVAR "w") ; + Hashtbl.add keywords "ξ" (TVAR "x") ; + Hashtbl.add keywords "ζ" (TVAR "z") ; + Hashtbl.add constructors "ℕ" (IDENT "nat"); + Hashtbl.add constructors "ℤ" (IDENT "int"); + Hashtbl.add constructors "𝔹" (IDENT "bool"); + let l = + ["~", TILDE "~"; + "-", MINUS; + "/\\", CONJUNCTION; + "\\/", DISJUNCTION; + "<:", SUBTYPE; + "$:", EQUALTYPE; + "<@", SUBKIND; + "(|", LENS_PAREN_LEFT; + "|)", LENS_PAREN_RIGHT; + "#", HASH; + "u#", UNIV_HASH; + "&", AMP; + "()", LPAREN_RPAREN; + "(", LPAREN; + ")", RPAREN; + ",", COMMA; + "~>", SQUIGGLY_RARROW; + "->", RARROW; + "<--", LONG_LEFT_ARROW; + "<-", LARROW; + "<==>", IFF; + "==>", IMPLIES; + ".", DOT; + "?.", QMARK_DOT; + "?", QMARK; + ".[", DOT_LBRACK; + ".(|", DOT_LENS_PAREN_LEFT; + ".(", DOT_LPAREN; + ".[|", DOT_LBRACK_BAR; + "{:pattern", LBRACE_COLON_PATTERN; + "{:well-founded", LBRACE_COLON_WELL_FOUNDED; + "returns$", RETURNS_EQ; + ":", COLON; + "::", COLON_COLON; + ":=", COLON_EQUALS; + ";", SEMICOLON; + "=", EQUALS; + "%[", PERCENT_LBRACK; + "!{", BANG_LBRACE; + "[@@@", LBRACK_AT_AT_AT; + "[@@", LBRACK_AT_AT; + "[@", LBRACK_AT; + "[", LBRACK; + "[|", LBRACK_BAR; + "{|", LBRACE_BAR; + "|>", PIPE_RIGHT; + "]", RBRACK; + "|]", BAR_RBRACK; + "|}", BAR_RBRACE; + "{", LBRACE; + "|", BAR; + "}", RBRACE; + "$", DOLLAR; + (* New Unicode equivalents *) + "∀", (FORALL false); + "∃", (EXISTS false); + "⊤", NAME "True"; + "⊥", NAME "False"; + "⟹", IMPLIES; + "⟺", IFF; + "→", RARROW; + "←", LARROW; + "⟵", LONG_LEFT_ARROW; + "↝", SQUIGGLY_RARROW; + "≔", COLON_EQUALS; + "∧", CONJUNCTION; + "∨", DISJUNCTION; + "¬", TILDE "~"; + "⸬", COLON_COLON; + "▹", PIPE_RIGHT; + "÷", OPINFIX3 "÷"; + "‖", OPINFIX0a "||"; + "×", IDENT "op_Multiply"; + "∗", OPINFIX3 "*"; + "⇒", OPINFIX0c "=>"; + "≥", OPINFIX0c ">="; + "≤", OPINFIX0c "<="; + "≠", OPINFIX0c "<>"; + "≪", OPINFIX0c "<<"; + "◃", OPINFIX0c "<|"; + "±", OPPREFIX "±"; + "∁", OPPREFIX "∁"; + "∂", OPPREFIX "∂"; + "√", OPPREFIX "√"; + ] in + List.iter (fun (k,v) -> Hashtbl.add operators k v) l + +let current_range lexbuf = + FStarC_Parser_Util.mksyn_range (fst (L.range lexbuf)) (snd (L.range lexbuf)) + +let fail lexbuf (e, msg) = + let m = current_range lexbuf in + E.raise_error_text m e msg + +type delimiters = { angle:int ref; paren:int ref; } +let n_typ_apps = ref 0 + +let is_typ_app_gt () = + if !n_typ_apps > 0 + then (decr n_typ_apps; true) + 
else false
+
+let rec mknewline n lexbuf =
+  if n = 0 then ()
+  else (L.new_line lexbuf; mknewline (n-1) lexbuf)
+
+let clean_number x = String.strip ~chars:"uzyslLUnIN" x
+
+(* Try to trim each line of [comment] by the amount of space
+   on the first line of the comment if possible *)
+(* TODO : apply this to FSDOC too *)
+let maybe_trim_lines start_column comment =
+  if start_column = 0 then comment
+  else
+    let comment_lines = String.split_on_char '\n' comment in
+    let ensures_empty_prefix k s =
+      let j = min k (String.length s - 1) in
+      let rec aux i = if i > j then k else if s.[i] <> ' ' then i else aux (i+1) in
+      aux 0 in
+    let trim_width = List.fold_left ensures_empty_prefix start_column comment_lines in
+    String.concat "\n" (List.map (fun s -> String.tail s trim_width) comment_lines)
+
+let comment_buffer = Buffer.create 128
+let blob_buffer = Buffer.create 128
+let use_lang_buffer = Buffer.create 128
+
+let start_comment lexbuf =
+  Buffer.add_string comment_buffer "(*" ;
+  (false, comment_buffer, fst (L.range lexbuf))
+
+let terminate_comment buffer startpos lexbuf =
+  let endpos = snd (L.range lexbuf) in
+  Buffer.add_string buffer "*)" ;
+  let comment = Buffer.contents buffer in
+  let comment = maybe_trim_lines (startpos.Lexing.pos_cnum - startpos.Lexing.pos_bol) comment in
+  Buffer.clear buffer;
+  add_comment (comment, FStarC_Parser_Util.mksyn_range startpos endpos)
+
+let push_one_line_comment pre lexbuf =
+  let startpos, endpos = L.range lexbuf in
+  assert (startpos.Lexing.pos_lnum = endpos.Lexing.pos_lnum);
+  add_comment (pre ^ L.lexeme lexbuf, FStarC_Parser_Util.mksyn_range startpos endpos)
+
+(** Unicode class definitions
+    Auto-generated from http://www.unicode.org/Public/8.0.0/ucd/UnicodeData.txt **)
+(** Ll **)
+let u_lower = [%sedlex.regexp? ll]
+(** Lu *)
+let u_upper = [%sedlex.regexp? lu]
+(** Lo *)
+let u_other = [%sedlex.regexp? lo]
+(** Lm *)
+let u_modifier = [%sedlex.regexp? lm]
+(** Lt *)
+let u_title = [%sedlex.regexp? lt]
+(** Zs *)
+let u_space = [%sedlex.regexp? zs]
+(** These are not unicode spaces but we accept as whitespace in F* source (e.g. tab and BOM) *)
+let u_space_extra = [%sedlex.regexp? '\t' | '\x0B' | '\x0C' | '\xA0' | 0xfeff]
+(** Zl and Zp *)
+let u_line_sep = [%sedlex.regexp? zl]
+let u_par_sep = [%sedlex.regexp? zp]
+(** Sm math symbols *)
+let u_math = [%sedlex.regexp? sm]
+let u_math_ascii = [%sedlex.regexp? 0x002b | 0x003c .. 0x003e | 0x007c | 0x007e]
+let u_math_nonascii = [%sedlex.regexp? Sub(u_math, u_math_ascii)]
+(** Sc currency *)
+let u_currency = [%sedlex.regexp? sc]
+(** Sk *)
+let u_modifier_symbol = [%sedlex.regexp? sk]
+(** So *)
+let u_other_symbol = [%sedlex.regexp? so]
+(** Nd *)
+let u_decimal_digit = [%sedlex.regexp? nd]
+(** Nl *)
+let u_digit_letter = [%sedlex.regexp? nl]
+(** No *)
+let u_other_digit = [%sedlex.regexp? no]
+(** Pd *)
+let u_punct_hyphen = [%sedlex.regexp? pd]
+(** Ps *)
+let u_punct_obra = [%sedlex.regexp? ps]
+(** Pe *)
+let u_punct_cbra = [%sedlex.regexp? pe]
+(** Pi *)
+let u_punct_oquot = [%sedlex.regexp? pi]
+(** Pf *)
+let u_punct_cquot = [%sedlex.regexp? pf]
+(** Pc *)
+let u_punct_connect = [%sedlex.regexp? pc]
+(** Po *)
+let u_punct_other = [%sedlex.regexp? po]
+(** Mn *)
+let u_mod_nospace = [%sedlex.regexp? mn]
+(** Mc *)
+let u_mod = [%sedlex.regexp? mc]
+(** Me *)
+let u_mod_enclose = [%sedlex.regexp? me]
+(** Cc *)
+let u_ascii_control = [%sedlex.regexp? cc]
+(** Cf *)
+let u_format_control = [%sedlex.regexp? cf]
+(** Co *)
+let u_private_use = [%sedlex.regexp? co]
+(** Cs *)
+let u_surrogate = [%sedlex.regexp? cs]
+
+(* -------------------------------------------------------------------- *)
+let lower = [%sedlex.regexp? u_lower]
+let upper = [%sedlex.regexp? u_upper | u_title]
+let letter = [%sedlex.regexp? u_lower | u_upper | u_other | u_modifier]
+let digit = [%sedlex.regexp? '0'..'9']
+let hex = [%sedlex.regexp? '0'..'9' | 'A'..'F' | 'a'..'f']
+
+(* -------------------------------------------------------------------- *)
+let anywhite = [%sedlex.regexp? u_space | u_space_extra]
+let newline = [%sedlex.regexp? "\r\n" | 10 | 13 | 0x2028 | 0x2029]
+
+(* -------------------------------------------------------------------- *)
+let op_char = [%sedlex.regexp? Chars "!$%&*+-.<>=?^|~:@#\\/"]
+
+(* op_token must be split into separate regular expressions to prevent
+   compilation from hanging *)
+let op_token_1 = [%sedlex.regexp? "~" | "-" | "/\\" | "\\/" | "<:" | "$:" | "<@" | "(|" | "|)" | "#" ]
+let op_token_2 = [%sedlex.regexp? "u#" | "&" | "()" | "(" | ")" | "," | "~>" | "->" | "<--" ]
+let op_token_3 = [%sedlex.regexp? "<-" | "<==>" | "==>" | "." | "?." | "?" | ".[|" | ".[" | ".(|" | ".(" ]
+let op_token_4 = [%sedlex.regexp? "$" | "{:pattern" | "{:well-founded" | ":" | "::" | ":=" | ";;" | ";" | "=" | "%[" | "returns$" ]
+let op_token_5 = [%sedlex.regexp? "!{" | "[@@@" | "[@@" | "[@" | "[|" | "{|" | "[" | "|>" | "]" | "|]" | "|}" | "{" | "|" | "}" ]
+
+(* -------------------------------------------------------------------- *)
+let xinteger =
+  [%sedlex.regexp?
+    ( '0', ('x'| 'X'), Plus hex
+    | '0', ('o'| 'O'), Plus ('0' .. '7')
+    | '0', ('b'| 'B'), Plus ('0' .. '1') )]
+let integer = [%sedlex.regexp? Plus digit]
+let any_integer = [%sedlex.regexp? xinteger | integer]
+let unsigned = [%sedlex.regexp? Chars "uU"]
+let int8 = [%sedlex.regexp? any_integer, 'y']
+let uint8 = [%sedlex.regexp? any_integer, unsigned, 'y']
+let int16 = [%sedlex.regexp? any_integer, 's']
+let uint16 = [%sedlex.regexp? any_integer, unsigned, 's']
+let int32 = [%sedlex.regexp? any_integer, 'l']
+let uint32 = [%sedlex.regexp? any_integer, unsigned, 'l']
+let int64 = [%sedlex.regexp? any_integer, 'L']
+let uint64 = [%sedlex.regexp? any_integer, unsigned, 'L']
+let char8 = [%sedlex.regexp? any_integer, 'z']
+let sizet = [%sedlex.regexp? any_integer, "sz"]
+
+let floatp = [%sedlex.regexp? Plus digit, '.', Star digit]
+let floate = [%sedlex.regexp? Plus digit, Opt ('.', Star digit), Chars "eE", Opt (Chars "+-"), Plus digit]
+let real = [%sedlex.regexp? floatp, 'R']
+let ieee64 = [%sedlex.regexp? floatp | floate]
+let xieee64 = [%sedlex.regexp? xinteger, 'L', 'F']
+let range = [%sedlex.regexp? Plus digit, '.', '.', Plus digit]
+
+let op_prefix = [%sedlex.regexp? Chars "!~?"]
+let op_infix0a = [%sedlex.regexp? Chars "|"] (* left *)
+let op_infix0b = [%sedlex.regexp? Chars "&"] (* left *)
+let op_infix0c = [%sedlex.regexp? Chars "=<>"] (* left *)
+let op_infix0c_nogt = [%sedlex.regexp? Chars "=<"] (* left *)
+let op_infix0d = [%sedlex.regexp? Chars "$"] (* left *)
+
+let op_infix0 = [%sedlex.regexp? op_infix0a | op_infix0b | op_infix0c | op_infix0d]
+let op_infix1 = [%sedlex.regexp? Chars "@^"] (* right *)
+let op_infix2 = [%sedlex.regexp? Chars "+-"] (* left *)
+let op_infix3 = [%sedlex.regexp? Chars "*/%"] (* left *)
+let symbolchar = [%sedlex.regexp? op_prefix | op_infix0 | op_infix1 | op_infix2 | op_infix3 | Chars ".:"]
+let uoperator = [%sedlex.regexp? u_math_nonascii]
+
+(* -------------------------------------------------------------------- *)
+let escape_char = [%sedlex.regexp?
'\\', (Chars "\\\"'bfntrv0" | "x", hex, hex | "u", hex, hex, hex, hex)] +let char = [%sedlex.regexp? Compl '\\' | escape_char] + +(* -------------------------------------------------------------------- *) +let constructor_start_char = [%sedlex.regexp? upper] +let ident_start_char = [%sedlex.regexp? lower | '_'] +let ident_char = [%sedlex.regexp? letter | digit | '\'' | '_'] +let tvar_char = [%sedlex.regexp? letter | digit | '\'' | '_'] + +let constructor = [%sedlex.regexp? constructor_start_char, Star ident_char] +let ident = [%sedlex.regexp? ident_start_char, Star ident_char] +let tvar = [%sedlex.regexp? '\'', (ident_start_char | constructor_start_char), Star tvar_char] + +(* [ensure_no_comment lexbuf next] takes a [lexbuf] and [next], a + continuation. It is to be called after a regexp was matched, to + ensure match text does not contain any comment start. + + If the match [s] contains a comment start (an occurence of [//]) + then we place the lexer at that comment start. We continue with + [next s], [s] being either the whole match, or the chunk before + [//]. +*) +let ensure_no_comment lexbuf (next: string -> token): token = + let s = L.lexeme lexbuf in + next (try let before, _after = BatString.split s "//" in + (* rollback to the begining of the match *) + L.rollback lexbuf; + (* skip [n] characters in the lexer, with [n] being [hd]'s len *) + BatString.iter (fun _ -> let _ = L.next lexbuf in ()) before; + before with | Not_found -> s) + +let rec token lexbuf = +match%sedlex lexbuf with + | "%splice" -> SPLICE + | "%splice_t" -> SPLICET + | "```", ident -> + let s = L.lexeme lexbuf in + let name = BatString.lchop ~n:3 s in + Buffer.clear blob_buffer; + let snap = Sedlexing.snapshot lexbuf in + let pos = L.current_pos lexbuf in + uninterpreted_blob snap name pos blob_buffer lexbuf + | "`%" -> BACKTICK_PERC + | "`#" -> BACKTICK_HASH + | "`@" -> BACKTICK_AT + | "#lang-", ident -> ( + let s = L.lexeme lexbuf in + let lang_name = BatString.lchop ~n:6 s in + let snap = Sedlexing.snapshot lexbuf in + Buffer.clear use_lang_buffer; + let pos = L.current_pos lexbuf in + use_lang_blob snap lang_name pos use_lang_buffer lexbuf + ) + + | "seq![" -> SEQ_BANG_LBRACK + + | "#show-options" -> PRAGMA_SHOW_OPTIONS + | "#set-options" -> PRAGMA_SET_OPTIONS + | "#reset-options" -> PRAGMA_RESET_OPTIONS + | "#push-options" -> PRAGMA_PUSH_OPTIONS + | "#pop-options" -> PRAGMA_POP_OPTIONS + | "#restart-solver" -> PRAGMA_RESTART_SOLVER + | "#print-effects-graph" -> PRAGMA_PRINT_EFFECTS_GRAPH + | "__SOURCE_FILE__" -> STRING (BU.basename (L.source_file lexbuf)) + | "__LINE__" -> INT (string_of_int (L.current_line lexbuf), false) + | "__FILELINE__" -> STRING (BU.basename (L.source_file lexbuf) ^ "(" ^ (string_of_int (L.current_line lexbuf)) ^ ")") + + | Plus anywhite -> token lexbuf + | newline -> L.new_line lexbuf; token lexbuf + + (* Must appear before tvar to avoid 'a <-> 'a' conflict *) + | ('\'', char, '\'') -> CHAR (unescape (utrim_both lexbuf 1 1)) + | ('\'', char, '\'', 'B') -> CHAR (unescape (utrim_both lexbuf 1 2)) + | '`' -> BACKTICK + + | "match", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:5 s with + | "" -> MATCH + | s -> MATCH_OP s + ) + + | "if", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:2 s with + | "" -> IF + | s -> IF_OP s + ) + + | "let", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:3 s with + | "" -> LET false + | s -> LET_OP s + ) + + | "exists", Plus op_char -> + ensure_no_comment 
lexbuf (fun s -> + match BatString.lchop ~n:6 s with + | "" -> EXISTS false + | s -> EXISTS_OP s + ) + + | "∃", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:1 s with + | "" -> EXISTS false + | s -> EXISTS_OP s + ) + + | "forall", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:6 s with + | "" -> FORALL false + | s -> FORALL_OP s + ) + + | "∀", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:1 s with + | "" -> FORALL false + | s -> FORALL_OP s + ) + + | "and", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:3 s with + | "" -> AND + | s -> AND_OP s + ) + + | ";", Plus op_char -> + ensure_no_comment lexbuf (fun s -> + match BatString.lchop ~n:1 s with + | "" -> SEMICOLON + | s -> SEMICOLON_OP (Some s) + ) + + | ";;" -> SEMICOLON_OP None + + | ident -> let id = L.lexeme lexbuf in + if BU.starts_with id FStarC_Ident.reserved_prefix + then FStarC_Errors.raise_error_text (current_range lexbuf) Codes.Fatal_ReservedPrefix + (FStarC_Ident.reserved_prefix ^ " is a reserved prefix for an identifier"); + Hashtbl.find_option keywords id |> Option.default (IDENT id) + | constructor -> let id = L.lexeme lexbuf in + Hashtbl.find_option constructors id |> Option.default (NAME id) + + | tvar -> TVAR (L.lexeme lexbuf) + | (integer | xinteger) -> INT (clean_number (L.lexeme lexbuf), false) + | (uint8 | char8) -> + let c = clean_number (L.lexeme lexbuf) in + let cv = int_of_string c in + if cv < 0 || cv > 255 then fail lexbuf (Codes.Fatal_SyntaxError, "Out-of-range character literal") + else UINT8 (c) + | int8 -> INT8 (clean_number (L.lexeme lexbuf), false) + | uint16 -> UINT16 (clean_number (L.lexeme lexbuf)) + | int16 -> INT16 (clean_number (L.lexeme lexbuf), false) + | uint32 -> UINT32 (clean_number (L.lexeme lexbuf)) + | int32 -> INT32 (clean_number (L.lexeme lexbuf), false) + | uint64 -> UINT64 (clean_number (L.lexeme lexbuf)) + | int64 -> INT64 (clean_number (L.lexeme lexbuf), false) + | sizet -> SIZET (clean_number (L.lexeme lexbuf)) + | range -> RANGE (L.lexeme lexbuf) + | real -> REAL(trim_right lexbuf 1) + | (integer | xinteger | ieee64 | xieee64), Plus ident_char -> + fail lexbuf (Codes.Fatal_SyntaxError, "This is not a valid numeric literal: " ^ L.lexeme lexbuf) + + | "(*" -> + let inner, buffer, startpos = start_comment lexbuf in + comment inner buffer startpos lexbuf + + | "// IN F*:" -> token lexbuf + | "//" -> + (* Only match on "//" to allow the longest-match rule to catch IN F*. This + * creates a lexing conflict with op_infix3 which is caught below. *) + one_line_comment (L.lexeme lexbuf) lexbuf + + | '"' -> string (Buffer.create 0) lexbuf.Sedlexing.start_p lexbuf + + | '`', '`', (Plus (Compl ('`' | 10 | 13 | 0x2028 | 0x2029) | '`', Compl ('`' | 10 | 13 | 0x2028 | 0x2029))), '`', '`' -> + IDENT (trim_both lexbuf 2 2) + + (* Pipe operators have special treatment in the parser. *) + | "<|" -> PIPE_LEFT + | "|>" -> PIPE_RIGHT + + | op_token_1 + | op_token_2 + | op_token_3 + | op_token_4 + | op_token_5 -> L.lexeme lexbuf |> Hashtbl.find operators + + | "<" -> OPINFIX0c("<") + | ">" -> if is_typ_app_gt () + then TYP_APP_GREATER + else begin match%sedlex lexbuf with + | Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0c (">" ^ s)) + | _ -> assert false end + + (* Operators. 
*) + | op_prefix, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPPREFIX s) + | op_infix0a, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0a s) + | op_infix0b, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0b s) + | op_infix0c_nogt, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0c s) + | op_infix0d, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0d s) + | op_infix1, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX1 s) + | op_infix2, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX2 s) + | op_infix3, Star symbolchar -> ensure_no_comment lexbuf (function + | "" -> one_line_comment "" lexbuf + | s -> OPINFIX3 s + ) + | "**" , Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX4 s) + + (* Unicode Operators *) + | uoperator -> let id = L.lexeme lexbuf in + Hashtbl.find_option operators id |> Option.default (OPINFIX4 id) + + | ".[]<-" -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf) + | ".()<-" -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf) + | ".(||)<-" -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf) + | ".[||]<-" -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf) + | ".[]" -> OP_MIXFIX_ACCESS (L.lexeme lexbuf) + | ".()" -> OP_MIXFIX_ACCESS (L.lexeme lexbuf) + | ".(||)" -> OP_MIXFIX_ACCESS (L.lexeme lexbuf) + | ".[||]" -> OP_MIXFIX_ACCESS (L.lexeme lexbuf) + + | eof -> EOF + | _ -> fail lexbuf (Codes.Fatal_SyntaxError, "unexpected char") + +and one_line_comment pre lexbuf = +match%sedlex lexbuf with + | Star (Compl (10 | 13 | 0x2028 | 0x2029)) -> push_one_line_comment pre lexbuf; token lexbuf + | _ -> assert false + +and string buffer start_pos lexbuf = +match%sedlex lexbuf with + | '\\', newline, Star anywhite -> L.new_line lexbuf; string buffer start_pos lexbuf + | newline -> + Buffer.add_string buffer (L.lexeme lexbuf); + L.new_line lexbuf; string buffer start_pos lexbuf + | escape_char -> + Buffer.add_string buffer (BatUTF8.init 1 (fun _ -> unescape (L.ulexeme lexbuf) |> BatUChar.chr)); + string buffer start_pos lexbuf + | '"' -> + (* position info must be set since the start of the string *) + lexbuf.Sedlexing.start_p <- start_pos; + STRING (Buffer.contents buffer) + | eof -> fail lexbuf (Codes.Fatal_SyntaxError, "unterminated string") + | any -> + Buffer.add_string buffer (L.lexeme lexbuf); + string buffer start_pos lexbuf + | _ -> assert false + +and comment inner buffer startpos lexbuf = +match%sedlex lexbuf with + | "(*" -> + Buffer.add_string buffer "(*" ; + let _ = comment true buffer startpos lexbuf in + comment inner buffer startpos lexbuf + | newline -> + L.new_line lexbuf; + Buffer.add_string buffer (L.lexeme lexbuf); + comment inner buffer startpos lexbuf + | "*)" -> + terminate_comment buffer startpos lexbuf; + if inner then EOF else token lexbuf + | eof -> + terminate_comment buffer startpos lexbuf; EOF + | any -> + Buffer.add_string buffer (L.lexeme lexbuf); + comment inner buffer startpos lexbuf + | _ -> assert false + +and uninterpreted_blob snap name pos buffer lexbuf = +match %sedlex lexbuf with + | "```" -> + BLOB(name, Buffer.contents buffer, pos, snap) + | eof -> + E.raise_error_text (current_range lexbuf) Codes.Fatal_SyntaxError + "Syntax error: unterminated extension syntax" + | newline -> + L.new_line lexbuf; + Buffer.add_string buffer (L.lexeme lexbuf); + uninterpreted_blob snap name pos buffer lexbuf + | any -> + Buffer.add_string buffer (L.lexeme lexbuf); + uninterpreted_blob snap name pos buffer lexbuf + | _ -> assert false + +and use_lang_blob snap name pos buffer lexbuf = +match 
%sedlex lexbuf with + | eof -> + L.rollback lexbuf; (* leave the eof to be consumed later *) + USE_LANG_BLOB(name, Buffer.contents buffer, pos, snap) + | newline -> + L.new_line lexbuf; + Buffer.add_string buffer (L.lexeme lexbuf); + use_lang_blob snap name pos buffer lexbuf + | any -> + Buffer.add_string buffer (L.lexeme lexbuf); + use_lang_blob snap name pos buffer lexbuf + | _ -> assert false + +and ignore_endline lexbuf = +match%sedlex lexbuf with + | Star ' ', newline -> token lexbuf + | _ -> assert false diff --git a/stage0/fstar-lib/FStarC_Parser_Parse.mly b/stage0/fstar-lib/FStarC_Parser_Parse.mly new file mode 100644 index 00000000000..ccf4d38af78 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Parser_Parse.mly @@ -0,0 +1,1730 @@ +%{ +(* + Menhir reports the following warnings: + + Warning: 5 states have shift/reduce conflicts. + Warning: 6 shift/reduce conflicts were arbitrarily resolved. + Warning: 221 end-of-stream conflicts were arbitrarily resolved. + + If you're editing this file, be sure to not increase the warnings, + except if you have a really good reason. + + The shift-reduce conflicts are natural in an ML-style language. E.g., + there are S-R conflicts with dangling elses, with a non-delimited match where + the BAR is dangling etc. + + Note: Some symbols are marked public, so that we can reuse this parser from + the parser for the Pulse DSL in FStarLang/steel. + +*) +(* (c) Microsoft Corporation. All rights reserved *) +open Prims +open FStar_Pervasives +open FStarC_Errors +open FStarC_Compiler_List +open FStarC_Compiler_Util +open FStarC_Compiler_Range + +(* TODO : these files should be deprecated and removed *) +open FStarC_Parser_Const +open FStarC_Parser_AST +open FStarC_Const +open FStarC_Ident + +(* Shorthands *) +let rr = FStarC_Parser_Util.translate_range +let rr2 = FStarC_Parser_Util.translate_range2 + +let logic_qualifier_deprecation_warning = + "logic qualifier is deprecated, please remove it from the source program. In case your program verifies with the qualifier annotated but not without it, please try to minimize the example and file a github issue." + +let mk_meta_tac m = Meta m + +let old_attribute_syntax_warning = + "The `[@ ...]` syntax of attributes is deprecated. \ + Use `[@@ a1; a2; ...; an]`, a semi-colon separated list of attributes, instead" + +let do_notation_deprecation_warning = + "The lightweight do notation [x <-- y; z] or [x ;; z] is deprecated, use let operators (i.e. [let* x = y in z] or [y ;* z], [*] being any sequence of operator characters) instead." + +let none_to_empty_list x = + match x with + | None -> [] + | Some l -> l + +let parse_extension_blob (extension_name:string) + (s:string) + (blob_range:range) + (extension_syntax_start:range) : FStarC_Parser_AST.decl' = + DeclSyntaxExtension (extension_name, s, blob_range, extension_syntax_start) + +let parse_use_lang_blob (extension_name:string) + (s:string) + (blob_range:range) + (extension_syntax_start:range) +: FStarC_Parser_AST.decl list += FStarC_Parser_AST_Util.parse_extension_lang extension_name s extension_syntax_start + +%} + +%token STRING +%token IDENT +%token NAME +%token TVAR +%token TILDE + +/* bool indicates if INT8 was 'bad' max_int+1, e.g. 
'128' */ +%token INT8 +%token INT16 +%token INT32 +%token INT64 +%token INT +%token RANGE + +%token UINT8 +%token UINT16 +%token UINT32 +%token UINT64 +%token SIZET +%token REAL +%token CHAR +%token LET +%token LET_OP +%token AND_OP +%token MATCH_OP +%token IF_OP +%token EXISTS +%token EXISTS_OP +%token FORALL +%token FORALL_OP + + +/* [SEMICOLON_OP] encodes either: +- [;;], which used to be SEMICOLON_SEMICOLON, or +- [;], with a sequence of [op_char] (see FStarC_Parser_LexFStar). +*/ +%token SEMICOLON_OP + +%token ASSUME NEW LOGIC ATTRIBUTES +%token IRREDUCIBLE UNFOLDABLE INLINE OPAQUE UNFOLD INLINE_FOR_EXTRACTION +%token NOEXTRACT +%token NOEQUALITY UNOPTEQUALITY +%token PRAGMA_SHOW_OPTIONS PRAGMA_SET_OPTIONS PRAGMA_RESET_OPTIONS PRAGMA_PUSH_OPTIONS PRAGMA_POP_OPTIONS PRAGMA_RESTART_SOLVER PRAGMA_PRINT_EFFECTS_GRAPH +%token TYP_APP_LESS TYP_APP_GREATER SUBTYPE EQUALTYPE SUBKIND BY +%token AND ASSERT SYNTH BEGIN ELSE END +%token EXCEPTION FALSE FUN FUNCTION IF IN MODULE DEFAULT +%token MATCH OF +%token FRIEND OPEN REC THEN TRUE TRY TYPE CALC CLASS INSTANCE EFFECT VAL +%token INTRO ELIM +%token INCLUDE +%token WHEN AS RETURNS RETURNS_EQ WITH HASH AMP LPAREN RPAREN LPAREN_RPAREN COMMA LONG_LEFT_ARROW LARROW RARROW +%token IFF IMPLIES CONJUNCTION DISJUNCTION +%token DOT COLON COLON_COLON SEMICOLON +%token QMARK_DOT +%token QMARK +%token EQUALS PERCENT_LBRACK LBRACK_AT LBRACK_AT_AT LBRACK_AT_AT_AT DOT_LBRACK +%token DOT_LENS_PAREN_LEFT DOT_LPAREN DOT_LBRACK_BAR LBRACK LBRACK_BAR LBRACE_BAR LBRACE BANG_LBRACE +%token BAR_RBRACK BAR_RBRACE UNDERSCORE LENS_PAREN_LEFT LENS_PAREN_RIGHT +%token SEQ_BANG_LBRACK +%token BAR RBRACK RBRACE DOLLAR +%token PRIVATE REIFIABLE REFLECTABLE REIFY RANGE_OF SET_RANGE_OF LBRACE_COLON_PATTERN +%token PIPE_LEFT PIPE_RIGHT +%token NEW_EFFECT SUB_EFFECT LAYERED_EFFECT POLYMONADIC_BIND POLYMONADIC_SUBCOMP SPLICE SPLICET SQUIGGLY_RARROW TOTAL +%token REQUIRES ENSURES DECREASES LBRACE_COLON_WELL_FOUNDED +%token MINUS COLON_EQUALS QUOTE BACKTICK_AT BACKTICK_HASH +%token BACKTICK UNIV_HASH +%token BACKTICK_PERC + +%token OPPREFIX OPINFIX0a OPINFIX0b OPINFIX0c OPINFIX0d OPINFIX1 OPINFIX2 OPINFIX3 OPINFIX4 +%token OP_MIXFIX_ASSIGNMENT OP_MIXFIX_ACCESS +%token BLOB +%token USE_LANG_BLOB + +/* These are artificial */ +%token EOF + +%nonassoc THEN +%nonassoc ELSE + +%nonassoc ASSERT +%nonassoc EQUALTYPE +%nonassoc SUBTYPE +%nonassoc BY + +%right COLON_COLON +%right AMP + +%nonassoc COLON_EQUALS +%left OPINFIX0a +%left OPINFIX0b +%left OPINFIX0c EQUALS +%left OPINFIX0d +%left PIPE_RIGHT +%right PIPE_LEFT +%right OPINFIX1 +%left OPINFIX2 MINUS QUOTE +%left OPINFIX3 +%left BACKTICK +%right OPINFIX4 + +%start inputFragment +%start term +%start warn_error_list +%start oneDeclOrEOF +%type inputFragment +%type <(FStarC_Parser_AST.decl list * FStarC_Sedlexing.snap option) option> oneDeclOrEOF +%type term +%type lident +%type <(FStarC_Errors_Codes.error_flag * string) list> warn_error_list +%% + +(* inputFragment is used at the same time for whole files and fragment of codes (for interactive mode) *) +inputFragment: + | decls=list(decl) EOF + { + as_frag (List.flatten decls) + } + +oneDeclOrEOF: + | EOF { None } + | ds=idecl { Some ds } + +idecl: + | d=decl snap=startOfNextDeclToken + { d, snap } + +%public +startOfNextDeclToken: + | EOF { None } + | pragmaStartToken { None } + | LBRACK_AT { None } (* Attribute start *) + | LBRACK_AT_AT { None } (* Attribute start *) + | qualifier { None } + | CLASS { None } + | INSTANCE { None } + | OPEN { None } + | FRIEND { None } + | INCLUDE { 
None } + | MODULE { None } + | TYPE { None } + | EFFECT { None } + | LET { None } + | VAL { None } + | SPLICE { None } + | SPLICET { None } + | EXCEPTION { None } + | NEW_EFFECT { None } + | LAYERED_EFFECT { None } + | SUB_EFFECT { None } + | POLYMONADIC_BIND { None } + | POLYMONADIC_SUBCOMP { None } + | b=BLOB { let _, _, _, snap = b in Some snap } + | b=USE_LANG_BLOB { let _, _, _, snap = b in Some snap } + +pragmaStartToken: + | PRAGMA_SHOW_OPTIONS + { () } + | PRAGMA_SET_OPTIONS + { () } + | PRAGMA_RESET_OPTIONS + { () } + | PRAGMA_PUSH_OPTIONS + { () } + | PRAGMA_POP_OPTIONS + { () } + | PRAGMA_RESTART_SOLVER + { () } + | PRAGMA_PRINT_EFFECTS_GRAPH + { () } + +/******************************************************************************/ +/* Top level declarations */ +/******************************************************************************/ + +pragma: + | PRAGMA_SHOW_OPTIONS + { ShowOptions } + | PRAGMA_SET_OPTIONS s=string + { SetOptions s } + | PRAGMA_RESET_OPTIONS s_opt=string? + { ResetOptions s_opt } + | PRAGMA_PUSH_OPTIONS s_opt=string? + { PushOptions s_opt } + | PRAGMA_POP_OPTIONS + { PopOptions } + | PRAGMA_RESTART_SOLVER + { RestartSolver } + | PRAGMA_PRINT_EFFECTS_GRAPH + { PrintEffectsGraph } + +attribute: + | LBRACK_AT x = list(atomicTerm) RBRACK + { + let _ = + match x with + | _::_::_ -> + log_issue_text (rr $loc) Warning_DeprecatedAttributeSyntax old_attribute_syntax_warning + | _ -> () in + x + } + | LBRACK_AT_AT x = semiColonTermList RBRACK + { x } + +%public +decoration: + | x=attribute + { DeclAttributes x } + | x=qualifier + { Qualifier x } + +%public +decl: + | ASSUME lid=uident COLON phi=formula + { [mk_decl (Assume(lid, phi)) (rr $loc) [ Qualifier Assumption ]] } + + | blob=USE_LANG_BLOB + { + let ext_name, contents, pos, snap = blob in + (* blob_range is the full range of the blob, starting from the #lang pragma *) + let blob_range = rr (snd snap, snd $loc) in + (* extension_syntax_start_range is where the extension syntax starts not including + the "#lang ident" prefix *) + let extension_syntax_start_range = (rr (pos, pos)) in + let ds = parse_use_lang_blob ext_name contents blob_range extension_syntax_start_range in + mk_decl (UseLangDecls ext_name) extension_syntax_start_range [] :: ds + } + + | ds=list(decoration) decl=rawDecl + { [mk_decl decl (rr $loc(decl)) ds] } + + | ds=list(decoration) decl=typeclassDecl + { let (decl, extra_attrs) = decl in + let d = mk_decl decl (rr $loc(decl)) ds in + [{ d with attrs = extra_attrs @ d.attrs }] + } + +%public +noDecorationDecl: + | ASSUME lid=uident COLON phi=formula + { [mk_decl (Assume(lid, phi)) (rr $loc) [ Qualifier Assumption ]] } + + | blob=USE_LANG_BLOB + { + let ext_name, contents, pos, snap = blob in + (* blob_range is the full range of the blob, starting from the #lang pragma *) + let blob_range = rr (snd snap, snd $loc) in + (* extension_syntax_start_range is where the extension syntax starts not including + the "#lang ident" prefix *) + let extension_syntax_start_range = (rr (pos, pos)) in + let ds = parse_use_lang_blob ext_name contents blob_range extension_syntax_start_range in + mk_decl (UseLangDecls ext_name) extension_syntax_start_range [] :: ds + } + +%public +decoratableDecl: + | decl=rawDecl + { [mk_decl decl (rr $loc(decl)) []] } + + | decl=typeclassDecl + { let (decl, extra_attrs) = decl in + let d = mk_decl decl (rr $loc(decl)) [] in + [{ d with attrs = extra_attrs }] + } + + +typeclassDecl: + | CLASS tcdef=typeDecl + { + (* Only a single type decl allowed, but construct it the same 
as for multiple ones. + * Only difference is the `true` below marking that this a class so desugaring + * adds the needed %splice. *) + let d = Tycon (false, true, [tcdef]) in + + (* No attrs yet, but perhaps we want a `class` attribute *) + (d, []) + } + + | INSTANCE q=letqualifier lb=letbinding + { + (* Making a single letbinding *) + let r = rr $loc in + let lbs = focusLetBindings [lb] r in (* lbs is a singleton really *) + let d = TopLevelLet(q, lbs) in + + (* Slapping a `tcinstance` attribute to it *) + let at = mk_term (Var tcinstance_lid) r Type_level in + + (d, [at]) + } + + | INSTANCE VAL lid=lidentOrOperator bs=binders COLON t=typ + { + (* Some duplication from rawDecl... *) + let r = rr $loc in + let t = match bs with + | [] -> t + | bs -> mk_term (Product(bs, t)) (rr2 $loc(bs) $loc(t)) Type_level + in + let d = Val(lid, t) in + (* Slapping a `tcinstance` attribute to it *) + let at = mk_term (Var tcinstance_lid) r Type_level in + + (d, [at]) + } + +restriction: + | LBRACE ids=separated_list(COMMA, id=ident renamed=option(AS id=ident {id} ) {(id, renamed)}) RBRACE + { FStarC_Syntax_Syntax.AllowList ids } + | { FStarC_Syntax_Syntax.Unrestricted } + +rawDecl: + | p=pragma + { Pragma p } + | OPEN uid=quident r=restriction + { Open (uid, r) } + | FRIEND uid=quident + { Friend uid } + | INCLUDE uid=quident r=restriction + { Include (uid, r) } + | MODULE UNDERSCORE EQUALS uid=quident + { Open (uid, FStarC_Syntax_Syntax.AllowList []) } + | MODULE uid1=uident EQUALS uid2=quident + { ModuleAbbrev(uid1, uid2) } + | MODULE q=qlident + { raise_error_text (rr $loc(q)) Fatal_SyntaxError "Syntax error: expected a module name" } + | MODULE uid=quident + { TopLevelModule uid } + | TYPE tcdefs=separated_nonempty_list(AND,typeDecl) + { Tycon (false, false, tcdefs) } + | EFFECT uid=uident tparams=typars EQUALS t=typ + { Tycon(true, false, [(TyconAbbrev(uid, tparams, None, t))]) } + | LET q=letqualifier lbs=separated_nonempty_list(AND, letbinding) + { + let r = rr $loc in + let lbs = focusLetBindings lbs r in + if q <> Rec && List.length lbs <> 1 + then raise_error_text r Fatal_MultipleLetBinding "Unexpected multiple let-binding (Did you forget some rec qualifier ?)"; + TopLevelLet(q, lbs) + } + | VAL c=constant + { + (* This is just to provide a better error than "syntax error" *) + raise_error_text (rr $loc) Fatal_SyntaxError "Syntax error: constants are not allowed in val declarations" + } + | VAL lid=lidentOrOperator bs=binders COLON t=typ + { + let t = match bs with + | [] -> t + | bs -> mk_term (Product(bs, t)) (rr2 $loc(bs) $loc(t)) Type_level + in Val(lid, t) + } + | SPLICE LBRACK ids=separated_list(SEMICOLON, ident) RBRACK t=thunk(atomicTerm) + { Splice (false, ids, t) } + | SPLICET LBRACK ids=separated_list(SEMICOLON, ident) RBRACK t=atomicTerm + { Splice (true, ids, t) } + | EXCEPTION lid=uident t_opt=option(OF t=typ {t}) + { Exception(lid, t_opt) } + | NEW_EFFECT ne=newEffect + { NewEffect ne } + | LAYERED_EFFECT ne=effectDefinition + { LayeredEffect ne } + | EFFECT ne=layeredEffectDefinition + { LayeredEffect ne } + | SUB_EFFECT se=subEffect + { SubEffect se } + | POLYMONADIC_BIND b=polymonadic_bind + { Polymonadic_bind b } + | POLYMONADIC_SUBCOMP c=polymonadic_subcomp + { Polymonadic_subcomp c } + | blob=BLOB + { + let ext_name, contents, pos, snap = blob in + (* blob_range is the full range of the blob, including the enclosing ``` *) + let blob_range = rr (snd snap, snd $loc) in + (* extension_syntax_start_range is where the extension syntax starts not including + the "```ident" 
prefix *) + let extension_syntax_start_range = (rr (pos, pos)) in + parse_extension_blob ext_name contents blob_range extension_syntax_start_range + } + + +typeDecl: + (* TODO : change to lident with stratify *) + | lid=ident tparams=typars ascr_opt=ascribeKind? tcdef=typeDefinition + { tcdef lid tparams ascr_opt } + +typars: + | x=tvarinsts { x } + | x=binders { x } + +tvarinsts: + | TYP_APP_LESS tvs=separated_nonempty_list(COMMA, tvar) TYP_APP_GREATER + { map (fun tv -> mk_binder (TVariable(tv)) (range_of_id tv) Kind None) tvs } + +%inline recordDefinition: + | LBRACE record_field_decls=right_flexible_nonempty_list(SEMICOLON, recordFieldDecl) RBRACE + { record_field_decls } + +typeDefinition: + | { (fun id binders kopt -> check_id id; TyconAbstract(id, binders, kopt)) } + | EQUALS t=typ + { (fun id binders kopt -> check_id id; TyconAbbrev(id, binders, kopt, t)) } + /* A documentation on the first branch creates a conflict with { x with a = ... }/{ a = ... } */ + | EQUALS attrs_opt=ioption(binderAttributes) record_field_decls=recordDefinition + { (fun id binders kopt -> check_id id; TyconRecord(id, binders, kopt, none_to_empty_list attrs_opt, record_field_decls)) } + (* having the first BAR optional using left-flexible list creates a s/r on FSDOC since any decl can be preceded by a FSDOC *) + | EQUALS ct_decls=list(constructorDecl) + { (fun id binders kopt -> check_id id; TyconVariant(id, binders, kopt, ct_decls)) } + +recordFieldDecl: + | qualified_lid=aqualifiedWithAttrs(lidentOrOperator) COLON t=typ + { + let (qual, attrs), lid = qualified_lid in + (lid, qual, attrs, t) + } + +constructorPayload: + | COLON t=typ {VpArbitrary t} + | OF t=typ {VpOfNotation t} + | fields=recordDefinition opt=option(COLON t=typ {t}) {VpRecord(fields, opt)} + +constructorDecl: + | BAR attrs_opt=ioption(binderAttributes) + uid=uident + payload=option(constructorPayload) + { uid, payload, none_to_empty_list attrs_opt } + +attr_letbinding: + | attr=ioption(attribute) AND lb=letbinding + { attr, lb } + +letoperatorbinding: + | pat=tuplePattern ascr_opt=ascribeTyp? tm=option(EQUALS tm=term {tm}) + { + let h tm + = ( ( match ascr_opt with + | None -> pat + | Some t -> mk_pattern (PatAscribed(pat, t)) (rr2 $loc(pat) $loc(ascr_opt)) ) + , tm) + in + match pat.pat, tm with + | _ , Some tm -> h tm + | PatVar (v, _, _), None -> + let v = lid_of_ns_and_id [] v in + h (mk_term (Var v) (rr $loc(pat)) Expr) + | _ -> raise_error_text (rr $loc(ascr_opt)) Fatal_SyntaxError "Syntax error: let-punning expects a name, not a pattern" + } + +letbinding: + | focus_opt=maybeFocus lid=lidentOrOperator lbp=nonempty_list(patternOrMultibinder) ascr_opt=ascribeTyp? 
EQUALS tm=term + { + let pat = mk_pattern (PatVar(lid, None, [])) (rr $loc(lid)) in + let pat = mk_pattern (PatApp (pat, flatten lbp)) (rr2 $loc(focus_opt) $loc(lbp)) in + let pos = rr2 $loc(focus_opt) $loc(tm) in + match ascr_opt with + | None -> (focus_opt, (pat, tm)) + | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm)) + } + | focus_opt=maybeFocus pat=tuplePattern ascr=ascribeTyp eq=EQUALS tm=term + { focus_opt, (mk_pattern (PatAscribed(pat, ascr)) (rr2 $loc(focus_opt) $loc(eq)), tm) } + | focus_opt=maybeFocus pat=tuplePattern EQUALS tm=term + { focus_opt, (pat, tm) } + +/******************************************************************************/ +/* Effects */ +/******************************************************************************/ + +newEffect: + | ed=effectRedefinition + | ed=effectDefinition + { ed } + +effectRedefinition: + | lid=uident EQUALS t=simpleTerm + { RedefineEffect(lid, [], t) } + +effectDefinition: + | LBRACE lid=uident bs=binders COLON typ=tmArrow(tmNoEq) + WITH eds=separated_nonempty_list(SEMICOLON, effectDecl) + RBRACE + { DefineEffect(lid, bs, typ, eds) } + +layeredEffectDefinition: + | LBRACE lid=uident bs=binders WITH r=tmNoEq RBRACE + { + let typ = (* bs -> Effect *) + let first_b, last_b = + match bs with + | [] -> + raise_error_text (range_of_id lid) Fatal_SyntaxError + "Syntax error: unexpected empty binders list in the layered effect definition" + | _ -> hd bs, last bs in + let r = union_ranges first_b.brange last_b.brange in + mk_term (Product (bs, mk_term (Name (lid_of_str "Effect")) r Type_level)) r Type_level in + let rec decls (r:term) = + match r.tm with + | Paren r -> decls r + | Record (None, flds) -> + flds |> List.map (fun (lid, t) -> + mk_decl (Tycon (false, + false, + [TyconAbbrev (ident_of_lid lid, [], None, t)])) + t.range []) + | _ -> + raise_error_text r.range Fatal_SyntaxError + "Syntax error: layered effect combinators should be declared as a record" + in + DefineEffect (lid, [], typ, decls r) } + +effectDecl: + | lid=lident action_params=binders EQUALS t=simpleTerm + { mk_decl (Tycon (false, false, [TyconAbbrev(lid, action_params, None, t)])) (rr $loc) [] } + +subEffect: + | src_eff=quident SQUIGGLY_RARROW tgt_eff=quident EQUALS lift=simpleTerm + { { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift; braced=false } } + | src_eff=quident SQUIGGLY_RARROW tgt_eff=quident + LBRACE + lift1=separated_pair(IDENT, EQUALS, simpleTerm) + lift2_opt=ioption(separated_pair(SEMICOLON id=IDENT {id}, EQUALS, simpleTerm)) + /* might be nice for homogeneity if possible : ioption(SEMICOLON) */ + RBRACE + { + match lift2_opt with + | None -> + begin match lift1 with + | ("lift", lift) -> + { msource = src_eff; mdest = tgt_eff; lift_op = LiftForFree lift; braced=true } + | ("lift_wp", lift_wp) -> + { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift_wp; braced=true } + | _ -> + raise_error_text (rr $loc) Fatal_UnexpectedIdentifier "Unexpected identifier; expected {'lift', and possibly 'lift_wp'}" + end + | Some (id2, tm2) -> + let (id1, tm1) = lift1 in + let lift, lift_wp = match (id1, id2) with + | "lift_wp", "lift" -> tm1, tm2 + | "lift", "lift_wp" -> tm2, tm1 + | _ -> raise_error_text (rr $loc) Fatal_UnexpectedIdentifier "Unexpected identifier; expected {'lift', 'lift_wp'}" + in + { msource = src_eff; mdest = tgt_eff; lift_op = ReifiableLift (lift, lift_wp); braced=true } + } + +polymonadic_bind: + | LPAREN m_eff=quident COMMA n_eff=quident RPAREN PIPE_RIGHT p_eff=quident EQUALS bind=simpleTerm 
+ { (m_eff, n_eff, p_eff, bind) } + +polymonadic_subcomp: + | m_eff=quident SUBTYPE n_eff=quident EQUALS subcomp=simpleTerm + { (m_eff, n_eff, subcomp) } + + +/******************************************************************************/ +/* Qualifiers, tags, ... */ +/******************************************************************************/ + +qualifier: + | ASSUME { Assumption } + | INLINE { + raise_error_text (rr $loc) Fatal_InlineRenamedAsUnfold + "The 'inline' qualifier has been renamed to 'unfold'" + } + | UNFOLDABLE { + raise_error_text (rr $loc) Fatal_UnfoldableDeprecated + "The 'unfoldable' qualifier is no longer denotable; it is the default qualifier so just omit it" + } + | INLINE_FOR_EXTRACTION { + Inline_for_extraction + } + | UNFOLD { + Unfold_for_unification_and_vcgen + } + | IRREDUCIBLE { Irreducible } + | NOEXTRACT { NoExtract } + | DEFAULT { DefaultEffect } + | TOTAL { TotalEffect } + | PRIVATE { Private } + + | NOEQUALITY { Noeq } + | UNOPTEQUALITY { Unopteq } + | NEW { New } + | LOGIC { log_issue_text (rr $loc) Warning_logicqualifier logic_qualifier_deprecation_warning; + Logic } + | OPAQUE { Opaque } + | REIFIABLE { Reifiable } + | REFLECTABLE { Reflectable } + +maybeFocus: + | b=boption(SQUIGGLY_RARROW) { b } + +letqualifier: + | REC { Rec } + | { NoLetQualifier } + +(* + * AR: this should be generalized to: + * (a) allow attributes on non-implicit binders + * note that in the [@@ case, we choose the Implicit aqual + *) +aqual: + | HASH LBRACK t=thunk(term) RBRACK { mk_meta_tac t } + | HASH { Implicit } + | DOLLAR { Equality } + +binderAttributes: + | LBRACK_AT_AT_AT t=semiColonTermList RBRACK { t } + +/******************************************************************************/ +/* Patterns, binders */ +/******************************************************************************/ + +(* disjunction should be allowed in nested patterns *) +disjunctivePattern: + | pats=separated_nonempty_list(BAR, tuplePattern) { pats } + +%public +tuplePattern: + | pats=separated_nonempty_list(COMMA, constructorPattern) + { match pats with | [x] -> x | l -> mk_pattern (PatTuple (l, false)) (rr $loc) } + +constructorPattern: + | pat=constructorPattern COLON_COLON pats=constructorPattern + { mk_pattern (consPat (rr $loc(pats)) pat pats) (rr $loc) } + | uid=quident args=nonempty_list(atomicPattern) + { + let head_pat = mk_pattern (PatName uid) (rr $loc(uid)) in + mk_pattern (PatApp (head_pat, args)) (rr $loc) + } + | pat=atomicPattern + { pat } + +atomicPattern: + | LPAREN pat=tuplePattern COLON t=simpleArrow phi_opt=refineOpt RPAREN + { + let pos_t = rr2 $loc(pat) $loc(t) in + let pos = rr $loc in + mkRefinedPattern pat t true phi_opt pos_t pos + } + | LBRACK pats=separated_list(SEMICOLON, tuplePattern) RBRACK + { mk_pattern (PatList pats) (rr2 $loc($1) $loc($3)) } + | LBRACE record_pat=right_flexible_list(SEMICOLON, fieldPattern) RBRACE + { mk_pattern (PatRecord record_pat) (rr $loc) } + | LENS_PAREN_LEFT pat0=constructorPattern COMMA pats=separated_nonempty_list(COMMA, constructorPattern) LENS_PAREN_RIGHT + { mk_pattern (PatTuple(pat0::pats, true)) (rr $loc) } + | LPAREN pat=tuplePattern RPAREN { pat } + | tv=tvar { mk_pattern (PatTvar (tv, None, [])) (rr $loc(tv)) } + | LPAREN op=operator RPAREN + { mk_pattern (PatOp op) (rr $loc) } + | UNDERSCORE + { mk_pattern (PatWild (None, [])) (rr $loc) } + | HASH UNDERSCORE + { mk_pattern (PatWild (Some Implicit, [])) (rr $loc) } + | c=constant + { mk_pattern (PatConst c) (rr $loc(c)) } + | tok=MINUS c=constant + { let r = rr2 
$loc(tok) $loc(c) in + let c = + match c with + | Const_int (s, swopt) -> + (match swopt with + | None + | Some (Signed, _) -> Const_int ("-" ^ s, swopt) + | _ -> raise_error_text r Fatal_SyntaxError "Syntax_error: negative integer constant with unsigned width") + | _ -> raise_error_text r Fatal_SyntaxError "Syntax_error: negative constant that is not an integer" + in + mk_pattern (PatConst c) r } + | BACKTICK_PERC q=atomicTerm + { mk_pattern (PatVQuote q) (rr $loc) } + | qual_id=aqualifiedWithAttrs(lident) + { + let (aqual, attrs), lid = qual_id in + mk_pattern (PatVar (lid, aqual, attrs)) (rr $loc(qual_id)) } + | uid=quident + { mk_pattern (PatName uid) (rr $loc(uid)) } + +fieldPattern: + | p = separated_pair(qlident, EQUALS, tuplePattern) + { p } + | lid=qlident + { lid, mk_pattern (PatVar (ident_of_lid lid, None, [])) (rr $loc(lid)) } + + (* (x : t) is already covered by atomicPattern *) + (* we do *NOT* allow _ in multibinder () since it creates reduce/reduce conflicts when*) + (* preprocessing to ocamlyacc/fsyacc (which is expected since the macro are expanded) *) +patternOrMultibinder: + | LBRACE_BAR id=lidentOrUnderscore COLON t=simpleArrow BAR_RBRACE + { let r = rr $loc in + let w = mk_pattern (PatVar (id, Some TypeClassArg, [])) r in + let asc = (t, None) in + [mk_pattern (PatAscribed(w, asc)) r] + } + + | LBRACE_BAR t=simpleArrow BAR_RBRACE + { let r = rr $loc in + let id = gen r in + let w = mk_pattern (PatVar (id, Some TypeClassArg, [])) r in + let asc = (t, None) in + [mk_pattern (PatAscribed(w, asc)) r] + } + | pat=atomicPattern { [pat] } + | LPAREN qual_id0=aqualifiedWithAttrs(lident) qual_ids=nonempty_list(aqualifiedWithAttrs(lident)) COLON t=simpleArrow r=refineOpt RPAREN + { + let pos = rr $loc in + let t_pos = rr $loc(t) in + let qual_ids = qual_id0 :: qual_ids in + List.map (fun ((aq, attrs), x) -> mkRefinedPattern (mk_pattern (PatVar (x, aq, attrs)) pos) t false r t_pos pos) qual_ids + } + +binder: + | aqualifiedWithAttrs_lid=aqualifiedWithAttrs(lidentOrUnderscore) + { + let (q, attrs), lid = aqualifiedWithAttrs_lid in + mk_binder_with_attrs (Variable lid) (rr $loc(aqualifiedWithAttrs_lid)) Type_level q attrs + } + + | tv=tvar { mk_binder (TVariable tv) (rr $loc) Kind None } + (* small regression here : fun (=x : t) ... 
is not accepted anymore *) + +%public +multiBinder: + | LBRACE_BAR id=lidentOrUnderscore COLON t=simpleArrow BAR_RBRACE + { let r = rr $loc in + [mk_binder (Annotated (id, t)) r Type_level (Some TypeClassArg)] + } + + | LBRACE_BAR t=simpleArrow BAR_RBRACE + { let r = rr $loc in + let id = gen r in + [mk_binder (Annotated (id, t)) r Type_level (Some TypeClassArg)] + } + + | LPAREN qual_ids=nonempty_list(aqualifiedWithAttrs(lidentOrUnderscore)) COLON t=simpleArrow r=refineOpt RPAREN + { + let should_bind_var = match qual_ids with | [ _ ] -> true | _ -> false in + List.map (fun ((q, attrs), x) -> + mkRefinedBinder x t should_bind_var r (rr $loc) q attrs) qual_ids + } + + | LPAREN_RPAREN + { + let r = rr $loc in + let unit_t = mk_term (Var (lid_of_ids [(mk_ident("unit", r))])) r Un in + [mk_binder (Annotated (gen r, unit_t)) r Un None] + } + + | b=binder { [b] } + +%public +binders: bss=list(bs=multiBinder {bs}) { flatten bss } + +aqualifiedWithAttrs(X): + | aq=aqual attrs=binderAttributes x=X { (Some aq, attrs), x } + | aq=aqual x=X { (Some aq, []), x } + | attrs=binderAttributes x=X { (None, attrs), x } + | x=X { (None, []), x } + +/******************************************************************************/ +/* Identifiers, module paths */ +/******************************************************************************/ + +%public +qlident: + | ids=path(lident) { lid_of_ids ids } + +%public +quident: + | ids=path(uident) { lid_of_ids ids } + +path(Id): + | id=Id { [id] } + | uid=uident DOT p=path(Id) { uid::p } + +ident: + | x=lident { x } + | x=uident { x } + +qlidentOrOperator: + | qid=qlident { qid } + | LPAREN id=operator RPAREN + { lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) } + +%inline lidentOrOperator: + | id=lident { id } + | LPAREN id=operator RPAREN + { mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) } + +matchMaybeOp: + | MATCH {None} + | op=MATCH_OP { Some (mk_ident ("let" ^ op, rr $loc(op))) } + +ifMaybeOp: + | IF {None} + | op=IF_OP { Some (mk_ident ("let" ^ op, rr $loc(op))) } + +%public +lidentOrUnderscore: + | id=IDENT { mk_ident(id, rr $loc(id))} + | UNDERSCORE { gen (rr $loc) } + +%public +lident: + | id=IDENT { mk_ident(id, rr $loc(id))} + +uident: + | id=NAME { mk_ident(id, rr $loc(id)) } + +tvar: + | tv=TVAR { mk_ident(tv, rr $loc(tv)) } + + +/******************************************************************************/ +/* Types and terms */ +/******************************************************************************/ + +thunk(X): | t=X { mk_term (Abs ([mk_pattern (PatWild (None, [])) (rr $loc)], t)) (rr $loc) Expr } + +thunk2(X): + | t=X + { let u = mk_term (Const Const_unit) (rr $loc) Expr in + let t = mk_term (Seq (u, t)) (rr $loc) Expr in + mk_term (Abs ([mk_pattern (PatWild (None, [])) (rr $loc)], t)) (rr $loc) Expr } + +ascribeTyp: + | COLON t=tmArrow(tmNoEq) tacopt=option(BY tactic=thunk(trailingTerm) {tactic}) { t, tacopt } + +(* Remove for stratify *) +ascribeKind: + | COLON k=kind { k } + +(* Remove for stratify *) +kind: + | t=tmArrow(tmNoEq) { {t with level=Kind} } + + +term: + | e=noSeqTerm + { e } + | e1=noSeqTerm SEMICOLON e2=term + { mk_term (Seq(e1, e2)) (rr2 $loc(e1) $loc(e2)) Expr } +(* Added this form for sequencing; *) +(* but it results in an additional shift/reduce conflict *) +(* ... 
which is actually be benign, since the same conflict already *) +(* exists for the previous production *) + | e1=noSeqTerm op=SEMICOLON_OP e2=term + { let t = match op with + | Some op -> + let op = mk_ident ("let" ^ op, rr $loc(op)) in + let pat = mk_pattern (PatWild(None, [])) (rr $loc(op)) in + LetOperator ([(op, pat, e1)], e2) + | None -> + log_issue_text (rr $loc) Warning_DeprecatedLightDoNotation do_notation_deprecation_warning; + Bind(gen (rr $loc(op)), e1, e2) + in mk_term t (rr2 $loc(e1) $loc(e2)) Expr + } + | x=lidentOrUnderscore LONG_LEFT_ARROW e1=noSeqTerm SEMICOLON e2=term + { log_issue_text (rr $loc) Warning_DeprecatedLightDoNotation do_notation_deprecation_warning; + mk_term (Bind(x, e1, e2)) (rr2 $loc(x) $loc(e2)) Expr } + +match_returning: + | as_opt=option(AS i=lident {i}) RETURNS t=tmIff {as_opt,t,false} + | as_opt=option(AS i=lident {i}) RETURNS_EQ t=tmIff {as_opt,t,true} + +%public +noSeqTerm: + | t=typ { t } + | e=tmIff SUBTYPE t=tmIff + { mk_term (Ascribed(e,{t with level=Expr},None,false)) (rr $loc(e)) Expr } + | e=tmIff SUBTYPE t=tmIff BY tactic=thunk(typ) + { mk_term (Ascribed(e,{t with level=Expr},Some tactic,false)) (rr2 $loc(e) $loc(tactic)) Expr } + | e=tmIff EQUALTYPE t=tmIff + { + log_issue_text (rr $loc) Warning_BleedingEdge_Feature + "Equality type ascriptions is an experimental feature subject to redesign in the future"; + mk_term (Ascribed(e,{t with level=Expr},None,true)) (rr $loc(e)) Expr + } + | e=tmIff EQUALTYPE t=tmIff BY tactic=thunk(typ) + { + log_issue_text (rr $loc) Warning_BleedingEdge_Feature + "Equality type ascriptions is an experimental feature subject to redesign in the future"; + mk_term (Ascribed(e,{t with level=Expr},Some tactic,true)) (rr2 $loc(e) $loc(tactic)) Expr + } + | e1=atomicTermNotQUident op_expr=dotOperator LARROW e3=noSeqTerm + { + let (op, e2, _) = op_expr in + let opid = mk_ident (string_of_id op ^ "<-", range_of_id op) in + mk_term (Op(opid, [ e1; e2; e3 ])) (rr2 $loc(e1) $loc(e3)) Expr + } + | REQUIRES t=typ + { mk_term (Requires(t, None)) (rr2 $loc($1) $loc(t)) Type_level } + | ENSURES t=typ + { mk_term (Ensures(t, None)) (rr2 $loc($1) $loc(t)) Type_level } + | DECREASES t=typ + { mk_term (Decreases (t, None)) (rr2 $loc($1) $loc(t)) Type_level } + | DECREASES LBRACE_COLON_WELL_FOUNDED t=noSeqTerm RBRACE + (* + * decreases clause with relation is written as e1 e2, + * where e1 is a relation and e2 is a term + * + * this is parsed as an app node, so we destruct the app node + *) + { match t.tm with + | App (t1, t2, _) -> + let ot = mk_term (WFOrder (t1, t2)) (rr2 $loc(t) $loc(t)) Type_level in + mk_term (Decreases (ot, None)) (rr2 $loc($1) $loc($4)) Type_level + | _ -> + raise_error_text (rr $loc(t)) Fatal_SyntaxError + "Syntax error: To use well-founded relations, write e1 e2" + } + + | ATTRIBUTES es=nonempty_list(atomicTerm) + { mk_term (Attributes es) (rr2 $loc($1) $loc(es)) Type_level } + | op=ifMaybeOp e1=noSeqTerm ret_opt=option(match_returning) THEN e2=noSeqTerm ELSE e3=noSeqTerm + { mk_term (If(e1, op, ret_opt, e2, e3)) (rr2 $loc(op) $loc(e3)) Expr } + | op=ifMaybeOp e1=noSeqTerm ret_opt=option(match_returning) THEN e2=noSeqTerm + { + let e3 = mk_term (Const Const_unit) (rr2 $loc(op) $loc(e2)) Expr in + mk_term (If(e1, op, ret_opt, e2, e3)) (rr2 $loc(op) $loc(e2)) Expr + } + | TRY e1=term WITH pbs=left_flexible_nonempty_list(BAR, patternBranch) + { + let branches = focusBranches (pbs) (rr2 $loc($1) $loc(pbs)) in + mk_term (TryWith(e1, branches)) (rr2 $loc($1) $loc(pbs)) Expr + } + | op=matchMaybeOp e=term 
ret_opt=option(match_returning) WITH pbs=left_flexible_list(BAR, pb=patternBranch {pb}) + { + let branches = focusBranches pbs (rr2 $loc(op) $loc(pbs)) in + mk_term (Match(e, op, ret_opt, branches)) (rr2 $loc(op) $loc(pbs)) Expr + } + | LET OPEN t=term IN e=term + { + match t.tm with + | Ascribed(r, rty, None, _) -> + mk_term (LetOpenRecord(r, rty, e)) (rr2 $loc($1) $loc(e)) Expr + + | Name uid -> + mk_term (LetOpen(uid, e)) (rr2 $loc($1) $loc(e)) Expr + + | _ -> + raise_error_text (rr $loc(t)) Fatal_SyntaxError + "Syntax error: local opens expects either opening\n\ + a module or namespace using `let open T in e`\n\ + or, a record type with `let open e <: t in e'`" + } + + | attrs=ioption(attribute) + LET q=letqualifier lb=letbinding lbs=list(attr_letbinding) IN e=term + { + let lbs = (attrs, lb)::lbs in + let lbs = focusAttrLetBindings lbs (rr2 $loc(q) $loc(lb)) in + mk_term (Let(q, lbs, e)) (rr $loc) Expr + } + | op=let_op b=letoperatorbinding lbs=list(op=and_op b=letoperatorbinding {(op, b)}) IN e=term + { let lbs = (op, b)::lbs in + mk_term (LetOperator ( List.map (fun (op, (pat, tm)) -> (op, pat, tm)) lbs + , e)) (rr2 $loc(op) $loc(e)) Expr + } + | FUNCTION pbs=left_flexible_nonempty_list(BAR, patternBranch) + { + let branches = focusBranches pbs (rr2 $loc($1) $loc(pbs)) in + mk_function branches (rr $loc) (rr2 $loc($1) $loc(pbs)) + } + | a=ASSUME e=noSeqTerm + { let a = set_lid_range assume_lid (rr $loc(a)) in + mkExplicitApp (mk_term (Var a) (rr $loc(a)) Expr) [e] (rr $loc) + } + + | a=ASSERT e=noSeqTerm + { + let a = set_lid_range assert_lid (rr $loc(a)) in + mkExplicitApp (mk_term (Var a) (rr $loc(a)) Expr) [e] (rr $loc) + } + + | a=ASSERT e=noSeqTerm BY tactic=thunk2(typ) + { + let a = set_lid_range assert_by_tactic_lid (rr $loc(a)) in + mkExplicitApp (mk_term (Var a) (rr $loc(a)) Expr) [e; tactic] (rr $loc) + } + + | u=UNDERSCORE BY tactic=thunk(atomicTerm) + { + let a = set_lid_range synth_lid (rr $loc(u)) in + mkExplicitApp (mk_term (Var a) (rr $loc(u)) Expr) [tactic] (rr $loc) + } + + | s=SYNTH tactic=atomicTerm + { + let a = set_lid_range synth_lid (rr $loc(s)) in + mkExplicitApp (mk_term (Var a) (rr $loc(s)) Expr) [tactic] (rr $loc) + } + + | CALC rel=atomicTerm LBRACE init=noSeqTerm SEMICOLON steps=list(calcStep) RBRACE + { + mk_term (CalcProof (rel, init, steps)) (rr2 $loc($1) $loc($7)) Expr + } + + | INTRO FORALL bs=binders DOT p=noSeqTerm WITH e=noSeqTerm + { + mk_term (IntroForall(bs, p, e)) (rr2 $loc($1) $loc(e)) Expr + } + + | INTRO EXISTS bs=binders DOT p=noSeqTerm WITH vs=list(atomicTerm) AND e=noSeqTerm + { + if List.length bs <> List.length vs + then raise_error_text (rr $loc(vs)) Fatal_SyntaxError "Syntax error: expected instantiations for all binders" + else mk_term (IntroExists(bs, p, vs, e)) (rr2 $loc($1) $loc(e)) Expr + } + + | INTRO p=tmFormula IMPLIES q=tmFormula WITH y=singleBinder DOT e=noSeqTerm + { + mk_term (IntroImplies(p, q, y, e)) (rr2 $loc($1) $loc(e)) Expr + } + + | INTRO p=tmFormula DISJUNCTION q=tmConjunction WITH lr=NAME e=noSeqTerm + { + let b = + if lr = "Left" then true + else if lr = "Right" then false + else raise_error_text (rr $loc(lr)) Fatal_SyntaxError "Syntax error: _intro_ \\/ expects either 'Left' or 'Right'" + in + mk_term (IntroOr(b, p, q, e)) (rr2 $loc($1) $loc(e)) Expr + } + + | INTRO p=tmConjunction CONJUNCTION q=tmTuple WITH e1=noSeqTerm AND e2=noSeqTerm + { + mk_term (IntroAnd(p, q, e1, e2)) (rr2 $loc($1) $loc(e2)) Expr + } + + | ELIM FORALL xs=binders DOT p=noSeqTerm WITH vs=list(atomicTerm) + { + mk_term (ElimForall(xs, p, 
vs)) (rr2 $loc($1) $loc(vs)) Expr + } + + | ELIM EXISTS bs=binders DOT p=noSeqTerm RETURNS q=noSeqTerm WITH y=singleBinder DOT e=noSeqTerm + { + mk_term (ElimExists(bs, p, q, y, e)) (rr2 $loc($1) $loc(e)) Expr + } + + | ELIM p=tmFormula IMPLIES q=tmFormula WITH e=noSeqTerm + { + mk_term (ElimImplies(p, q, e)) (rr2 $loc($1) $loc(e)) Expr + } + + | ELIM p=tmFormula DISJUNCTION q=tmConjunction RETURNS r=noSeqTerm WITH x=singleBinder DOT e1=noSeqTerm AND y=singleBinder DOT e2=noSeqTerm + { + mk_term (ElimOr(p, q, r, x, e1, y, e2)) (rr2 $loc($1) $loc(e2)) Expr + } + + | ELIM p=tmConjunction CONJUNCTION q=tmTuple RETURNS r=noSeqTerm WITH xs=binders DOT e=noSeqTerm + { + match xs with + | [x;y] -> mk_term (ElimAnd(p, q, r, x, y, e)) (rr2 $loc($1) $loc(e)) Expr + } + +singleBinder: + | bs=binders + { + match bs with + | [b] -> b + | _ -> raise_error_text (rr $loc(bs)) Fatal_SyntaxError "Syntax error: expected a single binder" + } + +calcRel: + | i=binop_name { mk_term (Op (i, [])) (rr $loc(i)) Expr } + | BACKTICK id=qlident BACKTICK { mk_term (Var id) (rr $loc) Un } + | t=atomicTerm { t } + +calcStep: + | rel=calcRel LBRACE justif=option(term) RBRACE next=noSeqTerm SEMICOLON + { + let justif = + match justif with + | Some t -> t + | None -> mk_term (Const Const_unit) (rr2 $loc($2) $loc($4)) Expr + in + CalcStep (rel, justif, next) + } + +%inline +typ: + | t=simpleTerm { t } + +%public +%inline quantifier: + | FORALL { fun x -> QForall x } + | EXISTS { fun x -> QExists x} + | op=FORALL_OP + { + let op = mk_ident("forall" ^ op, rr $loc(op)) in + fun (x,y,z) -> QuantOp (op, x, y, z) + } + | op=EXISTS_OP + { + let op = mk_ident("exists" ^ op, rr $loc(op)) in + fun (x,y,z) -> QuantOp (op, x, y, z) + } + +%public +trigger: + | { [] } + | LBRACE_COLON_PATTERN pats=disjunctivePats RBRACE { pats } + +disjunctivePats: + | pats=separated_nonempty_list(DISJUNCTION, conjunctivePat) { pats } + +conjunctivePat: + | pats=separated_nonempty_list(SEMICOLON, appTerm) { pats } + +%inline simpleTerm: + | e=tmIff { e } + +maybeFocusArrow: + | RARROW { false } + | SQUIGGLY_RARROW { true } + +patternBranch: + | pat=disjunctivePattern when_opt=maybeWhen focus=maybeFocusArrow e=term + { + let pat = match pat with + | [p] -> p + | ps -> mk_pattern (PatOr ps) (rr2 $loc(pat) $loc(pat)) + in + (focus, (pat, when_opt, e)) + } + +%inline maybeWhen: + | { None } + | WHEN e=tmFormula { Some e } + + + +tmIff: + | e1=tmImplies tok=IFF e2=tmIff + { mk_term (Op(mk_ident("<==>", rr $loc(tok)), [e1; e2])) (rr2 $loc(e1) $loc(e2)) Formula } + | e=tmImplies { e } + +tmImplies: + | e1=tmArrow(tmFormula) tok=IMPLIES e2=tmImplies + { mk_term (Op(mk_ident("==>", rr $loc(tok)), [e1; e2])) (rr2 $loc(e1) $loc(e2)) Formula } + | e=tmArrow(tmFormula) + { e } + + +(* Tm : either tmFormula, containing EQUALS or tmNoEq, without EQUALS *) +tmArrow(Tm): + | dom=tmArrowDomain(Tm) RARROW tgt=tmArrow(Tm) + { + let ((aq_opt, attrs), dom_tm) = dom in + let b = match extract_named_refinement true dom_tm with + | None -> mk_binder_with_attrs (NoName dom_tm) (rr $loc(dom)) Un aq_opt attrs + | Some (x, t, f) -> mkRefinedBinder x t true f (rr2 $loc(dom) $loc(dom)) aq_opt attrs + in + mk_term (Product([b], tgt)) (rr2 $loc(dom) $loc(tgt)) Un + } + | e=Tm { e } + +simpleArrow: + | dom=simpleArrowDomain RARROW tgt=simpleArrow + { + let ((aq_opt, attrs), dom_tm) = dom in + let b = match extract_named_refinement true dom_tm with + | None -> mk_binder_with_attrs (NoName dom_tm) (rr $loc(dom)) Un aq_opt attrs + | Some (x, t, f) -> mkRefinedBinder x t true f (rr2 
$loc(dom) $loc(dom)) aq_opt attrs + in + mk_term (Product([b], tgt)) (rr2 $loc(dom) $loc(tgt)) Un + } + | e=tmEqNoRefinement { e } + +simpleArrowDomain: + | LBRACE_BAR t=tmEqNoRefinement BAR_RBRACE { ((Some TypeClassArg, []), t) } + | aq_opt=ioption(aqual) attrs_opt=ioption(binderAttributes) dom_tm=tmEqNoRefinement { (aq_opt, none_to_empty_list attrs_opt), dom_tm } + +(* Tm already accounts for ( term ), we need to add an explicit case for (#Tm), (#[@@@...]Tm) and ([@@@...]Tm) *) +%inline tmArrowDomain(Tm): + | LBRACE_BAR t=Tm BAR_RBRACE { ((Some TypeClassArg, []), t) } + | LPAREN q=aqual attrs_opt=ioption(binderAttributes) dom_tm=Tm RPAREN { (Some q, none_to_empty_list attrs_opt), dom_tm } + | LPAREN attrs=binderAttributes dom_tm=Tm RPAREN { (None, attrs), dom_tm } + | aq_opt=ioption(aqual) attrs_opt=ioption(binderAttributes) dom_tm=Tm { (aq_opt, none_to_empty_list attrs_opt), dom_tm } + +tmFormula: + | e1=tmFormula tok=DISJUNCTION e2=tmConjunction + { mk_term (Op(mk_ident("\\/", rr $loc(tok)), [e1;e2])) (rr2 $loc(e1) $loc(e2)) Formula } + | e=tmConjunction { e } + +tmConjunction: + | e1=tmConjunction tok=CONJUNCTION e2=tmTuple + { mk_term (Op(mk_ident("/\\", rr $loc(tok)), [e1;e2])) (rr2 $loc(e1) $loc(e2)) Formula } + | e=tmTuple { e } + +tmTuple: + | el=separated_nonempty_list(COMMA, tmEq) + { + match el with + | [x] -> x + | components -> mkTuple components (rr2 $loc(el) $loc(el)) + } + + + +%public +tmEqWith(X): + | e1=tmEqWith(X) tok=EQUALS e2=tmEqWith(X) + { mk_term (Op(mk_ident("=", rr $loc(tok)), [e1; e2])) (rr $loc) Un} + (* non-associativity of COLON_EQUALS is currently not well handled by fsyacc which reports a s/r conflict *) + (* see https:/ /github.com/fsprojects/FsLexYacc/issues/39 *) + | e1=tmEqWith(X) tok=COLON_EQUALS e2=tmEqWith(X) + { mk_term (Op(mk_ident(":=", rr $loc(tok)), [e1; e2])) (rr $loc) Un} + + | e1=tmEqWith(X) op=PIPE_LEFT e2=tmEqWith(X) + { mk_term (Op(mk_ident("<|", rr $loc(op)), [e1; e2])) (rr $loc) Un} + + | e1=tmEqWith(X) op=PIPE_RIGHT e2=tmEqWith(X) + { mk_term (Op(mk_ident("|>", rr $loc(op)), [e1; e2])) (rr $loc) Un} + + + | e1=tmEqWith(X) op=operatorInfix0ad12 e2=tmEqWith(X) + { mk_term (Op(op, [e1; e2])) (rr2 $loc(e1) $loc(e2)) Un} + | e1=tmEqWith(X) tok=MINUS e2=tmEqWith(X) + { mk_term (Op(mk_ident("-", rr $loc(tok)), [e1; e2])) (rr $loc) Un} + | tok=MINUS e=tmEqWith(X) + { mk_uminus e (rr $loc(tok)) (rr $loc) Expr } + | QUOTE e=tmEqWith(X) + { mk_term (Quote (e, Dynamic)) (rr $loc) Un } + | BACKTICK e=tmEqWith(X) + { mk_term (Quote (e, Static)) (rr $loc) Un } + | BACKTICK_AT e=atomicTerm + { let q = mk_term (Quote (e, Dynamic)) (rr $loc) Un in + mk_term (Antiquote q) (rr $loc) Un } + | BACKTICK_HASH e=atomicTerm + { mk_term (Antiquote e) (rr $loc) Un } + | e=tmNoEqWith(X) + { e } + +%inline recordTerm: + | LBRACE e=recordExp RBRACE { e } + +tmNoEqWith(X): + | e1=tmNoEqWith(X) COLON_COLON e2=tmNoEqWith(X) + { consTerm (rr $loc) e1 e2 } + | e1=tmNoEqWith(X) AMP e2=tmNoEqWith(X) + { + let dom = + match extract_named_refinement false e1 with + | Some (x, t, f) -> + let dom = mkRefinedBinder x t true f (rr $loc(e1)) None [] in + Inl dom + | _ -> + Inr e1 + in + let tail = e2 in + let dom, res = + match tail.tm with + | Sum(dom', res) -> dom::dom', res + | _ -> [dom], tail + in + mk_term (Sum(dom, res)) (rr2 $loc(e1) $loc(e2)) Type_level + } + | e1=tmNoEqWith(X) op=OPINFIX3 e2=tmNoEqWith(X) + { mk_term (Op(mk_ident(op, rr $loc(op)), [e1; e2])) (rr $loc) Un} + | e1=tmNoEqWith(X) BACKTICK op=tmNoEqWith(X) BACKTICK e2=tmNoEqWith(X) + { mkApp op [ e1, Infix; 
e2, Nothing ] (rr $loc) } + | e1=tmNoEqWith(X) op=OPINFIX4 e2=tmNoEqWith(X) + { mk_term (Op(mk_ident(op, rr $loc(op)), [e1; e2])) (rr $loc) Un} + | e=recordTerm { e } + | BACKTICK_PERC e=atomicTerm + { mk_term (VQuote e) (rr $loc) Un } + | op=TILDE e=atomicTerm + { mk_term (Op(mk_ident (op, rr $loc(op)), [e])) (rr $loc) Formula } + | e=X { e } + +binop_name: + | o=OPINFIX0a { mk_ident (o, rr $loc) } + | o=OPINFIX0b { mk_ident (o, rr $loc) } + | o=OPINFIX0c { mk_ident (o, rr $loc) } + | o=EQUALS { mk_ident ("=", rr $loc) } + | o=OPINFIX0d { mk_ident (o, rr $loc) } + | o=OPINFIX1 { mk_ident (o, rr $loc) } + | o=OPINFIX2 { mk_ident (o, rr $loc) } + | o=OPINFIX3 { mk_ident (o, rr $loc) } + | o=OPINFIX4 { mk_ident (o, rr $loc) } + | o=IMPLIES { mk_ident ("==>", rr $loc) } + | o=CONJUNCTION { mk_ident ("/\\", rr $loc) } + | o=DISJUNCTION { mk_ident ("\\/", rr $loc) } + | o=IFF { mk_ident ("<==>", rr $loc) } + | o=COLON_EQUALS { mk_ident (":=", rr $loc) } + | o=COLON_COLON { mk_ident ("::", rr $loc) } + | o=OP_MIXFIX_ASSIGNMENT { mk_ident (o, rr $loc) } + | o=OP_MIXFIX_ACCESS { mk_ident (o, rr $loc) } + +tmEqNoRefinement: + | e=tmEqWith(appTermNoRecordExp) { e } + +tmEq: + | e=tmEqWith(tmRefinement) { e } + +tmNoEq: + | e=tmNoEqWith(tmRefinement) { e } + +tmRefinement: + | id=lidentOrUnderscore COLON e=appTermNoRecordExp phi_opt=refineOpt + { + let t = match phi_opt with + | None -> NamedTyp(id, e) + | Some phi -> Refine(mk_binder (Annotated(id, e)) (rr2 $loc(id) $loc(e)) Type_level None, phi) + in mk_term t (rr2 $loc(id) $loc(phi_opt)) Type_level + } + | e=appTerm { e } + +refineOpt: + | phi_opt=option(LBRACE phi=formula RBRACE {phi}) {phi_opt} + +%inline formula: + | e=noSeqTerm { {e with level=Formula} } + +%public +recordExp: + | record_fields=right_flexible_nonempty_list(SEMICOLON, simpleDef) + { mk_term (Record (None, record_fields)) (rr $loc(record_fields)) Expr } + | e=appTerm WITH record_fields=right_flexible_nonempty_list(SEMICOLON, simpleDef) + { mk_term (Record (Some e, record_fields)) (rr2 $loc(e) $loc(record_fields)) Expr } + +simpleDef: + | e=separated_pair(qlidentOrOperator, EQUALS, noSeqTerm) { e } + | lid=qlidentOrOperator { lid, mk_term (Name (lid_of_ids [ ident_of_lid lid ])) (rr $loc(lid)) Un } + +appTermArgs: + | h=maybeHash a=onlyTrailingTerm { [h, a] } + | h=maybeHash a=indexingTerm rest=appTermArgs { (h, a) :: rest } + | h=maybeHash a=recordTerm rest=appTermArgs { (h, a) :: rest } + | a=universe rest=appTermArgs { a :: rest } + | { [] } + +appTermCommon(args): + | head=indexingTerm args=args + { mkApp head (map (fun (x,y) -> (y,x)) args) (rr2 $loc(head) $loc(args)) } + +%public +appTerm: + | t=onlyTrailingTerm { t } + | t=appTermCommon(appTermArgs) { t } + +appTermArgsNoRecordExp: + | h=maybeHash a=indexingTerm rest=appTermArgsNoRecordExp { (h, a) :: rest } + | a=universe rest=appTermArgsNoRecordExp { a :: rest } + | { [] } + +%public +appTermNoRecordExp: + | t=appTermCommon(appTermArgsNoRecordExp) {t} + +%inline maybeHash: + | { Nothing } + | HASH { Hash } + +%public +indexingTerm: + | e1=atomicTermNotQUident op_exprs=nonempty_list(dotOperator) + { + List.fold_left (fun e1 (op, e2, r) -> + mk_term (Op(op, [ e1; e2 ])) (union_ranges e1.range r) Expr) + e1 op_exprs + } + | e=atomicTerm + { e } + +%public +atomicTerm: + | x=atomicTermNotQUident + { x } + | x=atomicTermQUident + { x } + | x=opPrefixTerm(atomicTermQUident) + { x } + +trailingTerm: + | x=atomicTerm + { x } + | x=onlyTrailingTerm + { x } + +onlyTrailingTerm: + | FUN pats=nonempty_list(patternOrMultibinder) 
RARROW e=term + { mk_term (Abs(flatten pats, e)) (rr2 $loc($1) $loc(e)) Un } + | q=quantifier bs=binders DOT trigger=trigger e=term + { + match bs with + | [] -> + raise_error_text (rr2 $loc(q) $loc($3)) Fatal_MissingQuantifierBinder "Missing binders for a quantifier" + | _ -> + let idents = idents_of_binders bs (rr2 $loc(q) $loc($3)) in + mk_term (q (bs, (idents, trigger), e)) (rr2 $loc(q) $loc(e)) Formula + } + +atomicTermQUident: + | id=quident + { + let t = Name id in + let e = mk_term t (rr $loc(id)) Un in + e + } + | id=quident DOT_LPAREN t=term RPAREN + { + mk_term (LetOpen (id, t)) (rr2 $loc(id) $loc($4)) Expr + } + +atomicTermNotQUident: + | UNDERSCORE { mk_term Wild (rr $loc) Un } + | tv=tvar { mk_term (Tvar tv) (rr $loc) Type_level } + | c=constant { mk_term (Const c) (rr $loc) Expr } + | x=opPrefixTerm(atomicTermNotQUident) + { x } + | LPAREN op=operator RPAREN + { mk_term (Op(op, [])) (rr2 $loc($1) $loc($3)) Un } + | LENS_PAREN_LEFT e0=tmEq COMMA el=separated_nonempty_list(COMMA, tmEq) LENS_PAREN_RIGHT + { mkDTuple (e0::el) (rr2 $loc($1) $loc($5)) } + | e=projectionLHS field_projs=list(DOT id=qlident {id}) + { fold_left (fun e lid -> mk_term (Project(e, lid)) (rr2 $loc(e) $loc(field_projs)) Expr ) e field_projs } + | BEGIN e=term END + { e } + +(* Tm: atomicTermQUident or atomicTermNotQUident *) +opPrefixTerm(Tm): + | op=OPPREFIX e=Tm + { mk_term (Op(mk_ident(op, rr $loc(op)), [e])) (rr2 $loc(op) $loc(e)) Expr } + + +projectionLHS: + | e=qidentWithTypeArgs(qlident, option(fsTypeArgs)) + { e } + | e=qidentWithTypeArgs(quident, some(fsTypeArgs)) + { e } + | LPAREN e=term sort_opt=option(pair(hasSort, simpleTerm)) RPAREN + { + (* Note: we have to keep the parentheses here. Consider t * u * v. This + * is parsed as Op2( *, Op2( *, t, u), v). The desugaring phase then looks + * up * and figures out that it hasn't been overridden, meaning that + * it's a tuple type, and proceeds to flatten out the whole tuple. Now + * consider (t * u) * v. We keep the Paren node, which prevents the + * flattening from happening, hence ensuring the proper type is + * generated. *) + let e1 = match sort_opt with + | None -> e + | Some (level, t) -> mk_term (Ascribed(e,{t with level=level},None,false)) (rr2 $loc($1) $loc($4)) level + in mk_term (Paren e1) (rr2 $loc($1) $loc($4)) (e.level) + } + | LBRACK es=semiColonTermList RBRACK + { mkListLit (rr2 $loc($1) $loc($3)) es } + | SEQ_BANG_LBRACK es=semiColonTermList RBRACK + { mkSeqLit (rr2 $loc($1) $loc($3)) es } + | PERCENT_LBRACK es=semiColonTermList RBRACK + { mk_term (LexList es) (rr2 $loc($1) $loc($3)) Type_level } + | BANG_LBRACE es=separated_list(COMMA, appTerm) RBRACE + { mkRefSet (rr2 $loc($1) $loc($3)) es } + | ns=quident QMARK_DOT id=lident + { mk_term (Projector (ns, id)) (rr2 $loc(ns) $loc(id)) Expr } + | lid=quident QMARK + { mk_term (Discrim lid) (rr2 $loc(lid) $loc($2)) Un } + +fsTypeArgs: + | TYP_APP_LESS targs=separated_nonempty_list(COMMA, atomicTerm) TYP_APP_GREATER + {targs} + +(* Qid : quident or qlident. + TypeArgs : option(fsTypeArgs) or someFsTypeArgs. 
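+
+   Hedged illustration (the concrete surface syntax depends on how the lexer
+   emits TYP_APP_LESS/TYP_APP_GREATER, which is not shown in this file): a bare
+   head parses to a Name or Var node, while a head followed by
+   TYP_APP_LESS t1 COMMA t2 TYP_APP_GREATER is rebuilt as
+   mkFsTypApp head [t1; t2] over the combined range, as the rule below shows.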
*) +qidentWithTypeArgs(Qid,TypeArgs): + | id=Qid targs_opt=TypeArgs + { + let t = if is_name id then Name id else Var id in + let e = mk_term t (rr $loc(id)) Un in + match targs_opt with + | None -> e + | Some targs -> mkFsTypApp e targs (rr2 $loc(id) $loc(targs_opt)) + } + +hasSort: + (* | SUBTYPE { Expr } *) + | SUBKIND { Type_level } (* Remove with stratify *) + + (* use flexible_list *) +%inline semiColonTermList: + | l=right_flexible_list(SEMICOLON, noSeqTerm) { l } + +constant: + | LPAREN_RPAREN { Const_unit } + | n=INT + { + if snd n then + log_issue_text (rr $loc) Error_OutOfRange "This number is outside the allowable range for representable integer constants"; + Const_int (fst n, None) + } + | c=CHAR { Const_char c } + | s=STRING { Const_string (s, rr $loc) } + | TRUE { Const_bool true } + | FALSE { Const_bool false } + | r=REAL { Const_real r } + | n=UINT8 { Const_int (n, Some (Unsigned, Int8)) } + | n=INT8 + { + if snd n then + log_issue_text (rr $loc) Error_OutOfRange "This number is outside the allowable range for 8-bit signed integers"; + Const_int (fst n, Some (Signed, Int8)) + } + | n=UINT16 { Const_int (n, Some (Unsigned, Int16)) } + | n=INT16 + { + if snd n then + log_issue_text (rr $loc) Error_OutOfRange "This number is outside the allowable range for 16-bit signed integers"; + Const_int (fst n, Some (Signed, Int16)) + } + | n=UINT32 { Const_int (n, Some (Unsigned, Int32)) } + | n=INT32 + { + if snd n then + log_issue_text (rr $loc) Error_OutOfRange "This number is outside the allowable range for 32-bit signed integers"; + Const_int (fst n, Some (Signed, Int32)) + } + | n=UINT64 { Const_int (n, Some (Unsigned, Int64)) } + | n=INT64 + { + if snd n then + log_issue_text (rr $loc) Error_OutOfRange "This number is outside the allowable range for 64-bit signed integers"; + Const_int (fst n, Some (Signed, Int64)) + } + | n=SIZET { Const_int (n, Some (Unsigned, Sizet)) } + (* TODO : What about reflect ? There is also a constant representing it *) + | REIFY { Const_reify None } + | RANGE_OF { Const_range_of } + | SET_RANGE_OF { Const_set_range_of } + + +universe: + | UNIV_HASH ua=atomicUniverse { (UnivApp, ua) } + +universeFrom: + | ua=atomicUniverse { ua } + | u1=universeFrom op_plus=OPINFIX2 u2=universeFrom + { + if op_plus <> "+" + then log_issue_text (rr $loc(u1)) Error_OpPlusInUniverse ("The operator " ^ op_plus ^ " was found in universe context." + ^ "The only allowed operator in that context is +."); + mk_term (Op(mk_ident (op_plus, rr $loc(op_plus)), [u1 ; u2])) (rr2 $loc(u1) $loc(u2)) Expr + } + | max=ident us=nonempty_list(atomicUniverse) + { + if string_of_id max <> string_of_lid max_lid + then log_issue_text (rr $loc(max)) Error_InvalidUniverseVar ("A lower case ident " ^ string_of_id max ^ + " was found in a universe context. 
" ^ + "It should be either max or a universe variable 'usomething."); + let max = mk_term (Var (lid_of_ids [max])) (rr $loc(max)) Expr in + mkApp max (map (fun u -> u, Nothing) us) (rr $loc) + } + +atomicUniverse: + | UNDERSCORE + { mk_term Wild (rr $loc) Expr } + | n=INT + { + if snd n then + log_issue_text (rr $loc) Error_OutOfRange ("This number is outside the allowable range for representable integer constants"); + mk_term (Const (Const_int (fst n, None))) (rr $loc(n)) Expr + } + | u=lident { mk_term (Uvar u) (range_of_id u) Expr } + | LPAREN u=universeFrom RPAREN + { u (*mk_term (Paren u) (rr2 $loc($1) $loc($3)) Expr*) } + +warn_error_list: + | e=warn_error EOF { e } + +warn_error: + | f=flag r=range + { [(f, r)] } + | f=flag r=range e=warn_error + { (f, r) :: e } + +flag: + | op=OPINFIX1 + { if op = "@" then CAlwaysError else failwith (format1 "unexpected token %s in warn-error list" op)} + | op=OPINFIX2 + { if op = "+" then CWarning else failwith (format1 "unexpected token %s in warn-error list" op)} + | MINUS + { CSilent } + +range: + | i=INT + { format2 "%s..%s" (fst i) (fst i) } + | r=RANGE + { r } + + +/******************************************************************************/ +/* Miscellanous, tools */ +/******************************************************************************/ + +string: + | s=STRING { s } + +%inline operator: + | op=OPPREFIX { mk_ident (op, rr $loc) } + | op=binop_name { op } + | op=TILDE { mk_ident (op, rr $loc) } + | op=and_op {op} + | op=let_op {op} + | op=quantifier_op {op} + +%inline quantifier_op: + | op=EXISTS_OP { mk_ident ("exists" ^ op, rr $loc) } + | op=FORALL_OP { mk_ident ("forall" ^ op, rr $loc) } + +%inline and_op: + | op=AND_OP { mk_ident ("and" ^ op, rr $loc) } +%inline let_op: + | op=LET_OP { mk_ident ("let" ^ op, rr $loc) } + +/* These infix operators have a lower precedence than EQUALS */ +%inline operatorInfix0ad12: + | op=OPINFIX0a + | op=OPINFIX0b + | op=OPINFIX0c + | op=OPINFIX0d + | op=OPINFIX1 + | op=OPINFIX2 + { mk_ident (op, rr $loc) } + +%inline dotOperator: + | op=DOT_LPAREN e=term RPAREN { mk_ident (".()", rr $loc(op)), e, rr2 $loc(op) $loc($3) } + | op=DOT_LBRACK e=term RBRACK { mk_ident (".[]", rr $loc(op)), e, rr2 $loc(op) $loc($3) } + | op=DOT_LBRACK_BAR e=term BAR_RBRACK { mk_ident (".[||]", rr $loc(op)), e, rr2 $loc(op) $loc($3) } + | op=DOT_LENS_PAREN_LEFT e=term LENS_PAREN_RIGHT { mk_ident (".(||)", rr $loc(op)), e, rr2 $loc(op) $loc($3) } + +some(X): + | x=X { Some x } + +right_flexible_list(SEP, X): + | { [] } + | x=X { [x] } + | x=X SEP xs=right_flexible_list(SEP, X) { x :: xs } + +right_flexible_nonempty_list(SEP, X): + | x=X { [x] } + | x=X SEP xs=right_flexible_list(SEP, X) { x :: xs } + +reverse_left_flexible_list(delim, X): +| (* nothing *) + { [] } +| x = X + { [x] } +| xs = reverse_left_flexible_list(delim, X) delim x = X + { x :: xs } + +%inline left_flexible_list(delim, X): + xs = reverse_left_flexible_list(delim, X) + { List.rev xs } + +reverse_left_flexible_nonempty_list(delim, X): +| ioption(delim) x = X + { [x] } +| xs = reverse_left_flexible_nonempty_list(delim, X) delim x = X + { x :: xs } + +%inline left_flexible_nonempty_list(delim, X): + xs = reverse_left_flexible_nonempty_list(delim, X) + { List.rev xs } diff --git a/stage0/fstar-lib/FStarC_Parser_ParseIt.ml b/stage0/fstar-lib/FStarC_Parser_ParseIt.ml new file mode 100644 index 00000000000..792cd095f58 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Parser_ParseIt.ml @@ -0,0 +1,452 @@ +module U = FStarC_Compiler_Util +open FStarC_Errors +open 
FStarC_Syntax_Syntax +open Lexing +open FStarC_Sedlexing +open FStarC_Errors_Codes +module Codes = FStarC_Errors_Codes +module Msg = FStarC_Errors_Msg + +type filename = string + +type input_frag = { + frag_fname:filename; + frag_text:string; + frag_line:Prims.int; + frag_col:Prims.int +} + +let resetLexbufPos filename lexbuf = + lexbuf.cur_p <- { + pos_fname= filename; + pos_cnum = 0; + pos_bol = 0; + pos_lnum = 1 } + +let setLexbufPos filename lexbuf line col = + lexbuf.cur_p <- { + pos_fname= filename; + pos_cnum = col; + pos_bol = 0; + pos_lnum = line } + +module Path = BatPathGen.OfString + +let find_file filename = + match FStarC_Find.find_file filename with + | Some s -> + s + | None -> + raise_error_text FStarC_Compiler_Range.dummyRange Fatal_ModuleOrFileNotFound (U.format1 "Unable to find file: %s\n" filename) + +let vfs_entries : (U.time_of_day * string) U.smap = U.smap_create (Z.of_int 1) + +let read_vfs_entry fname = + U.smap_try_find vfs_entries (U.normalize_file_path fname) + +let add_vfs_entry fname contents = + U.smap_add vfs_entries (U.normalize_file_path fname) (U.get_time_of_day (), contents) + +let get_file_last_modification_time filename = + match read_vfs_entry filename with + | Some (mtime, _contents) -> mtime + | None -> U.get_file_last_modification_time filename + +let read_physical_file (filename: string) = + (* BatFile.with_file_in uses Unix.openfile (which isn't available in + js_of_ocaml) instead of Pervasives.open_in, so we don't use it here. *) + try + let channel = open_in_bin filename in + BatPervasives.finally + (fun () -> close_in channel) + (fun channel -> really_input_string channel (in_channel_length channel)) + channel + with e -> + raise_error_text FStarC_Compiler_Range.dummyRange Fatal_UnableToReadFile (U.format1 "Unable to read file %s\n" filename) + +let read_file (filename:string) = + let debug = FStarC_Compiler_Debug.any () in + match read_vfs_entry filename with + | Some (_mtime, contents) -> + if debug then U.print1 "Reading in-memory file %s\n" filename; + filename, contents + | None -> + let filename = find_file filename in + if debug then U.print1 "Opening file %s\n" filename; + filename, read_physical_file filename + +let fs_extensions = [".fs"; ".fsi"] +let fst_extensions = [".fst"; ".fsti"] +let interface_extensions = [".fsti"; ".fsi"] + +let valid_extensions () = + fst_extensions @ if FStarC_Options.ml_ish () then fs_extensions else [] + +let has_extension file extensions = + FStar_List.existsb (U.ends_with file) extensions + +let check_extension fn = + if (not (has_extension fn (valid_extensions ()))) then + let message = U.format1 "Unrecognized extension '%s'" fn in + raise_error_text FStarC_Compiler_Range.dummyRange Fatal_UnrecognizedExtension + (if has_extension fn fs_extensions + then message ^ " (pass --MLish to process .fs and .fsi files)" + else message) + +type parse_frag = + | Filename of filename + | Toplevel of input_frag + | Incremental of input_frag + | Fragment of input_frag + +type parse_error = (Codes.error_code * Msg.error_message * FStarC_Compiler_Range.range) + + +type code_fragment = { + range: FStarC_Compiler_Range.range; + code: string; +} + +type 'a incremental_result = + ('a * code_fragment) list * (string * FStarC_Compiler_Range.range) list * parse_error option + +type parse_result = + | ASTFragment of (FStarC_Parser_AST.inputFragment * (string * FStarC_Compiler_Range.range) list) + | IncrementalFragment of FStarC_Parser_AST.decl incremental_result + | Term of FStarC_Parser_AST.term + | ParseError of 
parse_error + +module BU = FStarC_Compiler_Util +module Range = FStarC_Compiler_Range +module MHL = MenhirLib.Convert + +let range_of_positions filename start fin = + let start_pos = FStarC_Parser_Util.pos_of_lexpos start in + let end_pos = FStarC_Parser_Util.pos_of_lexpos fin in + FStarC_Compiler_Range.mk_range filename start_pos end_pos + +let err_of_parse_error filename lexbuf tag = + let pos = lexbuf.cur_p in + let tag = + match tag with + | None -> "Syntax error" + | Some tag -> tag + in + Fatal_SyntaxError, + Msg.mkmsg tag, + range_of_positions filename pos pos + +let string_of_lexpos lp = + let r = range_of_positions "" lp lp in + FStarC_Compiler_Range.string_of_range r + +let parse_incremental_decls + filename + (contents:string) + lexbuf + (lexer:unit -> 'token * Lexing.position * Lexing.position) + (range_of: 'semantic_value -> FStarC_Compiler_Range.range) + (parse_one: + (Lexing.lexbuf -> 'token) -> + Lexing.lexbuf -> + ('semantic_value list * FStarC_Sedlexing.snap option) option) +: 'semantic_value list * parse_error option += let parse_one = MenhirLib.Convert.Simplified.traditional2revised parse_one in + let err_of_parse_error tag = err_of_parse_error filename lexbuf tag in + let open FStar_Pervasives in + let push_decls ds decls = List.fold_left (fun decls d -> d::decls) decls ds in + let rec parse decls = + let start_pos = current_pos lexbuf in + let d = + try + (* Reset the gensym between decls, to ensure determinism, + otherwise, every _ is parsed as different name *) + FStarC_GenSym.reset_gensym(); + Inl (parse_one lexer) + with + | FStarC_Errors.Error(e, msg, r, ctx) -> + Inr (e, msg, r) + + | e -> + Inr (err_of_parse_error None) + in + match d with + | Inl None -> + List.rev decls, None + | Inl (Some (ds, snap_opt)) -> + (* The parser may advance the lexer beyond the decls last token. + E.g., in `let f x = 0 let g = 1`, we will have parsed the decl for `f` + but the lexer will have advanced to `let ^ g ...` since the + parser will have looked ahead. + Rollback the lexer one token for declarations whose syntax + requires such lookahead to complete a production. + *) + let _ = + match snap_opt with + | None -> + rollback lexbuf + | Some p -> + restore_snapshot lexbuf p + in + parse (push_decls ds decls) + | Inr err -> + List.rev decls, Some err + in + parse [] + +let contents_at contents = + let lines = U.splitlines contents in + let split_line_at_col line col = + if col > 0 + then ( + (* Don't index directly into the string, since this is a UTF-8 string. + Convert first to a list of characters, index into that, and then convert + back to a string *) + let chars = FStar_String.list_of_string line in + if col <= List.length chars + then ( + let prefix, suffix = FStarC_Compiler_Util.first_N (Z.of_int col) chars in + Some (FStar_String.string_of_list prefix, + FStar_String.string_of_list suffix) + ) + else ( + None + ) + ) + else None + in + let line_from_col line pos = + match split_line_at_col line pos with + | None -> None + | Some (_, p) -> Some p + in + let line_to_col line pos = + match split_line_at_col line pos with + | None -> None + | Some (p, _) -> Some p + in + (* Find the raw content of the input from the line of the start_pos to the end_pos. 
+ This is used by Interactive.Incremental to record exactly the raw content of the + fragment that was checked *) + fun (range:Range.range) -> + (* discard all lines until the start line *) + let start_pos = Range.start_of_range range in + let end_pos = Range.end_of_range range in + let start_line = Z.to_int (Range.line_of_pos start_pos) in + let start_col = Z.to_int (Range.col_of_pos start_pos) in + let end_line = Z.to_int (Range.line_of_pos end_pos) in + let end_col = Z.to_int (Range.col_of_pos end_pos) in + let suffix = + FStarC_Compiler_Util.nth_tail + (Z.of_int (if start_line > 0 then start_line - 1 else 0)) + lines + in + (* Take all the lines between the start and end lines *) + let text, rest = + FStarC_Compiler_Util.first_N + (Z.of_int (end_line - start_line)) + suffix + in + let text = + match text with + | first_line::rest -> ( + match line_from_col first_line start_col with + | Some s -> s :: rest + | _ -> text + ) + | _ -> text + in + let text = + (* For the last line itself, take the prefix of it up to the character of the end_pos *) + match rest with + | last::_ -> ( + match line_to_col last end_col with + | None -> text + | Some last -> + (* The last line is also the first line *) + match text with + | [] -> ( + match line_from_col last start_col with + | None -> [last] + | Some l -> [l] + ) + | _ -> text @ [last] + ) + | _ -> text + in + { range; + code = FStar_String.concat "\n" text } + + +let parse_incremental_fragment + filename + (contents:string) + lexbuf + (lexer:unit -> 'token * Lexing.position * Lexing.position) + (range_of: 'semantic_value -> FStarC_Compiler_Range.range) + (parse_one: + (Lexing.lexbuf -> 'token) -> + Lexing.lexbuf -> + ('semantic_value list * FStarC_Sedlexing.snap option) option) +: 'semantic_value incremental_result += let res = parse_incremental_decls filename contents lexbuf lexer range_of parse_one in + let comments = FStarC_Parser_Util.flush_comments () in + let contents_at = contents_at contents in + let decls, err_opt = res in + let decls = List.map (fun d -> d, contents_at (range_of d)) decls in + decls, comments, err_opt + +let parse_fstar_incrementally +: FStarC_Parser_AST_Util.extension_lang_parser += let f = + fun (s:string) (r:FStarC_Compiler_Range.range) -> + let open FStar_Pervasives in + let open FStarC_Compiler_Range in + let lexbuf = + create s + (file_of_range r) + (Z.to_int (line_of_pos (start_of_range r))) + (Z.to_int (col_of_pos (start_of_range r))) + in + let filename = file_of_range r in + let contents = s in + let lexer () = + let tok = FStarC_Parser_LexFStar.token lexbuf in + (tok, lexbuf.start_p, lexbuf.cur_p) + in + try + let decls, err_opt = + parse_incremental_decls + filename + contents + lexbuf + lexer + (fun (d:FStarC_Parser_AST.decl) -> d.drange) + FStarC_Parser_Parse.oneDeclOrEOF + in + match err_opt with + | None -> Inr decls + | Some (_, msg, r) -> + let open FStarC_Parser_AST in + let err_decl = mk_decl Unparseable r [] in + Inr (decls @ [err_decl]) + with + | FStarC_Errors.Error(e, msg, r, _ctx) -> + let msg = FStarC_Errors_Msg.rendermsg msg in + let err : FStarC_Parser_AST_Util.error_message = { message = msg; range = r } in + Inl err + | e -> + let pos = FStarC_Parser_Util.pos_of_lexpos (lexbuf.cur_p) in + let r = FStarC_Compiler_Range.mk_range filename pos pos in + let err : FStarC_Parser_AST_Util.error_message = { message = "Syntax error parsing #lang-fstar block: "; range = r } in + Inl err + in + { parse_decls = f } +let _ = FStarC_Parser_AST_Util.register_extension_lang_parser "fstar" 
parse_fstar_incrementally + +type lang_opts = string option + +let parse_lang lang fn = + match fn with + | Filename _ -> + failwith "parse_lang: only in incremental mode" + | Incremental s + | Toplevel s + | Fragment s -> + try + let frag_pos = FStarC_Compiler_Range.mk_pos s.frag_line s.frag_col in + let rng = FStarC_Compiler_Range.mk_range s.frag_fname frag_pos frag_pos in + let decls = FStarC_Parser_AST_Util.parse_extension_lang lang s.frag_text rng in + let comments = FStarC_Parser_Util.flush_comments () in + ASTFragment (Inr decls, comments) + with + | FStarC_Errors.Error(e, msg, r, _ctx) -> + ParseError (e, msg, r) + +let parse (lang_opt:lang_opts) fn = + FStarC_Parser_Util.warningHandler := (function + | e -> Printf.printf "There was some warning (TODO)\n"); + match lang_opt with + | Some lang -> parse_lang lang fn + | _ -> + let lexbuf, filename, contents = + match fn with + | Filename f -> + check_extension f; + let f', contents = read_file f in + (try create contents f' 1 0, f', contents + with _ -> raise_error_text FStarC_Compiler_Range.dummyRange Fatal_InvalidUTF8Encoding (U.format1 "File %s has invalid UTF-8 encoding." f')) + | Incremental s + | Toplevel s + | Fragment s -> + create s.frag_text s.frag_fname (Z.to_int s.frag_line) (Z.to_int s.frag_col), "", s.frag_text + in + + let lexer () = + let tok = FStarC_Parser_LexFStar.token lexbuf in + (tok, lexbuf.start_p, lexbuf.cur_p) + in + try + match fn with + | Filename _ + | Toplevel _ -> begin + let fileOrFragment = + MenhirLib.Convert.Simplified.traditional2revised FStarC_Parser_Parse.inputFragment lexer + in + let frags = match fileOrFragment with + | FStar_Pervasives.Inl modul -> + if has_extension filename interface_extensions + then match modul with + | FStarC_Parser_AST.Module(l,d) -> + FStar_Pervasives.Inl (FStarC_Parser_AST.Interface(l, d, true)) + | _ -> failwith "Impossible" + else FStar_Pervasives.Inl modul + | _ -> fileOrFragment + in ASTFragment (frags, FStarC_Parser_Util.flush_comments ()) + end + + | Incremental i -> + let decls, comments, err_opt = + parse_incremental_fragment + filename + i.frag_text + lexbuf + lexer + (fun (d:FStarC_Parser_AST.decl) -> d.drange) + FStarC_Parser_Parse.oneDeclOrEOF + in + IncrementalFragment(decls, comments, err_opt) + + | Fragment _ -> + Term (MenhirLib.Convert.Simplified.traditional2revised FStarC_Parser_Parse.term lexer) + with + | FStarC_Errors.Empty_frag -> + ASTFragment (FStar_Pervasives.Inr [], []) + + | FStarC_Errors.Error(e, msg, r, _ctx) -> + ParseError (e, msg, r) + + | e -> + (* + | Parsing.Parse_error as _e + | FStarC_Parser_Parse.MenhirBasics.Error as _e -> + *) + ParseError (err_of_parse_error filename lexbuf None) + + +(** Parsing of command-line error/warning/silent flags. 
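+
+    Hedged illustration of the accepted syntax (the numeric codes below are
+    placeholders, and we assume the lexer tokenizes '@' as OPINFIX1, '+' as
+    OPINFIX2 and 'n..m' as RANGE, as the flag/range rules in the grammar
+    expect): a setting such as
+
+      "@101+110..115-240"
+
+    parses to the flag list
+
+      [(CAlwaysError, "101..101"); (CWarning, "110..115"); (CSilent, "240..240")]
+
+    which FStarC_Errors.update_flags then applies below.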
*) +let parse_warn_error s = + let user_flags = + if s = "" + then [] + else + let lexbuf = FStarC_Sedlexing.create s "" 0 (String.length s) in + let lexer() = let tok = FStarC_Parser_LexFStar.token lexbuf in + (tok, lexbuf.start_p, lexbuf.cur_p) + in + try + MenhirLib.Convert.Simplified.traditional2revised FStarC_Parser_Parse.warn_error_list lexer + with e -> + failwith (U.format1 "Malformed warn-error list: %s" s) + in + FStarC_Errors.update_flags user_flags diff --git a/ocaml/fstar-lib/FStarC_Parser_ParseIt.mli b/stage0/fstar-lib/FStarC_Parser_ParseIt.mli similarity index 100% rename from ocaml/fstar-lib/FStarC_Parser_ParseIt.mli rename to stage0/fstar-lib/FStarC_Parser_ParseIt.mli diff --git a/stage0/fstar-lib/FStarC_Parser_Utf8.ml b/stage0/fstar-lib/FStarC_Parser_Utf8.ml new file mode 100644 index 00000000000..ad7c081844d --- /dev/null +++ b/stage0/fstar-lib/FStarC_Parser_Utf8.ml @@ -0,0 +1,164 @@ +(* + Originally part of the ulex package with the following license: + + Copyright 2005 by Alain Frisch. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*) + + +exception MalFormed + +(* cf http://www.faqs.org/rfcs/rfc3629.html *) + +let width = Array.make 256 (-1) +let () = + for i = 0 to 127 do width.(i) <- 1 done; + for i = 192 to 223 do width.(i) <- 2 done; + for i = 224 to 239 do width.(i) <- 3 done; + for i = 240 to 247 do width.(i) <- 4 done + +let next s i = + match s.[i] with + | '\000'..'\127' as c -> + Char.code c + | '\192'..'\223' as c -> + let n1 = Char.code c in + let n2 = Char.code s.[i+1] in + if (n2 lsr 6 != 0b10) then raise MalFormed; + ((n1 land 0x1f) lsl 6) lor (n2 land 0x3f) + | '\224'..'\239' as c -> + let n1 = Char.code c in + let n2 = Char.code s.[i+1] in + let n3 = Char.code s.[i+2] in + if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) then raise MalFormed; + let p = + ((n1 land 0x0f) lsl 12) lor ((n2 land 0x3f) lsl 6) lor (n3 land 0x3f) + in + if (p >= 0xd800) && (p <= 0xdf00) then raise MalFormed; + p + | '\240'..'\247' as c -> + let n1 = Char.code c in + let n2 = Char.code s.[i+1] in + let n3 = Char.code s.[i+2] in + let n4 = Char.code s.[i+3] in + if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) || (n4 lsr 6 != 0b10) + then raise MalFormed; + ((n1 land 0x07) lsl 18) lor ((n2 land 0x3f) lsl 12) lor + ((n3 land 0x3f) lsl 6) lor (n4 land 0x3f) + | _ -> raise MalFormed + + +(* With this implementation, a truncated code point will result + in Stream.Failure, not in MalFormed. 
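+
+   A worked example of the decoding arithmetic (standard UTF-8, added purely
+   for illustration and not specific to this file): the two-byte sequence
+   0xC3 0xA9 encodes U+00E9 (e with acute accent); both `next` above and
+   `from_stream` below compute
+
+     ((0xC3 land 0x1f) lsl 6) lor (0xA9 land 0x3f)
+     = 0xC0 lor 0x29
+     = 0xE9
+
+   after checking that the continuation byte satisfies 0xA9 lsr 6 = 0b10.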
*) + +let from_stream s = + match Stream.next s with + | '\000'..'\127' as c -> + Char.code c + | '\192'..'\223' as c -> + let n1 = Char.code c in + let n2 = Char.code (Stream.next s) in + if (n2 lsr 6 != 0b10) then raise MalFormed; + ((n1 land 0x1f) lsl 6) lor (n2 land 0x3f) + | '\224'..'\239' as c -> + let n1 = Char.code c in + let n2 = Char.code (Stream.next s) in + let n3 = Char.code (Stream.next s) in + if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) then raise MalFormed; + ((n1 land 0x0f) lsl 12) lor ((n2 land 0x3f) lsl 6) lor (n3 land 0x3f) + | '\240'..'\247' as c -> + let n1 = Char.code c in + let n2 = Char.code (Stream.next s) in + let n3 = Char.code (Stream.next s) in + let n4 = Char.code (Stream.next s) in + if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) || (n4 lsr 6 != 0b10) + then raise MalFormed; + ((n1 land 0x07) lsl 18) lor ((n2 land 0x3f) lsl 12) lor + ((n3 land 0x3f) lsl 6) lor (n4 land 0x3f) + | _ -> raise MalFormed + + + +let compute_len s pos bytes = + let rec aux n i = + if i >= pos + bytes then if i = pos + bytes then n else raise MalFormed + else + let w = width.(Char.code s.[i]) in + if w > 0 then aux (succ n) (i + w) + else raise MalFormed + in + aux 0 pos + +let rec blit_to_int s spos a apos n = + if n > 0 then begin + a.(apos) <- next s spos; + blit_to_int s (spos + width.(Char.code s.[spos])) a (succ apos) (pred n) + end + +let to_int_array s pos bytes = + let n = compute_len s pos bytes in + let a = Array.make n 0 in + blit_to_int s pos a 0 n; + a + +(**************************) + +let width_code_point p = + if p <= 0x7f then 1 + else if p <= 0x7ff then 2 + else if p <= 0xffff then 3 + else if p <= 0x10ffff then 4 + else raise MalFormed + +let store b p = + if p <= 0x7f then + Buffer.add_char b (Char.chr p) + else if p <= 0x7ff then ( + Buffer.add_char b (Char.chr (0xc0 lor (p lsr 6))); + Buffer.add_char b (Char.chr (0x80 lor (p land 0x3f))) + ) + else if p <= 0xffff then ( + if (p >= 0xd800 && p < 0xe000) then raise MalFormed; + Buffer.add_char b (Char.chr (0xe0 lor (p lsr 12))); + Buffer.add_char b (Char.chr (0x80 lor ((p lsr 6) land 0x3f))); + Buffer.add_char b (Char.chr (0x80 lor (p land 0x3f))) + ) + else if p <= 0x10ffff then ( + Buffer.add_char b (Char.chr (0xf0 lor (p lsr 18))); + Buffer.add_char b (Char.chr (0x80 lor ((p lsr 12) land 0x3f))); + Buffer.add_char b (Char.chr (0x80 lor ((p lsr 6) land 0x3f))); + Buffer.add_char b (Char.chr (0x80 lor (p land 0x3f))) + ) + else raise MalFormed + + +let from_int_array a apos len = + let b = Buffer.create (len * 4) in + let rec aux apos len = + if len > 0 then (store b a.(apos); aux (succ apos) (pred len)) + else Buffer.contents b in + aux apos len + +let stream_from_char_stream s = + Stream.from + (fun _ -> + try Some (from_stream s) + with Stream.Failure -> None) diff --git a/stage0/fstar-lib/FStarC_Parser_Util.ml b/stage0/fstar-lib/FStarC_Parser_Util.ml new file mode 100644 index 00000000000..c6c03febb0f --- /dev/null +++ b/stage0/fstar-lib/FStarC_Parser_Util.ml @@ -0,0 +1,44 @@ +open FStarC_Compiler_Range +open Lexing + +(* This brings into scope enough the translation of F# type names into the + * corresponding OCaml type names; the reason for that is that we massage + * parse.fsy (using sed) into parse.mly; but, we don't rename types. 
*) +include FStarC_BaseTypes +type single = float +type decimal = int +type bytes = byte array + +let parseState = () + +let pos_of_lexpos (p:position) = + mk_pos (Z.of_int p.pos_lnum) (Z.of_int (p.pos_cnum - p.pos_bol)) + +let mksyn_range (p1:position) p2 = + mk_range p1.pos_fname (pos_of_lexpos p1) (pos_of_lexpos p2) + +let translate_range (pos : Lexing.position * Lexing.position) = + mksyn_range (fst pos) (snd pos) + +let translate_range2 (pos1 : Lexing.position * Lexing.position) (pos2 : Lexing.position * Lexing.position) = + mksyn_range (fst pos1) (snd pos2) + +exception WrappedError of exn * range +exception ReportedError +exception StopProcessing + +let warningHandler = ref (fun (e:exn) -> + FStarC_Compiler_Util.print_string "no warning handler installed\n" ; + FStarC_Compiler_Util.print_any e; ()) +let errorHandler = ref (fun (e:exn) -> + FStarC_Compiler_Util.print_string "no warning handler installed\n" ; + FStarC_Compiler_Util.print_any e; ()) +let errorAndWarningCount = ref 0 +let errorR exn = incr errorAndWarningCount; match exn with StopProcessing | ReportedError -> raise exn | _ -> !errorHandler exn +let warning exn = incr errorAndWarningCount; match exn with StopProcessing | ReportedError -> raise exn | _ -> !warningHandler exn + +let comments : (string * FStarC_Compiler_Range.range) list ref = ref [] +let add_comment x = comments := x :: !comments +let flush_comments () = + let lexed_comments = !comments in + comments := []; lexed_comments diff --git a/stage0/fstar-lib/FStarC_Platform.ml b/stage0/fstar-lib/FStarC_Platform.ml new file mode 100644 index 00000000000..038ed9060a9 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Platform.ml @@ -0,0 +1,17 @@ +type sys = +| Windows +| Posix + +let system = + if Sys.win32 || Sys.cygwin then + Windows + else + Posix + +let exe name = + if Sys.unix then + name + else + name^".exe" + +let is_fstar_compiler_using_ocaml = true diff --git a/stage0/fstar-lib/FStarC_Pprint.ml b/stage0/fstar-lib/FStarC_Pprint.ml new file mode 100644 index 00000000000..bbe281ae431 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Pprint.ml @@ -0,0 +1,87 @@ +(* + Copyright 2016 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(* prettyprint.fsti's OCaml implementation is just a thin wrapper around + Francois Pottier's pprint package. 
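+
+   The definitions below are mostly conversions: they turn the
+   arbitrary-precision integers (Z.t) that extracted F* code passes in into
+   native OCaml ints before delegating to PPrint. For instance (illustrative
+   only, mirroring the wrappers defined in this file):
+
+     nest (Z.of_int 2) doc   is   PPrint.nest 2 doc
+     blank (Z.of_int 4)      is   PPrint.blank 4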
*) +include PPrint + +(* FIXME(adl) also print the char in a comment if it's representable *) +let doc_of_char c = PPrint.OCaml.char (Char.chr c) +let doc_of_string = PPrint.string +let doc_of_bool b = PPrint.string (string_of_bool b) +let blank_buffer_doc = [ ("", PPrint.empty) ] + +let substring s ofs len = + PPrint.substring s (Z.to_int ofs) (Z.to_int len) + +let fancystring s apparent_length = + PPrint.fancystring s (Z.to_int apparent_length) + +let fancysubstring s ofs len apparent_length = + PPrint.fancysubstring s (Z.to_int ofs) (Z.to_int len) (Z.to_int apparent_length) + +let blank n = PPrint.blank (Z.to_int n) + +let break_ n = PPrint.break (Z.to_int n) + +let op_Hat_Hat = PPrint.(^^) +let op_Hat_Slash_Hat = PPrint.(^/^) + +let nest j doc = PPrint.nest (Z.to_int j) doc + +let long_left_arrow = PPrint.string "<--" +let larrow = PPrint.string "<-" +let rarrow = PPrint.string "->" + +let repeat n doc = PPrint.repeat (Z.to_int n) doc + +let hang n doc = PPrint.hang (Z.to_int n) doc + +let prefix n b left right = + PPrint.prefix (Z.to_int n) (Z.to_int b) left right + +let jump n b right = + PPrint.jump (Z.to_int n) (Z.to_int b) right + +let infix n b middle left right = + PPrint.infix (Z.to_int n) (Z.to_int b) middle left right + +let surround n b opening contents closing = + PPrint.surround (Z.to_int n) (Z.to_int b) opening contents closing + +let soft_surround n b opening contents closing = + PPrint.soft_surround (Z.to_int n) (Z.to_int b) opening contents closing + +let surround_separate n b void_ opening sep closing docs = + PPrint.surround_separate (Z.to_int n) (Z.to_int b) void_ opening sep closing docs + +let surround_separate_map n b void_ opening sep closing f xs = + PPrint.surround_separate_map (Z.to_int n) (Z.to_int b) void_ opening sep closing f xs + +(* Wrap up ToBuffer.pretty. *) +let pretty_string rfrac width doc = + let buf = Buffer.create 0 in + PPrint.ToBuffer.pretty rfrac (Z.to_int width) buf doc; + Buffer.contents buf + +(* Wrap up ToChannel.pretty *) +let pretty_out_channel rfrac width doc ch = + PPrint.ToChannel.pretty rfrac (Z.to_int width) ch doc; + flush ch + +(* A simple renderer, with some default values. 
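+ namely a ribbon fraction of 1.0 and a line width of 80 columns; it simply + calls pretty_string above with those arguments.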
*) +let render (doc:document) : string = + pretty_string 1.0 (Z.of_int 80) doc diff --git a/stage0/fstar-lib/FStarC_Reflection_Types.ml b/stage0/fstar-lib/FStarC_Reflection_Types.ml new file mode 100644 index 00000000000..f0a3c0a42da --- /dev/null +++ b/stage0/fstar-lib/FStarC_Reflection_Types.ml @@ -0,0 +1,26 @@ +open FStar_All + +(* TODO: make this an F* module, no need to drop to OCaml for this *) + +type binder = FStarC_Syntax_Syntax.binder +type bv = FStarC_Syntax_Syntax.bv +type namedv = bv +type term = FStarC_Syntax_Syntax.term +type env = FStarC_TypeChecker_Env.env +type fv = FStarC_Syntax_Syntax.fv +type comp = FStarC_Syntax_Syntax.comp +type sigelt = FStarC_Syntax_Syntax.sigelt +type ctx_uvar_and_subst = FStarC_Syntax_Syntax.ctx_uvar_and_subst +type optionstate = FStarC_Options.optionstate +type letbinding = FStarC_Syntax_Syntax.letbinding + +type universe_uvar = FStarC_Syntax_Syntax.universe_uvar +type universe = FStarC_Syntax_Syntax.universe + +type name = string list +type ident = FStarC_Ident.ident +type univ_name = ident +type typ = term +type binders = binder list +type match_returns_ascription = FStarC_Syntax_Syntax.match_returns_ascription +type decls = sigelt list diff --git a/stage0/fstar-lib/FStarC_Sedlexing.ml b/stage0/fstar-lib/FStarC_Sedlexing.ml new file mode 100644 index 00000000000..eb4520bd949 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Sedlexing.ml @@ -0,0 +1,126 @@ +(** +A custom version of Sedlexing enhanced with +lc, bol and fname position tracking and +specialized for UTF-8 string inputs +(the parser driver always reads whole files) +**) + +exception Error + +module L = Lexing +type pos = L.position + +type lexbuf = { + buf: int array; + len: int; + + mutable cur: int; + mutable cur_p: pos; + mutable start: int; + mutable start_p: pos; + + mutable mark: int; + mutable mark_p: pos; + mutable mark_val: int; +} + +let get_buf lb = lb.buf +let get_cur lb = lb.cur +let get_start lb = lb.start + +(* N.B. 
the offsets are for interactive mode + we want to be able to interpret a fragment as if it was part + of a larger file and report absolute error positions *) +let create (s:string) fn loffset coffset = + let a = FStarC_Parser_Utf8.to_int_array s 0 (String.length s) in + let start_p = { + L.pos_fname = fn; + L.pos_cnum = coffset; + L.pos_bol = 0; + L.pos_lnum = loffset; } + in { + buf = a; + len = Array.length a; + + cur = 0; + cur_p = start_p; + + start = 0; + start_p = start_p; + + mark = 0; + mark_p = start_p; + mark_val = 0; + } + +let current_pos b = b.cur_p + +let start b = + b.mark <- b.cur; + b.mark_val <- (-1); + b.mark_p <- b.cur_p; + b.start <- b.cur; + b.start_p <- b.cur_p + +let mark b i = + b.mark <- b.cur; + b.mark_p <- b.cur_p; + b.mark_val <- i + +let backtrack b = + b.cur <- b.mark; + b.cur_p <- b.mark_p; + b.mark_val + +type snap = int * pos + +let snapshot b = b.start, b.start_p +let restore_snapshot b (cur, cur_p) = + b.cur <- cur; + b.cur_p <- cur_p + +let next b = + if b.cur = b.len then None + else + let c = b.buf.(b.cur) in + (b.cur <- b.cur + 1; + b.cur_p <- {b.cur_p with L.pos_cnum = b.cur_p.L.pos_cnum + 1}; Some (Uchar.of_int c)) + +let new_line b = + b.cur_p <- { b.cur_p with + L.pos_lnum = b.cur_p.L.pos_lnum + 1; + L.pos_bol = b.cur_p.L.pos_cnum; + } + +let range b = (b.start_p, b.cur_p) + +let ulexeme lexbuf = + Array.sub lexbuf.buf lexbuf.start (lexbuf.cur - lexbuf.start) + +let rollback b = + b.cur <- b.start; + b.cur_p <- b.start_p + +let lexeme lexbuf = + FStarC_Parser_Utf8.from_int_array lexbuf.buf lexbuf.start (lexbuf.cur - lexbuf.start) + +let lookahead b pos = + if b.len <= pos then "" + else FStarC_Parser_Utf8.from_int_array b.buf pos (b.len - pos) + +let source_file b = + b.cur_p.L.pos_fname + +let current_line b = + b.cur_p.Lexing.pos_lnum + +(* Since sedlex 2.4, we need to expose Sedlexing.__private_next_int + (see #2343) + + From https://github.com/ocaml-community/sedlex/blob/268c553f474457574e22701679d68f66aa771551/src/lib/sedlexing.mli#L154-L161 + [next] and [__private__next_int] have the same doc description, + the only difference is the return type *) +let __private__next_int b = + match next b with + | Some v -> Uchar.to_int v + | None -> -1 diff --git a/stage0/fstar-lib/FStarC_StringBuffer.ml b/stage0/fstar-lib/FStarC_StringBuffer.ml new file mode 100644 index 00000000000..a35ba05d3a4 --- /dev/null +++ b/stage0/fstar-lib/FStarC_StringBuffer.ml @@ -0,0 +1,7 @@ +(* See FStar.StringBuffer.fsi *) +type t = Buffer.t +let create (i:FStarC_BigInt.t) = Buffer.create (FStarC_BigInt.to_int i) +let add s t = Buffer.add_string t s; t +let contents = Buffer.contents +let clear t = Buffer.clear t; t +let output_channel = Buffer.output_buffer diff --git a/stage0/fstar-lib/FStarC_Syntax_TermHashTable.ml b/stage0/fstar-lib/FStarC_Syntax_TermHashTable.ml new file mode 100644 index 00000000000..3e018dd7a62 --- /dev/null +++ b/stage0/fstar-lib/FStarC_Syntax_TermHashTable.ml @@ -0,0 +1,73 @@ +module S = FStarC_Syntax_Syntax +module P = FStarC_Profiling +module BU = FStarC_Compiler_Util +let now () = BatUnix.gettimeofday () +let record_time f = + let start = now () in + let res = f () in + let elapsed = (now()) -. start in + res, int_of_float (elapsed *. 1000.0) +let eq_term_ctr = ref (0, 0) +let num_eq_term_calls = ref (0, 0) +let incr (r:(int * int) ref) (time:int) = let n, t = !r in r := (n + 1, time + t) +module HashKey = + struct + type t = S.term + let equal (x:t) (y:t) = FStarC_Syntax_Hash.equal_term x y +(* This function is often hot.
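+ BatHashtbl calls it to compare keys whenever two terms hash into the same + bucket, so it can account for a large share of lookup time.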
Its useful to enable the profiling code when debugging + P.profile (fun _ -> + let res, time = record_time (fun _ -> FStarC_Syntax_Hash.equal_term x y) in + incr num_eq_term_calls time; + if res + then ( incr eq_term_ctr time; true ) + else ( false)) + None + "FStar.Syntax.TermHashTable.equal" +*) + let hash (x:t) = FStarC_Syntax_Hash.ext_hash_term x +(* P.profile (fun _ -> + None + "FStar.Syntax.TermHashTable.hash" +*) + end +module HT = BatHashtbl.Make(HashKey) + +type 'a hashtable = 'a HT.t + +let create (n:Z.t) = HT.create (Z.to_int n) +module Print = FStarC_Syntax_Print + +let insert (key: S.term) (v:'a) (ht:'a hashtable) = HT.add ht key v + +let lookup (key: S.term) (ht:'a hashtable) : 'a option = + try + let l = HT.find ht key in + Some l + with + | Not_found -> None + +let reset_counters (x:'a hashtable) = + eq_term_ctr := (0,0); + num_eq_term_calls := (0,0) + +let clear (x:'a hashtable) = + HT.clear x; + reset_counters x + +let print_stats (x:'a hashtable) : unit = + let stats = HT.stats x in + let string_of_ctr ctr = let n, t = !ctr in BU.format2 "%s in %s ms" (string_of_int n) (string_of_int t) in + BU.print4 "THT Statistics { num_bindings = %s; max_bucket_length = %s; num_eq_term_calls = %s; eq_term_ctr = %s }\n" + (string_of_int stats.num_bindings) + (string_of_int stats.max_bucket_length) + (string_of_ctr num_eq_term_calls) + (string_of_ctr eq_term_ctr) + +(* Histogram + (BatString.concat "; " + (List.map (function Some x -> x) + (List.filter + (function None -> false | _ -> true) + (Array.to_list ( + (Array.mapi (fun i n -> if n = 0 then None else Some ("(" ^ (string_of_int i) ^", "^ (string_of_int n)^ ")")) stats.bucket_histogram)))))) +*) diff --git a/stage0/fstar-lib/FStarC_Tactics_Native.ml b/stage0/fstar-lib/FStarC_Tactics_Native.ml new file mode 100644 index 00000000000..10c405034fb --- /dev/null +++ b/stage0/fstar-lib/FStarC_Tactics_Native.ml @@ -0,0 +1,102 @@ +open FStarC_Compiler_Range +open FStarC_Tactics_Types +open FStarC_Tactics_Result +open FStarC_Tactics_Monad +open FStarC_Syntax_Syntax + +module N = FStarC_TypeChecker_Normalize +module C = FStarC_TypeChecker_Cfg +module BU = FStarC_Compiler_Util +module NBETerm = FStarC_TypeChecker_NBETerm +module O = FStarC_Options +module PO = FStarC_TypeChecker_Primops +module POB = FStarC_TypeChecker_Primops_Base + +(* These definitions are ≡ to the ones generated by F*'s extraction of the + tactic effect. We need them here to break a circular dependency between the + compiler and ulib (cf. tactics meeting of 2017-08-03). 
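+ + As a sketch of the from_tactic_N family defined below: bump is plain function + composition, so for instance + + from_tactic_2 t = fun x -> from_tactic_1 (t x) + + i.e. arguments are peeled off one at a time until from_tactic_0 wraps the + underlying proofstate function with mk_tac.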
*) +type 'a __tac = FStarC_Tactics_Types.proofstate -> 'a __result + +let r = dummyRange + +type itac = + POB.psc -> FStarC_Syntax_Embeddings_Base.norm_cb -> universes -> args -> term option +type nbe_itac = + NBETerm.nbe_cbs -> universes -> NBETerm.args -> NBETerm.t option + +type native_primitive_step = + { name: FStarC_Ident.lid; + arity: Prims.int; + strong_reduction_ok: bool; + tactic: itac} + +let perr s = if FStarC_Compiler_Debug.any () then BU.print_error s +let perr1 s x = if FStarC_Compiler_Debug.any () then BU.print1_error s x + +let compiled_tactics: native_primitive_step list ref = ref [] + +let list_all () = + if FStarC_Options.no_plugins () + then [] + else !compiled_tactics + +let register_plugin (s: string) (arity: Prims.int) (t: itac) (n:nbe_itac) = + let step = + { POB.name=FStarC_Ident.lid_of_str s; + POB.arity=arity; + POB.auto_reflect=None; + POB.strong_reduction_ok=true; + POB.requires_binder_substitution = false; + POB.renorm_after = false; + POB.interpretation=t; + POB.univ_arity=Z.of_int 0; + POB.interpretation_nbe=n; + } + in + FStarC_TypeChecker_Cfg.register_plugin step; + (* perr1 "Registered plugin %s\n" s; *) + () + +let register_tactic (s: string) (arity: Prims.int) (t: itac)= + let step = + { name=FStarC_Ident.lid_of_str s; + arity = arity; + strong_reduction_ok=true; + tactic=t } in + compiled_tactics := step :: !compiled_tactics; + (* perr1 "Registered tactic %s\n" s; *) + () + +let bump (f : 'b -> 'c) (g : 'a -> 'b) : 'a -> 'c = + fun x -> f (g x) + +let from_tactic_0 (tau: 'b __tac) : 'b tac = + (fun (ps: proofstate) -> + (* perr "Entering native tactic\n"; *) + tau ps) |> mk_tac + +let from_tactic_1 t = bump from_tactic_0 t +let from_tactic_2 t = bump from_tactic_1 t +let from_tactic_3 t = bump from_tactic_2 t +let from_tactic_4 t = bump from_tactic_3 t +let from_tactic_5 t = bump from_tactic_4 t +let from_tactic_6 t = bump from_tactic_5 t +let from_tactic_7 t = bump from_tactic_6 t +let from_tactic_8 t = bump from_tactic_7 t +let from_tactic_9 t = bump from_tactic_8 t +let from_tactic_10 t = bump from_tactic_9 t +let from_tactic_11 t = bump from_tactic_10 t +let from_tactic_12 t = bump from_tactic_11 t +let from_tactic_13 t = bump from_tactic_12 t +let from_tactic_14 t = bump from_tactic_13 t +let from_tactic_15 t = bump from_tactic_14 t +let from_tactic_16 t = bump from_tactic_15 t +let from_tactic_17 t = bump from_tactic_16 t +let from_tactic_18 t = bump from_tactic_17 t +let from_tactic_19 t = bump from_tactic_18 t +let from_tactic_20 t = bump from_tactic_19 t +let from_tactic_21 t = bump from_tactic_20 t +let from_tactic_22 t = bump from_tactic_21 t +let from_tactic_23 t = bump from_tactic_22 t +let from_tactic_24 t = bump from_tactic_23 t +let from_tactic_25 t = bump from_tactic_24 t diff --git a/ocaml/fstar-lib/FStarC_Tactics_Unseal.ml b/stage0/fstar-lib/FStarC_Tactics_Unseal.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Tactics_Unseal.ml rename to stage0/fstar-lib/FStarC_Tactics_Unseal.ml diff --git a/ocaml/fstar-lib/FStarC_Tactics_V1_Builtins.ml b/stage0/fstar-lib/FStarC_Tactics_V1_Builtins.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Tactics_V1_Builtins.ml rename to stage0/fstar-lib/FStarC_Tactics_V1_Builtins.ml diff --git a/ocaml/fstar-lib/FStarC_Tactics_V2_Builtins.ml b/stage0/fstar-lib/FStarC_Tactics_V2_Builtins.ml similarity index 100% rename from ocaml/fstar-lib/FStarC_Tactics_V2_Builtins.ml rename to stage0/fstar-lib/FStarC_Tactics_V2_Builtins.ml diff --git a/stage0/fstar-lib/FStarC_Unionfind.ml 
b/stage0/fstar-lib/FStarC_Unionfind.ml new file mode 100644 index 00000000000..aa13f1e8d8d --- /dev/null +++ b/stage0/fstar-lib/FStarC_Unionfind.ml @@ -0,0 +1,161 @@ +(* Persistent union-find implementation adapted from + https://www.lri.fr/~filliatr/puf/ *) + +open FStarC_Compiler_Effect +open FStarC_Compiler_Util + +(* Persistent arrays *) +type 'a pa_t = 'a data ref +and 'a data = + | PArray of 'a array + | PDiff of int * 'a * 'a pa_t + +let pa_create n v = mk_ref (PArray (Array.make n v)) + +let pa_init n f = mk_ref (PArray (Array.init n f)) + +let rec pa_rerootk t k = match !t with + | PArray _ -> k () + | PDiff (i, v, t') -> + pa_rerootk t' (fun () -> begin match !t' with + | PArray a -> + let v' = a.(i) in + a.(i) <- v; + t := PArray a; + t' := PDiff (i, v', t) + | PDiff _ -> failwith "Impossible" end; k()) + +let pa_reroot t = pa_rerootk t (fun () -> ()) + +let pa_get t i = match !t with + | PArray a -> a.(i) + | PDiff _ -> + pa_reroot t; + begin match !t with + | PArray a -> a.(i) + | PDiff _ -> failwith "Impossible" end + +let pa_set (t: 'a pa_t) (i: int) (v: 'a): 'a pa_t = + pa_reroot t; + match !t with + | PArray a -> + let old = a.(i) in + a.(i) <- v; + let res = mk_ref (PArray a) in + t := PDiff (i, old, res); + res + | PDiff _ -> failwith "Impossible" + +(* apply impure function from Array to a persistent array *) +let impure f t = + pa_reroot t; + match !t with PArray a -> f a | PDiff _ -> failwith "Impossible" + +let pa_length t = impure Array.length t + +(* double the array whenever its bounds are reached *) +let pa_new t x l empty = + pa_reroot t; + match !t with + | PArray a -> + if (pa_length t == l) then begin + let arr_tail = Array.make l empty in + arr_tail.(0) <- x; + t := PArray (Array.append a arr_tail) + end else + a.(l) <- x + | PDiff _ -> failwith "Impossible" + + +(* Union-find implementation based on persistent arrays *) +type 'a puf = { + (* array of parents of each node + contains either path or root element *) + mutable parent: (int, 'a) FStar_Pervasives.either pa_t; (* mutable to allow path compression *) + ranks: int pa_t; + (* keep track of how many elements are allocated in the array *) + count: int ref +} +type 'a p_uvar = P of int + [@printer fun fmt x -> Format.pp_print_string fmt "!!!"] + [@@deriving yojson,show] + (* failwith "cannot pretty-print a unification variable" *) + +let puf_empty () = + { parent = pa_create 2 (FStar_Pervasives.Inl (-1)) ; + ranks = pa_create 2 0; + count = mk_ref 0 } + +let puf_fresh (h: 'a puf) (x: 'a): 'a p_uvar = + let count = !(h.count) in + pa_new h.parent (FStar_Pervasives.Inr x) count (FStar_Pervasives.Inl (-1)); + pa_new h.ranks 0 count 0; + h.count := count + 1; + P count + +(* implements path compression, returns new array *) +let rec puf_find_aux f i = + match (pa_get f i) with + | FStar_Pervasives.Inl fi -> + let f, r, id = puf_find_aux f fi in + let f = pa_set f i (FStar_Pervasives.Inl id) in + f, r, id + | FStar_Pervasives.Inr x -> f, FStar_Pervasives.Inr x, i + +(* return both rep and previous version of parent array *) +let puf_find_i (h: 'a puf) (x: 'a p_uvar) = + let x = match x with | P a -> a in + let f, rx, i = puf_find_aux h.parent x in + h.parent <- f; + match rx with + | FStar_Pervasives.Inr r -> r, i + | FStar_Pervasives.Inl _ -> failwith "Impossible" + +(* only return the equivalence class *) +let puf_id' (h:'a puf) (x:'a p_uvar) : int = + let _, i = puf_find_i h x in + i + +let puf_id (h: 'a puf) (x: 'a p_uvar): Prims.int = + Z.of_int (puf_id' h x) + +let puf_unique_id (x: 'a p_uvar): 
Prims.int = + match x with + | P a -> Z.of_int a + +let puf_fromid (_:'a puf) (id : Prims.int) : 'a p_uvar = + P (Z.to_int id) + +(* only return the rep *) +let puf_find (h: 'a puf) (x: 'a p_uvar) = + let v, _ = puf_find_i h x in + v + +let puf_equivalent (h:'a puf) (x:'a p_uvar) (y:'a p_uvar) = + (puf_id' h x) = (puf_id' h y) + +let puf_change (h:'a puf) (x:'a p_uvar) (v:'a) : 'a puf = + let i = puf_id' h x in + let hp = pa_set h.parent i (FStar_Pervasives.Inr v) in + { h with parent = hp} + +let puf_union (h: 'a puf) (x: 'a p_uvar) (y: 'a p_uvar) = + let ix = puf_id' h x in + let iy = puf_id' h y in + if ix!=iy then begin + let rxc = pa_get h.ranks ix in + let ryc = pa_get h.ranks iy in + if rxc > ryc then + { parent = pa_set h.parent iy (FStar_Pervasives.Inl ix); + ranks = h.ranks; + count = h.count} + else if rxc < ryc then + { parent = pa_set h.parent ix (FStar_Pervasives.Inl iy); + ranks = h.ranks; + count = h.count} + else + { parent = pa_set h.parent iy (FStar_Pervasives.Inl ix); + ranks = pa_set h.ranks ix (rxc+1); + count = h.count } + end else + h diff --git a/ocaml/fstar-lib/FStar_All.ml b/stage0/fstar-lib/FStar_All.ml similarity index 100% rename from ocaml/fstar-lib/FStar_All.ml rename to stage0/fstar-lib/FStar_All.ml diff --git a/ocaml/fstar-lib/FStar_Buffer.ml b/stage0/fstar-lib/FStar_Buffer.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Buffer.ml rename to stage0/fstar-lib/FStar_Buffer.ml diff --git a/ocaml/fstar-lib/FStar_Bytes.ml b/stage0/fstar-lib/FStar_Bytes.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Bytes.ml rename to stage0/fstar-lib/FStar_Bytes.ml diff --git a/ocaml/fstar-lib/FStar_Char.ml b/stage0/fstar-lib/FStar_Char.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Char.ml rename to stage0/fstar-lib/FStar_Char.ml diff --git a/ocaml/fstar-lib/FStar_CommonST.ml b/stage0/fstar-lib/FStar_CommonST.ml similarity index 100% rename from ocaml/fstar-lib/FStar_CommonST.ml rename to stage0/fstar-lib/FStar_CommonST.ml diff --git a/ocaml/fstar-lib/FStar_Date.ml b/stage0/fstar-lib/FStar_Date.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Date.ml rename to stage0/fstar-lib/FStar_Date.ml diff --git a/ocaml/fstar-lib/FStar_Exn.ml b/stage0/fstar-lib/FStar_Exn.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Exn.ml rename to stage0/fstar-lib/FStar_Exn.ml diff --git a/ocaml/fstar-lib/FStar_Float.ml b/stage0/fstar-lib/FStar_Float.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Float.ml rename to stage0/fstar-lib/FStar_Float.ml diff --git a/ocaml/fstar-lib/FStar_Ghost.ml b/stage0/fstar-lib/FStar_Ghost.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Ghost.ml rename to stage0/fstar-lib/FStar_Ghost.ml diff --git a/ocaml/fstar-lib/FStar_Heap.ml b/stage0/fstar-lib/FStar_Heap.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Heap.ml rename to stage0/fstar-lib/FStar_Heap.ml diff --git a/ocaml/fstar-lib/FStar_HyperStack_All.ml b/stage0/fstar-lib/FStar_HyperStack_All.ml similarity index 100% rename from ocaml/fstar-lib/FStar_HyperStack_All.ml rename to stage0/fstar-lib/FStar_HyperStack_All.ml diff --git a/ocaml/fstar-lib/FStar_HyperStack_IO.ml b/stage0/fstar-lib/FStar_HyperStack_IO.ml similarity index 100% rename from ocaml/fstar-lib/FStar_HyperStack_IO.ml rename to stage0/fstar-lib/FStar_HyperStack_IO.ml diff --git a/ocaml/fstar-lib/FStar_HyperStack_ST.ml b/stage0/fstar-lib/FStar_HyperStack_ST.ml similarity index 100% rename from ocaml/fstar-lib/FStar_HyperStack_ST.ml rename to 
stage0/fstar-lib/FStar_HyperStack_ST.ml diff --git a/ocaml/fstar-lib/FStar_IO.ml b/stage0/fstar-lib/FStar_IO.ml similarity index 100% rename from ocaml/fstar-lib/FStar_IO.ml rename to stage0/fstar-lib/FStar_IO.ml diff --git a/ocaml/fstar-lib/FStar_ImmutableArray.ml b/stage0/fstar-lib/FStar_ImmutableArray.ml similarity index 100% rename from ocaml/fstar-lib/FStar_ImmutableArray.ml rename to stage0/fstar-lib/FStar_ImmutableArray.ml diff --git a/ocaml/fstar-lib/FStar_ImmutableArray_Base.ml b/stage0/fstar-lib/FStar_ImmutableArray_Base.ml similarity index 100% rename from ocaml/fstar-lib/FStar_ImmutableArray_Base.ml rename to stage0/fstar-lib/FStar_ImmutableArray_Base.ml diff --git a/ocaml/fstar-lib/FStar_Issue.ml b/stage0/fstar-lib/FStar_Issue.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Issue.ml rename to stage0/fstar-lib/FStar_Issue.ml diff --git a/ocaml/fstar-lib/FStar_List.ml b/stage0/fstar-lib/FStar_List.ml similarity index 100% rename from ocaml/fstar-lib/FStar_List.ml rename to stage0/fstar-lib/FStar_List.ml diff --git a/ocaml/fstar-lib/FStar_List_Tot_Base.ml b/stage0/fstar-lib/FStar_List_Tot_Base.ml similarity index 100% rename from ocaml/fstar-lib/FStar_List_Tot_Base.ml rename to stage0/fstar-lib/FStar_List_Tot_Base.ml diff --git a/ocaml/fstar-lib/FStar_Monotonic_Heap.ml b/stage0/fstar-lib/FStar_Monotonic_Heap.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Monotonic_Heap.ml rename to stage0/fstar-lib/FStar_Monotonic_Heap.ml diff --git a/ocaml/fstar-lib/FStar_Mul.ml b/stage0/fstar-lib/FStar_Mul.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Mul.ml rename to stage0/fstar-lib/FStar_Mul.ml diff --git a/ocaml/fstar-lib/FStar_Option.ml b/stage0/fstar-lib/FStar_Option.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Option.ml rename to stage0/fstar-lib/FStar_Option.ml diff --git a/ocaml/fstar-lib/FStar_Pervasives_Native.ml b/stage0/fstar-lib/FStar_Pervasives_Native.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Pervasives_Native.ml rename to stage0/fstar-lib/FStar_Pervasives_Native.ml diff --git a/ocaml/fstar-lib/FStar_Pprint.ml b/stage0/fstar-lib/FStar_Pprint.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Pprint.ml rename to stage0/fstar-lib/FStar_Pprint.ml diff --git a/ocaml/fstar-lib/FStar_Range.ml b/stage0/fstar-lib/FStar_Range.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Range.ml rename to stage0/fstar-lib/FStar_Range.ml diff --git a/ocaml/fstar-lib/FStar_Reflection_Typing_Builtins.ml b/stage0/fstar-lib/FStar_Reflection_Typing_Builtins.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Reflection_Typing_Builtins.ml rename to stage0/fstar-lib/FStar_Reflection_Typing_Builtins.ml diff --git a/ocaml/fstar-lib/FStar_ST.ml b/stage0/fstar-lib/FStar_ST.ml similarity index 100% rename from ocaml/fstar-lib/FStar_ST.ml rename to stage0/fstar-lib/FStar_ST.ml diff --git a/ocaml/fstar-lib/FStar_Sealed.ml b/stage0/fstar-lib/FStar_Sealed.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Sealed.ml rename to stage0/fstar-lib/FStar_Sealed.ml diff --git a/stage0/fstar-lib/FStar_String.ml b/stage0/fstar-lib/FStar_String.ml new file mode 100644 index 00000000000..9dcff4a94df --- /dev/null +++ b/stage0/fstar-lib/FStar_String.ml @@ -0,0 +1,43 @@ +let make i c = BatUTF8.init (Z.to_int i) (fun _ -> BatUChar.chr c) +let strcat s t = s ^ t +let op_Hat s t = strcat s t + +(* restore pre-2.11 BatString.nsplit behavior, + see https://github.com/ocaml-batteries-team/batteries-included/issues/845 *) +let 
batstring_nsplit s t = + if s = "" then [] else BatString.split_on_string t s + +let split seps s = + let rec repeat_split acc = function + | [] -> acc + | sep::seps -> + let usep = BatUTF8.init 1 (fun _ -> BatUChar.chr sep) in + let l = BatList.flatten (BatList.map (fun x -> batstring_nsplit x usep) acc) in + repeat_split l seps in + repeat_split [s] seps +let compare x y = Z.of_int (BatString.compare x y) +type char = FStar_Char.char +let concat = BatString.concat +let length s = Z.of_int (BatUTF8.length s) +let strlen s = length s + +let substring s i j = + BatUTF8.init (Z.to_int j) (fun k -> BatUTF8.get s (k + Z.to_int i)) +let sub = substring + +let get s i = BatUChar.code (BatUTF8.get s (Z.to_int i)) +let collect f s = + let r = ref "" in + BatUTF8.iter (fun c -> r := !r ^ f (BatUChar.code c)) s; !r +let lowercase = BatString.lowercase_ascii +let uppercase = BatString.uppercase_ascii +let escaped = BatString.escaped +let index = get +exception Found of int +let index_of s c = + let c = BatUChar.chr c in + try let _ = BatUTF8.iteri (fun c' i -> if c = c' then raise (Found i) else ()) s in Z.of_int (-1) + with Found i -> Z.of_int i +let list_of_string s = BatList.init (BatUTF8.length s) (fun i -> BatUChar.code (BatUTF8.get s i)) +let string_of_list l = BatUTF8.init (BatList.length l) (fun i -> BatUChar.chr (BatList.at l i)) +let string_of_char (c:char) = BatString.of_char (Char.chr c) diff --git a/ocaml/fstar-lib/FStar_Tcp.ml b/stage0/fstar-lib/FStar_Tcp.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Tcp.ml rename to stage0/fstar-lib/FStar_Tcp.ml diff --git a/ocaml/fstar-lib/FStar_UInt8.ml b/stage0/fstar-lib/FStar_UInt8.ml similarity index 100% rename from ocaml/fstar-lib/FStar_UInt8.ml rename to stage0/fstar-lib/FStar_UInt8.ml diff --git a/ocaml/fstar-lib/FStar_Udp.ml b/stage0/fstar-lib/FStar_Udp.ml similarity index 100% rename from ocaml/fstar-lib/FStar_Udp.ml rename to stage0/fstar-lib/FStar_Udp.ml diff --git a/ocaml/fstar-lib/dune b/stage0/fstar-lib/dune similarity index 100% rename from ocaml/fstar-lib/dune rename to stage0/fstar-lib/dune diff --git a/ocaml/fstar-lib/generated/FStarC_Basefiles.ml b/stage0/fstar-lib/generated/FStarC_Basefiles.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Basefiles.ml rename to stage0/fstar-lib/generated/FStarC_Basefiles.ml diff --git a/ocaml/fstar-lib/generated/FStarC_CheckedFiles.ml b/stage0/fstar-lib/generated/FStarC_CheckedFiles.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_CheckedFiles.ml rename to stage0/fstar-lib/generated/FStarC_CheckedFiles.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Binders.ml b/stage0/fstar-lib/generated/FStarC_Class_Binders.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Binders.ml rename to stage0/fstar-lib/generated/FStarC_Class_Binders.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Deq.ml b/stage0/fstar-lib/generated/FStarC_Class_Deq.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Deq.ml rename to stage0/fstar-lib/generated/FStarC_Class_Deq.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_HasRange.ml b/stage0/fstar-lib/generated/FStarC_Class_HasRange.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_HasRange.ml rename to stage0/fstar-lib/generated/FStarC_Class_HasRange.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Hashable.ml b/stage0/fstar-lib/generated/FStarC_Class_Hashable.ml similarity index 100% rename from 
ocaml/fstar-lib/generated/FStarC_Class_Hashable.ml rename to stage0/fstar-lib/generated/FStarC_Class_Hashable.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Listlike.ml b/stage0/fstar-lib/generated/FStarC_Class_Listlike.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Listlike.ml rename to stage0/fstar-lib/generated/FStarC_Class_Listlike.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Monad.ml b/stage0/fstar-lib/generated/FStarC_Class_Monad.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Monad.ml rename to stage0/fstar-lib/generated/FStarC_Class_Monad.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Monoid.ml b/stage0/fstar-lib/generated/FStarC_Class_Monoid.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Monoid.ml rename to stage0/fstar-lib/generated/FStarC_Class_Monoid.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Ord.ml b/stage0/fstar-lib/generated/FStarC_Class_Ord.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Ord.ml rename to stage0/fstar-lib/generated/FStarC_Class_Ord.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_PP.ml b/stage0/fstar-lib/generated/FStarC_Class_PP.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_PP.ml rename to stage0/fstar-lib/generated/FStarC_Class_PP.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Setlike.ml b/stage0/fstar-lib/generated/FStarC_Class_Setlike.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Setlike.ml rename to stage0/fstar-lib/generated/FStarC_Class_Setlike.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Show.ml b/stage0/fstar-lib/generated/FStarC_Class_Show.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Show.ml rename to stage0/fstar-lib/generated/FStarC_Class_Show.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Class_Tagged.ml b/stage0/fstar-lib/generated/FStarC_Class_Tagged.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Class_Tagged.ml rename to stage0/fstar-lib/generated/FStarC_Class_Tagged.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Common.ml b/stage0/fstar-lib/generated/FStarC_Common.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Common.ml rename to stage0/fstar-lib/generated/FStarC_Common.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_CList.ml b/stage0/fstar-lib/generated/FStarC_Compiler_CList.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_CList.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_CList.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Debug.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Debug.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Debug.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Debug.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_FlatSet.ml b/stage0/fstar-lib/generated/FStarC_Compiler_FlatSet.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_FlatSet.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_FlatSet.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_MachineInts.ml b/stage0/fstar-lib/generated/FStarC_Compiler_MachineInts.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_MachineInts.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_MachineInts.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Misc.ml 
b/stage0/fstar-lib/generated/FStarC_Compiler_Misc.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Misc.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Misc.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Option.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Option.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Option.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Option.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Order.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Order.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Order.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Order.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Path.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Path.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Path.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Path.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Plugins.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Plugins.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Plugins.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Plugins.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_RBSet.ml b/stage0/fstar-lib/generated/FStarC_Compiler_RBSet.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_RBSet.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_RBSet.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Range_Ops.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Range_Ops.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Range_Ops.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Range_Ops.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Range_Type.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Range_Type.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Range_Type.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Range_Type.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Real.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Real.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Real.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Real.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Sealed.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Sealed.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Sealed.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Sealed.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Compiler_Writer.ml b/stage0/fstar-lib/generated/FStarC_Compiler_Writer.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Compiler_Writer.ml rename to stage0/fstar-lib/generated/FStarC_Compiler_Writer.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Const.ml b/stage0/fstar-lib/generated/FStarC_Const.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Const.ml rename to stage0/fstar-lib/generated/FStarC_Const.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Defensive.ml b/stage0/fstar-lib/generated/FStarC_Defensive.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Defensive.ml rename to stage0/fstar-lib/generated/FStarC_Defensive.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Dependencies.ml b/stage0/fstar-lib/generated/FStarC_Dependencies.ml similarity index 100% rename from 
ocaml/fstar-lib/generated/FStarC_Dependencies.ml rename to stage0/fstar-lib/generated/FStarC_Dependencies.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Errors.ml b/stage0/fstar-lib/generated/FStarC_Errors.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Errors.ml rename to stage0/fstar-lib/generated/FStarC_Errors.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Errors_Codes.ml b/stage0/fstar-lib/generated/FStarC_Errors_Codes.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Errors_Codes.ml rename to stage0/fstar-lib/generated/FStarC_Errors_Codes.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Errors_Msg.ml b/stage0/fstar-lib/generated/FStarC_Errors_Msg.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Errors_Msg.ml rename to stage0/fstar-lib/generated/FStarC_Errors_Msg.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_Krml.ml b/stage0/fstar-lib/generated/FStarC_Extraction_Krml.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_Krml.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_Krml.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_Code.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_Code.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_Code.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_Code.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_Modul.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_Modul.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_Modul.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_Modul.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_RegEmb.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_RegEmb.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_RegEmb.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_RegEmb.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_RemoveUnusedParameters.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_RemoveUnusedParameters.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_RemoveUnusedParameters.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_RemoveUnusedParameters.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_Syntax.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_Syntax.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_Syntax.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_Syntax.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_Term.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_Term.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_Term.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_Term.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_UEnv.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_UEnv.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_UEnv.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_UEnv.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Extraction_ML_Util.ml b/stage0/fstar-lib/generated/FStarC_Extraction_ML_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Extraction_ML_Util.ml rename to stage0/fstar-lib/generated/FStarC_Extraction_ML_Util.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Find.ml b/stage0/fstar-lib/generated/FStarC_Find.ml 
similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Find.ml rename to stage0/fstar-lib/generated/FStarC_Find.ml diff --git a/ocaml/fstar-lib/generated/FStarC_GenSym.ml b/stage0/fstar-lib/generated/FStarC_GenSym.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_GenSym.ml rename to stage0/fstar-lib/generated/FStarC_GenSym.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Ident.ml b/stage0/fstar-lib/generated/FStarC_Ident.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Ident.ml rename to stage0/fstar-lib/generated/FStarC_Ident.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_CompletionTable.ml b/stage0/fstar-lib/generated/FStarC_Interactive_CompletionTable.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_CompletionTable.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_CompletionTable.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_Ide.ml b/stage0/fstar-lib/generated/FStarC_Interactive_Ide.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_Ide.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_Ide.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_Ide_Types.ml b/stage0/fstar-lib/generated/FStarC_Interactive_Ide_Types.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_Ide_Types.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_Ide_Types.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_Incremental.ml b/stage0/fstar-lib/generated/FStarC_Interactive_Incremental.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_Incremental.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_Incremental.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_JsonHelper.ml b/stage0/fstar-lib/generated/FStarC_Interactive_JsonHelper.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_JsonHelper.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_JsonHelper.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_Legacy.ml b/stage0/fstar-lib/generated/FStarC_Interactive_Legacy.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_Legacy.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_Legacy.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_Lsp.ml b/stage0/fstar-lib/generated/FStarC_Interactive_Lsp.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_Lsp.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_Lsp.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_PushHelper.ml b/stage0/fstar-lib/generated/FStarC_Interactive_PushHelper.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_PushHelper.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_PushHelper.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Interactive_QueryHelper.ml b/stage0/fstar-lib/generated/FStarC_Interactive_QueryHelper.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Interactive_QueryHelper.ml rename to stage0/fstar-lib/generated/FStarC_Interactive_QueryHelper.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Main.ml b/stage0/fstar-lib/generated/FStarC_Main.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Main.ml rename to stage0/fstar-lib/generated/FStarC_Main.ml diff --git a/ocaml/fstar-lib/generated/FStarC_OCaml.ml b/stage0/fstar-lib/generated/FStarC_OCaml.ml similarity 
index 100% rename from ocaml/fstar-lib/generated/FStarC_OCaml.ml rename to stage0/fstar-lib/generated/FStarC_OCaml.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Options.ml b/stage0/fstar-lib/generated/FStarC_Options.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Options.ml rename to stage0/fstar-lib/generated/FStarC_Options.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Options_Ext.ml b/stage0/fstar-lib/generated/FStarC_Options_Ext.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Options_Ext.ml rename to stage0/fstar-lib/generated/FStarC_Options_Ext.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Parser_AST.ml b/stage0/fstar-lib/generated/FStarC_Parser_AST.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Parser_AST.ml rename to stage0/fstar-lib/generated/FStarC_Parser_AST.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Parser_AST_Util.ml b/stage0/fstar-lib/generated/FStarC_Parser_AST_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Parser_AST_Util.ml rename to stage0/fstar-lib/generated/FStarC_Parser_AST_Util.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Parser_Const.ml b/stage0/fstar-lib/generated/FStarC_Parser_Const.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Parser_Const.ml rename to stage0/fstar-lib/generated/FStarC_Parser_Const.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Parser_Dep.ml b/stage0/fstar-lib/generated/FStarC_Parser_Dep.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Parser_Dep.ml rename to stage0/fstar-lib/generated/FStarC_Parser_Dep.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Parser_Driver.ml b/stage0/fstar-lib/generated/FStarC_Parser_Driver.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Parser_Driver.ml rename to stage0/fstar-lib/generated/FStarC_Parser_Driver.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Parser_ToDocument.ml b/stage0/fstar-lib/generated/FStarC_Parser_ToDocument.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Parser_ToDocument.ml rename to stage0/fstar-lib/generated/FStarC_Parser_ToDocument.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Prettyprint.ml b/stage0/fstar-lib/generated/FStarC_Prettyprint.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Prettyprint.ml rename to stage0/fstar-lib/generated/FStarC_Prettyprint.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Profiling.ml b/stage0/fstar-lib/generated/FStarC_Profiling.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Profiling.ml rename to stage0/fstar-lib/generated/FStarC_Profiling.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V1_Builtins.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V1_Builtins.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V1_Builtins.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V1_Builtins.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V1_Constants.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V1_Constants.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V1_Constants.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V1_Constants.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V1_Data.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V1_Data.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V1_Data.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V1_Data.ml diff 
--git a/ocaml/fstar-lib/generated/FStarC_Reflection_V1_Embeddings.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V1_Embeddings.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V1_Embeddings.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V1_Embeddings.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V1_Interpreter.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V1_Interpreter.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V1_Interpreter.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V1_Interpreter.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V1_NBEEmbeddings.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V1_NBEEmbeddings.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V1_NBEEmbeddings.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V1_NBEEmbeddings.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V2_Builtins.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V2_Builtins.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V2_Builtins.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V2_Builtins.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V2_Constants.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V2_Constants.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V2_Constants.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V2_Constants.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V2_Data.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V2_Data.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V2_Data.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V2_Data.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V2_Embeddings.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V2_Embeddings.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V2_Embeddings.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V2_Embeddings.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V2_Interpreter.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V2_Interpreter.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V2_Interpreter.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V2_Interpreter.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Reflection_V2_NBEEmbeddings.ml b/stage0/fstar-lib/generated/FStarC_Reflection_V2_NBEEmbeddings.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Reflection_V2_NBEEmbeddings.ml rename to stage0/fstar-lib/generated/FStarC_Reflection_V2_NBEEmbeddings.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Encode.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Encode.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Encode.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_Encode.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_EncodeTerm.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_EncodeTerm.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_EncodeTerm.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_EncodeTerm.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Env.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Env.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Env.ml rename to 
stage0/fstar-lib/generated/FStarC_SMTEncoding_Env.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_ErrorReporting.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_ErrorReporting.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_ErrorReporting.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_ErrorReporting.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Pruning.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Pruning.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Pruning.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_Pruning.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Solver.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Solver.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Solver.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_Solver.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_SolverState.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_SolverState.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_SolverState.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_SolverState.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Solver_Cache.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Solver_Cache.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Solver_Cache.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_Solver_Cache.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Term.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Term.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Term.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_Term.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_UnsatCore.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_UnsatCore.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_UnsatCore.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_UnsatCore.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Util.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Util.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_Util.ml diff --git a/ocaml/fstar-lib/generated/FStarC_SMTEncoding_Z3.ml b/stage0/fstar-lib/generated/FStarC_SMTEncoding_Z3.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_SMTEncoding_Z3.ml rename to stage0/fstar-lib/generated/FStarC_SMTEncoding_Z3.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Compress.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Compress.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Compress.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Compress.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_DsEnv.ml b/stage0/fstar-lib/generated/FStarC_Syntax_DsEnv.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_DsEnv.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_DsEnv.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Embeddings.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Embeddings.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Embeddings.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Embeddings.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Embeddings_AppEmb.ml 
b/stage0/fstar-lib/generated/FStarC_Syntax_Embeddings_AppEmb.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Embeddings_AppEmb.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Embeddings_AppEmb.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Embeddings_Base.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Embeddings_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Embeddings_Base.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Embeddings_Base.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Formula.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Formula.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Formula.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Formula.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Free.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Free.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Free.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Free.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Hash.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Hash.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Hash.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Hash.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_InstFV.ml b/stage0/fstar-lib/generated/FStarC_Syntax_InstFV.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_InstFV.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_InstFV.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_MutRecTy.ml b/stage0/fstar-lib/generated/FStarC_Syntax_MutRecTy.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_MutRecTy.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_MutRecTy.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Print.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Print.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Print.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Print.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Print_Pretty.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Print_Pretty.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Print_Pretty.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Print_Pretty.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Print_Ugly.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Print_Ugly.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Print_Ugly.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Print_Ugly.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Resugar.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Resugar.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Resugar.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Resugar.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Subst.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Subst.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Subst.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Subst.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Syntax.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Syntax.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Syntax.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Syntax.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Unionfind.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Unionfind.ml 
similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Unionfind.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Unionfind.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Util.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Util.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Util.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_Visit.ml b/stage0/fstar-lib/generated/FStarC_Syntax_Visit.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_Visit.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_Visit.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Syntax_VisitM.ml b/stage0/fstar-lib/generated/FStarC_Syntax_VisitM.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Syntax_VisitM.ml rename to stage0/fstar-lib/generated/FStarC_Syntax_VisitM.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Common.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Common.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_Common.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Common.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_CtrlRewrite.ml b/stage0/fstar-lib/generated/FStarC_Tactics_CtrlRewrite.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_CtrlRewrite.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_CtrlRewrite.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Embedding.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Embedding.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_Embedding.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Embedding.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Hooks.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Hooks.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_Hooks.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Hooks.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_InterpFuns.ml b/stage0/fstar-lib/generated/FStarC_Tactics_InterpFuns.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_InterpFuns.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_InterpFuns.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Interpreter.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Interpreter.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_Interpreter.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Interpreter.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Monad.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Monad.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_Monad.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Monad.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Printing.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Printing.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_Printing.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Printing.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Result.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Result.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_Result.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Result.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_Types.ml b/stage0/fstar-lib/generated/FStarC_Tactics_Types.ml similarity index 100% rename from 
ocaml/fstar-lib/generated/FStarC_Tactics_Types.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_Types.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_V1_Basic.ml b/stage0/fstar-lib/generated/FStarC_Tactics_V1_Basic.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_V1_Basic.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_V1_Basic.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_V1_Primops.ml b/stage0/fstar-lib/generated/FStarC_Tactics_V1_Primops.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_V1_Primops.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_V1_Primops.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_V2_Basic.ml b/stage0/fstar-lib/generated/FStarC_Tactics_V2_Basic.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_V2_Basic.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_V2_Basic.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Tactics_V2_Primops.ml b/stage0/fstar-lib/generated/FStarC_Tactics_V2_Primops.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Tactics_V2_Primops.ml rename to stage0/fstar-lib/generated/FStarC_Tactics_V2_Primops.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Thunk.ml b/stage0/fstar-lib/generated/FStarC_Thunk.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Thunk.ml rename to stage0/fstar-lib/generated/FStarC_Thunk.ml diff --git a/ocaml/fstar-lib/generated/FStarC_ToSyntax_Interleave.ml b/stage0/fstar-lib/generated/FStarC_ToSyntax_Interleave.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_ToSyntax_Interleave.ml rename to stage0/fstar-lib/generated/FStarC_ToSyntax_Interleave.ml diff --git a/ocaml/fstar-lib/generated/FStarC_ToSyntax_ToSyntax.ml b/stage0/fstar-lib/generated/FStarC_ToSyntax_ToSyntax.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_ToSyntax_ToSyntax.ml rename to stage0/fstar-lib/generated/FStarC_ToSyntax_ToSyntax.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Cfg.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Cfg.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Cfg.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Cfg.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Common.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Common.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Common.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Common.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Core.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Core.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Core.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Core.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_DMFF.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_DMFF.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_DMFF.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_DMFF.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_DeferredImplicits.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_DeferredImplicits.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_DeferredImplicits.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_DeferredImplicits.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Env.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Env.ml 
similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Env.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Env.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Err.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Err.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Err.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Err.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Generalize.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Generalize.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Generalize.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Generalize.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_NBE.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_NBE.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_NBE.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_NBE.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_NBETerm.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_NBETerm.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_NBETerm.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_NBETerm.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Normalize.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Normalize.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Normalize.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Normalize.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Normalize_Unfolding.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Normalize_Unfolding.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Normalize_Unfolding.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Normalize_Unfolding.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_PatternUtils.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_PatternUtils.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_PatternUtils.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_PatternUtils.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Positivity.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Positivity.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Positivity.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Positivity.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Array.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Array.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Array.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Array.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Base.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Base.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Base.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Docs.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Docs.ml similarity index 
100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Docs.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Docs.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Eq.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Eq.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Eq.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Eq.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Erased.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Erased.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Erased.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Erased.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Errors_Msg.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Errors_Msg.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Errors_Msg.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Errors_Msg.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Issue.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Issue.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Issue.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Issue.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_MachineInts.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_MachineInts.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_MachineInts.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_MachineInts.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Range.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Range.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Range.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Range.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Real.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Real.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Real.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Real.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Sealed.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Sealed.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Primops_Sealed.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Primops_Sealed.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Quals.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Quals.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Quals.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Quals.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Rel.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Rel.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Rel.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Rel.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Tc.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Tc.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Tc.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Tc.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_TcEffect.ml 
b/stage0/fstar-lib/generated/FStarC_TypeChecker_TcEffect.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_TcEffect.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_TcEffect.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_TcInductive.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_TcInductive.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_TcInductive.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_TcInductive.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_TcTerm.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_TcTerm.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_TcTerm.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_TcTerm.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_TermEqAndSimplify.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_TermEqAndSimplify.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_TermEqAndSimplify.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_TermEqAndSimplify.ml diff --git a/ocaml/fstar-lib/generated/FStarC_TypeChecker_Util.ml b/stage0/fstar-lib/generated/FStarC_TypeChecker_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_TypeChecker_Util.ml rename to stage0/fstar-lib/generated/FStarC_TypeChecker_Util.ml diff --git a/ocaml/fstar-lib/generated/FStarC_Universal.ml b/stage0/fstar-lib/generated/FStarC_Universal.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_Universal.ml rename to stage0/fstar-lib/generated/FStarC_Universal.ml diff --git a/ocaml/fstar-lib/generated/FStarC_VConfig.ml b/stage0/fstar-lib/generated/FStarC_VConfig.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStarC_VConfig.ml rename to stage0/fstar-lib/generated/FStarC_VConfig.ml diff --git a/ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid.ml b/stage0/fstar-lib/generated/FStar_Algebra_CommMonoid.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid.ml rename to stage0/fstar-lib/generated/FStar_Algebra_CommMonoid.ml diff --git a/ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid_Equiv.ml b/stage0/fstar-lib/generated/FStar_Algebra_CommMonoid_Equiv.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid_Equiv.ml rename to stage0/fstar-lib/generated/FStar_Algebra_CommMonoid_Equiv.ml diff --git a/ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold.ml b/stage0/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold.ml rename to stage0/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold.ml diff --git a/ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold_Nested.ml b/stage0/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold_Nested.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold_Nested.ml rename to stage0/fstar-lib/generated/FStar_Algebra_CommMonoid_Fold_Nested.ml diff --git a/ocaml/fstar-lib/generated/FStar_Algebra_Monoid.ml b/stage0/fstar-lib/generated/FStar_Algebra_Monoid.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Algebra_Monoid.ml rename to stage0/fstar-lib/generated/FStar_Algebra_Monoid.ml diff --git a/ocaml/fstar-lib/generated/FStar_BV.ml b/stage0/fstar-lib/generated/FStar_BV.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_BV.ml rename to stage0/fstar-lib/generated/FStar_BV.ml 
diff --git a/ocaml/fstar-lib/generated/FStar_BigOps.ml b/stage0/fstar-lib/generated/FStar_BigOps.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_BigOps.ml rename to stage0/fstar-lib/generated/FStar_BigOps.ml diff --git a/ocaml/fstar-lib/generated/FStar_BitVector.ml b/stage0/fstar-lib/generated/FStar_BitVector.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_BitVector.ml rename to stage0/fstar-lib/generated/FStar_BitVector.ml diff --git a/ocaml/fstar-lib/generated/FStar_Calc.ml b/stage0/fstar-lib/generated/FStar_Calc.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Calc.ml rename to stage0/fstar-lib/generated/FStar_Calc.ml diff --git a/ocaml/fstar-lib/generated/FStar_Cardinality_Cantor.ml b/stage0/fstar-lib/generated/FStar_Cardinality_Cantor.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Cardinality_Cantor.ml rename to stage0/fstar-lib/generated/FStar_Cardinality_Cantor.ml diff --git a/ocaml/fstar-lib/generated/FStar_Cardinality_Universes.ml b/stage0/fstar-lib/generated/FStar_Cardinality_Universes.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Cardinality_Universes.ml rename to stage0/fstar-lib/generated/FStar_Cardinality_Universes.ml diff --git a/ocaml/fstar-lib/generated/FStar_Class_Add.ml b/stage0/fstar-lib/generated/FStar_Class_Add.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Class_Add.ml rename to stage0/fstar-lib/generated/FStar_Class_Add.ml diff --git a/ocaml/fstar-lib/generated/FStar_Class_Embeddable.ml b/stage0/fstar-lib/generated/FStar_Class_Embeddable.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Class_Embeddable.ml rename to stage0/fstar-lib/generated/FStar_Class_Embeddable.ml diff --git a/ocaml/fstar-lib/generated/FStar_Class_Eq.ml b/stage0/fstar-lib/generated/FStar_Class_Eq.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Class_Eq.ml rename to stage0/fstar-lib/generated/FStar_Class_Eq.ml diff --git a/ocaml/fstar-lib/generated/FStar_Class_Eq_Raw.ml b/stage0/fstar-lib/generated/FStar_Class_Eq_Raw.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Class_Eq_Raw.ml rename to stage0/fstar-lib/generated/FStar_Class_Eq_Raw.ml diff --git a/ocaml/fstar-lib/generated/FStar_Class_Printable.ml b/stage0/fstar-lib/generated/FStar_Class_Printable.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Class_Printable.ml rename to stage0/fstar-lib/generated/FStar_Class_Printable.ml diff --git a/ocaml/fstar-lib/generated/FStar_Class_TotalOrder_Raw.ml b/stage0/fstar-lib/generated/FStar_Class_TotalOrder_Raw.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Class_TotalOrder_Raw.ml rename to stage0/fstar-lib/generated/FStar_Class_TotalOrder_Raw.ml diff --git a/ocaml/fstar-lib/generated/FStar_Classical.ml b/stage0/fstar-lib/generated/FStar_Classical.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Classical.ml rename to stage0/fstar-lib/generated/FStar_Classical.ml diff --git a/ocaml/fstar-lib/generated/FStar_Classical_Sugar.ml b/stage0/fstar-lib/generated/FStar_Classical_Sugar.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Classical_Sugar.ml rename to stage0/fstar-lib/generated/FStar_Classical_Sugar.ml diff --git a/ocaml/fstar-lib/generated/FStar_ConstantTime_Integers.ml b/stage0/fstar-lib/generated/FStar_ConstantTime_Integers.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_ConstantTime_Integers.ml rename to 
stage0/fstar-lib/generated/FStar_ConstantTime_Integers.ml diff --git a/ocaml/fstar-lib/generated/FStar_DependentMap.ml b/stage0/fstar-lib/generated/FStar_DependentMap.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_DependentMap.ml rename to stage0/fstar-lib/generated/FStar_DependentMap.ml diff --git a/ocaml/fstar-lib/generated/FStar_Endianness.ml b/stage0/fstar-lib/generated/FStar_Endianness.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Endianness.ml rename to stage0/fstar-lib/generated/FStar_Endianness.ml diff --git a/ocaml/fstar-lib/generated/FStar_ErasedLogic.ml b/stage0/fstar-lib/generated/FStar_ErasedLogic.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_ErasedLogic.ml rename to stage0/fstar-lib/generated/FStar_ErasedLogic.ml diff --git a/ocaml/fstar-lib/generated/FStar_Error.ml b/stage0/fstar-lib/generated/FStar_Error.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Error.ml rename to stage0/fstar-lib/generated/FStar_Error.ml diff --git a/ocaml/fstar-lib/generated/FStar_ExtractAs.ml b/stage0/fstar-lib/generated/FStar_ExtractAs.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_ExtractAs.ml rename to stage0/fstar-lib/generated/FStar_ExtractAs.ml diff --git a/ocaml/fstar-lib/generated/FStar_Fin.ml b/stage0/fstar-lib/generated/FStar_Fin.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Fin.ml rename to stage0/fstar-lib/generated/FStar_Fin.ml diff --git a/ocaml/fstar-lib/generated/FStar_FiniteMap_Ambient.ml b/stage0/fstar-lib/generated/FStar_FiniteMap_Ambient.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_FiniteMap_Ambient.ml rename to stage0/fstar-lib/generated/FStar_FiniteMap_Ambient.ml diff --git a/ocaml/fstar-lib/generated/FStar_FiniteMap_Base.ml b/stage0/fstar-lib/generated/FStar_FiniteMap_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_FiniteMap_Base.ml rename to stage0/fstar-lib/generated/FStar_FiniteMap_Base.ml diff --git a/ocaml/fstar-lib/generated/FStar_FiniteSet_Ambient.ml b/stage0/fstar-lib/generated/FStar_FiniteSet_Ambient.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_FiniteSet_Ambient.ml rename to stage0/fstar-lib/generated/FStar_FiniteSet_Ambient.ml diff --git a/ocaml/fstar-lib/generated/FStar_FiniteSet_Base.ml b/stage0/fstar-lib/generated/FStar_FiniteSet_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_FiniteSet_Base.ml rename to stage0/fstar-lib/generated/FStar_FiniteSet_Base.ml diff --git a/ocaml/fstar-lib/generated/FStar_FunctionalExtensionality.ml b/stage0/fstar-lib/generated/FStar_FunctionalExtensionality.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_FunctionalExtensionality.ml rename to stage0/fstar-lib/generated/FStar_FunctionalExtensionality.ml diff --git a/ocaml/fstar-lib/generated/FStar_FunctionalQueue.ml b/stage0/fstar-lib/generated/FStar_FunctionalQueue.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_FunctionalQueue.ml rename to stage0/fstar-lib/generated/FStar_FunctionalQueue.ml diff --git a/ocaml/fstar-lib/generated/FStar_Functions.ml b/stage0/fstar-lib/generated/FStar_Functions.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Functions.ml rename to stage0/fstar-lib/generated/FStar_Functions.ml diff --git a/ocaml/fstar-lib/generated/FStar_GSet.ml b/stage0/fstar-lib/generated/FStar_GSet.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_GSet.ml rename to 
stage0/fstar-lib/generated/FStar_GSet.ml diff --git a/ocaml/fstar-lib/generated/FStar_GhostSet.ml b/stage0/fstar-lib/generated/FStar_GhostSet.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_GhostSet.ml rename to stage0/fstar-lib/generated/FStar_GhostSet.ml diff --git a/ocaml/fstar-lib/generated/FStar_HyperStack.ml b/stage0/fstar-lib/generated/FStar_HyperStack.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_HyperStack.ml rename to stage0/fstar-lib/generated/FStar_HyperStack.ml diff --git a/ocaml/fstar-lib/generated/FStar_IFC.ml b/stage0/fstar-lib/generated/FStar_IFC.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_IFC.ml rename to stage0/fstar-lib/generated/FStar_IFC.ml diff --git a/ocaml/fstar-lib/generated/FStar_IndefiniteDescription.ml b/stage0/fstar-lib/generated/FStar_IndefiniteDescription.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_IndefiniteDescription.ml rename to stage0/fstar-lib/generated/FStar_IndefiniteDescription.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int.ml b/stage0/fstar-lib/generated/FStar_Int.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int.ml rename to stage0/fstar-lib/generated/FStar_Int.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int128.ml b/stage0/fstar-lib/generated/FStar_Int128.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int128.ml rename to stage0/fstar-lib/generated/FStar_Int128.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int16.ml b/stage0/fstar-lib/generated/FStar_Int16.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int16.ml rename to stage0/fstar-lib/generated/FStar_Int16.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int32.ml b/stage0/fstar-lib/generated/FStar_Int32.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int32.ml rename to stage0/fstar-lib/generated/FStar_Int32.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int64.ml b/stage0/fstar-lib/generated/FStar_Int64.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int64.ml rename to stage0/fstar-lib/generated/FStar_Int64.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int8.ml b/stage0/fstar-lib/generated/FStar_Int8.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int8.ml rename to stage0/fstar-lib/generated/FStar_Int8.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int_Cast.ml b/stage0/fstar-lib/generated/FStar_Int_Cast.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int_Cast.ml rename to stage0/fstar-lib/generated/FStar_Int_Cast.ml diff --git a/ocaml/fstar-lib/generated/FStar_Int_Cast_Full.ml b/stage0/fstar-lib/generated/FStar_Int_Cast_Full.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Int_Cast_Full.ml rename to stage0/fstar-lib/generated/FStar_Int_Cast_Full.ml diff --git a/ocaml/fstar-lib/generated/FStar_IntegerIntervals.ml b/stage0/fstar-lib/generated/FStar_IntegerIntervals.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_IntegerIntervals.ml rename to stage0/fstar-lib/generated/FStar_IntegerIntervals.ml diff --git a/ocaml/fstar-lib/generated/FStar_Integers.ml b/stage0/fstar-lib/generated/FStar_Integers.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Integers.ml rename to stage0/fstar-lib/generated/FStar_Integers.ml diff --git a/ocaml/fstar-lib/generated/FStar_InteractiveHelpers.ml b/stage0/fstar-lib/generated/FStar_InteractiveHelpers.ml similarity index 100% rename from 
ocaml/fstar-lib/generated/FStar_InteractiveHelpers.ml rename to stage0/fstar-lib/generated/FStar_InteractiveHelpers.ml diff --git a/ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Base.ml b/stage0/fstar-lib/generated/FStar_InteractiveHelpers_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Base.ml rename to stage0/fstar-lib/generated/FStar_InteractiveHelpers_Base.ml diff --git a/ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Effectful.ml b/stage0/fstar-lib/generated/FStar_InteractiveHelpers_Effectful.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Effectful.ml rename to stage0/fstar-lib/generated/FStar_InteractiveHelpers_Effectful.ml diff --git a/ocaml/fstar-lib/generated/FStar_InteractiveHelpers_ExploreTerm.ml b/stage0/fstar-lib/generated/FStar_InteractiveHelpers_ExploreTerm.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_InteractiveHelpers_ExploreTerm.ml rename to stage0/fstar-lib/generated/FStar_InteractiveHelpers_ExploreTerm.ml diff --git a/ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Output.ml b/stage0/fstar-lib/generated/FStar_InteractiveHelpers_Output.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Output.ml rename to stage0/fstar-lib/generated/FStar_InteractiveHelpers_Output.ml diff --git a/ocaml/fstar-lib/generated/FStar_InteractiveHelpers_PostProcess.ml b/stage0/fstar-lib/generated/FStar_InteractiveHelpers_PostProcess.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_InteractiveHelpers_PostProcess.ml rename to stage0/fstar-lib/generated/FStar_InteractiveHelpers_PostProcess.ml diff --git a/ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Propositions.ml b/stage0/fstar-lib/generated/FStar_InteractiveHelpers_Propositions.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_InteractiveHelpers_Propositions.ml rename to stage0/fstar-lib/generated/FStar_InteractiveHelpers_Propositions.ml diff --git a/ocaml/fstar-lib/generated/FStar_LexicographicOrdering.ml b/stage0/fstar-lib/generated/FStar_LexicographicOrdering.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_LexicographicOrdering.ml rename to stage0/fstar-lib/generated/FStar_LexicographicOrdering.ml diff --git a/ocaml/fstar-lib/generated/FStar_List_Pure_Base.ml b/stage0/fstar-lib/generated/FStar_List_Pure_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_List_Pure_Base.ml rename to stage0/fstar-lib/generated/FStar_List_Pure_Base.ml diff --git a/ocaml/fstar-lib/generated/FStar_List_Tot_Properties.ml b/stage0/fstar-lib/generated/FStar_List_Tot_Properties.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_List_Tot_Properties.ml rename to stage0/fstar-lib/generated/FStar_List_Tot_Properties.ml diff --git a/ocaml/fstar-lib/generated/FStar_MRef.ml b/stage0/fstar-lib/generated/FStar_MRef.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_MRef.ml rename to stage0/fstar-lib/generated/FStar_MRef.ml diff --git a/ocaml/fstar-lib/generated/FStar_Map.ml b/stage0/fstar-lib/generated/FStar_Map.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Map.ml rename to stage0/fstar-lib/generated/FStar_Map.ml diff --git a/ocaml/fstar-lib/generated/FStar_MarkovsPrinciple.ml b/stage0/fstar-lib/generated/FStar_MarkovsPrinciple.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_MarkovsPrinciple.ml rename to stage0/fstar-lib/generated/FStar_MarkovsPrinciple.ml 
diff --git a/ocaml/fstar-lib/generated/FStar_Math_Euclid.ml b/stage0/fstar-lib/generated/FStar_Math_Euclid.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Math_Euclid.ml rename to stage0/fstar-lib/generated/FStar_Math_Euclid.ml diff --git a/ocaml/fstar-lib/generated/FStar_Math_Fermat.ml b/stage0/fstar-lib/generated/FStar_Math_Fermat.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Math_Fermat.ml rename to stage0/fstar-lib/generated/FStar_Math_Fermat.ml diff --git a/ocaml/fstar-lib/generated/FStar_Math_Lemmas.ml b/stage0/fstar-lib/generated/FStar_Math_Lemmas.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Math_Lemmas.ml rename to stage0/fstar-lib/generated/FStar_Math_Lemmas.ml diff --git a/ocaml/fstar-lib/generated/FStar_Math_Lib.ml b/stage0/fstar-lib/generated/FStar_Math_Lib.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Math_Lib.ml rename to stage0/fstar-lib/generated/FStar_Math_Lib.ml diff --git a/ocaml/fstar-lib/generated/FStar_Matrix.ml b/stage0/fstar-lib/generated/FStar_Matrix.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Matrix.ml rename to stage0/fstar-lib/generated/FStar_Matrix.ml diff --git a/ocaml/fstar-lib/generated/FStar_Modifies.ml b/stage0/fstar-lib/generated/FStar_Modifies.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Modifies.ml rename to stage0/fstar-lib/generated/FStar_Modifies.ml diff --git a/ocaml/fstar-lib/generated/FStar_ModifiesGen.ml b/stage0/fstar-lib/generated/FStar_ModifiesGen.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_ModifiesGen.ml rename to stage0/fstar-lib/generated/FStar_ModifiesGen.ml diff --git a/ocaml/fstar-lib/generated/FStar_Monotonic_DependentMap.ml b/stage0/fstar-lib/generated/FStar_Monotonic_DependentMap.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Monotonic_DependentMap.ml rename to stage0/fstar-lib/generated/FStar_Monotonic_DependentMap.ml diff --git a/ocaml/fstar-lib/generated/FStar_Monotonic_HyperHeap.ml b/stage0/fstar-lib/generated/FStar_Monotonic_HyperHeap.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Monotonic_HyperHeap.ml rename to stage0/fstar-lib/generated/FStar_Monotonic_HyperHeap.ml diff --git a/ocaml/fstar-lib/generated/FStar_Monotonic_HyperStack.ml b/stage0/fstar-lib/generated/FStar_Monotonic_HyperStack.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Monotonic_HyperStack.ml rename to stage0/fstar-lib/generated/FStar_Monotonic_HyperStack.ml diff --git a/ocaml/fstar-lib/generated/FStar_Monotonic_Map.ml b/stage0/fstar-lib/generated/FStar_Monotonic_Map.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Monotonic_Map.ml rename to stage0/fstar-lib/generated/FStar_Monotonic_Map.ml diff --git a/ocaml/fstar-lib/generated/FStar_Monotonic_Pure.ml b/stage0/fstar-lib/generated/FStar_Monotonic_Pure.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Monotonic_Pure.ml rename to stage0/fstar-lib/generated/FStar_Monotonic_Pure.ml diff --git a/ocaml/fstar-lib/generated/FStar_Monotonic_Seq.ml b/stage0/fstar-lib/generated/FStar_Monotonic_Seq.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Monotonic_Seq.ml rename to stage0/fstar-lib/generated/FStar_Monotonic_Seq.ml diff --git a/ocaml/fstar-lib/generated/FStar_Monotonic_Witnessed.ml b/stage0/fstar-lib/generated/FStar_Monotonic_Witnessed.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Monotonic_Witnessed.ml rename 
to stage0/fstar-lib/generated/FStar_Monotonic_Witnessed.ml diff --git a/ocaml/fstar-lib/generated/FStar_OrdMap.ml b/stage0/fstar-lib/generated/FStar_OrdMap.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_OrdMap.ml rename to stage0/fstar-lib/generated/FStar_OrdMap.ml diff --git a/ocaml/fstar-lib/generated/FStar_OrdMapProps.ml b/stage0/fstar-lib/generated/FStar_OrdMapProps.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_OrdMapProps.ml rename to stage0/fstar-lib/generated/FStar_OrdMapProps.ml diff --git a/ocaml/fstar-lib/generated/FStar_OrdSet.ml b/stage0/fstar-lib/generated/FStar_OrdSet.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_OrdSet.ml rename to stage0/fstar-lib/generated/FStar_OrdSet.ml diff --git a/ocaml/fstar-lib/generated/FStar_OrdSetProps.ml b/stage0/fstar-lib/generated/FStar_OrdSetProps.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_OrdSetProps.ml rename to stage0/fstar-lib/generated/FStar_OrdSetProps.ml diff --git a/ocaml/fstar-lib/generated/FStar_Order.ml b/stage0/fstar-lib/generated/FStar_Order.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Order.ml rename to stage0/fstar-lib/generated/FStar_Order.ml diff --git a/ocaml/fstar-lib/generated/FStar_PCM.ml b/stage0/fstar-lib/generated/FStar_PCM.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_PCM.ml rename to stage0/fstar-lib/generated/FStar_PCM.ml diff --git a/ocaml/fstar-lib/generated/FStar_Parse.ml b/stage0/fstar-lib/generated/FStar_Parse.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Parse.ml rename to stage0/fstar-lib/generated/FStar_Parse.ml diff --git a/ocaml/fstar-lib/generated/FStar_PartialMap.ml b/stage0/fstar-lib/generated/FStar_PartialMap.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_PartialMap.ml rename to stage0/fstar-lib/generated/FStar_PartialMap.ml diff --git a/ocaml/fstar-lib/generated/FStar_Pervasives.ml b/stage0/fstar-lib/generated/FStar_Pervasives.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Pervasives.ml rename to stage0/fstar-lib/generated/FStar_Pervasives.ml diff --git a/ocaml/fstar-lib/generated/FStar_PredicateExtensionality.ml b/stage0/fstar-lib/generated/FStar_PredicateExtensionality.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_PredicateExtensionality.ml rename to stage0/fstar-lib/generated/FStar_PredicateExtensionality.ml diff --git a/ocaml/fstar-lib/generated/FStar_Preorder.ml b/stage0/fstar-lib/generated/FStar_Preorder.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Preorder.ml rename to stage0/fstar-lib/generated/FStar_Preorder.ml diff --git a/ocaml/fstar-lib/generated/FStar_PropositionalExtensionality.ml b/stage0/fstar-lib/generated/FStar_PropositionalExtensionality.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_PropositionalExtensionality.ml rename to stage0/fstar-lib/generated/FStar_PropositionalExtensionality.ml diff --git a/ocaml/fstar-lib/generated/FStar_PtrdiffT.ml b/stage0/fstar-lib/generated/FStar_PtrdiffT.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_PtrdiffT.ml rename to stage0/fstar-lib/generated/FStar_PtrdiffT.ml diff --git a/ocaml/fstar-lib/generated/FStar_Pure_BreakVC.ml b/stage0/fstar-lib/generated/FStar_Pure_BreakVC.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Pure_BreakVC.ml rename to stage0/fstar-lib/generated/FStar_Pure_BreakVC.ml diff --git 
a/ocaml/fstar-lib/generated/FStar_Real_Old.ml b/stage0/fstar-lib/generated/FStar_Real_Old.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Real_Old.ml rename to stage0/fstar-lib/generated/FStar_Real_Old.ml diff --git a/ocaml/fstar-lib/generated/FStar_Ref.ml b/stage0/fstar-lib/generated/FStar_Ref.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Ref.ml rename to stage0/fstar-lib/generated/FStar_Ref.ml diff --git a/ocaml/fstar-lib/generated/FStar_RefinementExtensionality.ml b/stage0/fstar-lib/generated/FStar_RefinementExtensionality.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_RefinementExtensionality.ml rename to stage0/fstar-lib/generated/FStar_RefinementExtensionality.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection.ml b/stage0/fstar-lib/generated/FStar_Reflection.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection.ml rename to stage0/fstar-lib/generated/FStar_Reflection.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_Const.ml b/stage0/fstar-lib/generated/FStar_Reflection_Const.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_Const.ml rename to stage0/fstar-lib/generated/FStar_Reflection_Const.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_Formula.ml b/stage0/fstar-lib/generated/FStar_Reflection_Formula.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_Formula.ml rename to stage0/fstar-lib/generated/FStar_Reflection_Formula.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_TermEq.ml b/stage0/fstar-lib/generated/FStar_Reflection_TermEq.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_TermEq.ml rename to stage0/fstar-lib/generated/FStar_Reflection_TermEq.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_TermEq_Simple.ml b/stage0/fstar-lib/generated/FStar_Reflection_TermEq_Simple.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_TermEq_Simple.ml rename to stage0/fstar-lib/generated/FStar_Reflection_TermEq_Simple.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_Typing.ml b/stage0/fstar-lib/generated/FStar_Reflection_Typing.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_Typing.ml rename to stage0/fstar-lib/generated/FStar_Reflection_Typing.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V1.ml b/stage0/fstar-lib/generated/FStar_Reflection_V1.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V1.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V1.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V1_Derived.ml b/stage0/fstar-lib/generated/FStar_Reflection_V1_Derived.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V1_Derived.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V1_Derived.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V1_Derived_Lemmas.ml b/stage0/fstar-lib/generated/FStar_Reflection_V1_Derived_Lemmas.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V1_Derived_Lemmas.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V1_Derived_Lemmas.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V1_Formula.ml b/stage0/fstar-lib/generated/FStar_Reflection_V1_Formula.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V1_Formula.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V1_Formula.ml diff --git 
a/ocaml/fstar-lib/generated/FStar_Reflection_V2.ml b/stage0/fstar-lib/generated/FStar_Reflection_V2.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V2.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V2.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V2_Arith.ml b/stage0/fstar-lib/generated/FStar_Reflection_V2_Arith.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V2_Arith.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V2_Arith.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V2_Collect.ml b/stage0/fstar-lib/generated/FStar_Reflection_V2_Collect.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V2_Collect.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V2_Collect.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V2_Compare.ml b/stage0/fstar-lib/generated/FStar_Reflection_V2_Compare.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V2_Compare.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V2_Compare.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V2_Derived.ml b/stage0/fstar-lib/generated/FStar_Reflection_V2_Derived.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V2_Derived.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V2_Derived.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V2_Derived_Lemmas.ml b/stage0/fstar-lib/generated/FStar_Reflection_V2_Derived_Lemmas.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V2_Derived_Lemmas.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V2_Derived_Lemmas.ml diff --git a/ocaml/fstar-lib/generated/FStar_Reflection_V2_Formula.ml b/stage0/fstar-lib/generated/FStar_Reflection_V2_Formula.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Reflection_V2_Formula.ml rename to stage0/fstar-lib/generated/FStar_Reflection_V2_Formula.ml diff --git a/ocaml/fstar-lib/generated/FStar_ReflexiveTransitiveClosure.ml b/stage0/fstar-lib/generated/FStar_ReflexiveTransitiveClosure.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_ReflexiveTransitiveClosure.ml rename to stage0/fstar-lib/generated/FStar_ReflexiveTransitiveClosure.ml diff --git a/ocaml/fstar-lib/generated/FStar_Sealed_Inhabited.ml b/stage0/fstar-lib/generated/FStar_Sealed_Inhabited.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Sealed_Inhabited.ml rename to stage0/fstar-lib/generated/FStar_Sealed_Inhabited.ml diff --git a/ocaml/fstar-lib/generated/FStar_Seq.ml b/stage0/fstar-lib/generated/FStar_Seq.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Seq.ml rename to stage0/fstar-lib/generated/FStar_Seq.ml diff --git a/ocaml/fstar-lib/generated/FStar_Seq_Base.ml b/stage0/fstar-lib/generated/FStar_Seq_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Seq_Base.ml rename to stage0/fstar-lib/generated/FStar_Seq_Base.ml diff --git a/ocaml/fstar-lib/generated/FStar_Seq_Equiv.ml b/stage0/fstar-lib/generated/FStar_Seq_Equiv.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Seq_Equiv.ml rename to stage0/fstar-lib/generated/FStar_Seq_Equiv.ml diff --git a/ocaml/fstar-lib/generated/FStar_Seq_Permutation.ml b/stage0/fstar-lib/generated/FStar_Seq_Permutation.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Seq_Permutation.ml rename to stage0/fstar-lib/generated/FStar_Seq_Permutation.ml diff --git 
a/ocaml/fstar-lib/generated/FStar_Seq_Properties.ml b/stage0/fstar-lib/generated/FStar_Seq_Properties.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Seq_Properties.ml rename to stage0/fstar-lib/generated/FStar_Seq_Properties.ml diff --git a/ocaml/fstar-lib/generated/FStar_Seq_Sorted.ml b/stage0/fstar-lib/generated/FStar_Seq_Sorted.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Seq_Sorted.ml rename to stage0/fstar-lib/generated/FStar_Seq_Sorted.ml diff --git a/ocaml/fstar-lib/generated/FStar_Sequence.ml b/stage0/fstar-lib/generated/FStar_Sequence.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Sequence.ml rename to stage0/fstar-lib/generated/FStar_Sequence.ml diff --git a/ocaml/fstar-lib/generated/FStar_Sequence_Ambient.ml b/stage0/fstar-lib/generated/FStar_Sequence_Ambient.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Sequence_Ambient.ml rename to stage0/fstar-lib/generated/FStar_Sequence_Ambient.ml diff --git a/ocaml/fstar-lib/generated/FStar_Sequence_Base.ml b/stage0/fstar-lib/generated/FStar_Sequence_Base.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Sequence_Base.ml rename to stage0/fstar-lib/generated/FStar_Sequence_Base.ml diff --git a/ocaml/fstar-lib/generated/FStar_Sequence_Permutation.ml b/stage0/fstar-lib/generated/FStar_Sequence_Permutation.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Sequence_Permutation.ml rename to stage0/fstar-lib/generated/FStar_Sequence_Permutation.ml diff --git a/ocaml/fstar-lib/generated/FStar_Sequence_Seq.ml b/stage0/fstar-lib/generated/FStar_Sequence_Seq.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Sequence_Seq.ml rename to stage0/fstar-lib/generated/FStar_Sequence_Seq.ml diff --git a/ocaml/fstar-lib/generated/FStar_Sequence_Util.ml b/stage0/fstar-lib/generated/FStar_Sequence_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Sequence_Util.ml rename to stage0/fstar-lib/generated/FStar_Sequence_Util.ml diff --git a/ocaml/fstar-lib/generated/FStar_Set.ml b/stage0/fstar-lib/generated/FStar_Set.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Set.ml rename to stage0/fstar-lib/generated/FStar_Set.ml diff --git a/ocaml/fstar-lib/generated/FStar_SizeT.ml b/stage0/fstar-lib/generated/FStar_SizeT.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_SizeT.ml rename to stage0/fstar-lib/generated/FStar_SizeT.ml diff --git a/ocaml/fstar-lib/generated/FStar_Squash.ml b/stage0/fstar-lib/generated/FStar_Squash.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Squash.ml rename to stage0/fstar-lib/generated/FStar_Squash.ml diff --git a/ocaml/fstar-lib/generated/FStar_SquashProperties.ml b/stage0/fstar-lib/generated/FStar_SquashProperties.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_SquashProperties.ml rename to stage0/fstar-lib/generated/FStar_SquashProperties.ml diff --git a/ocaml/fstar-lib/generated/FStar_StrongExcludedMiddle.ml b/stage0/fstar-lib/generated/FStar_StrongExcludedMiddle.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_StrongExcludedMiddle.ml rename to stage0/fstar-lib/generated/FStar_StrongExcludedMiddle.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Arith.ml b/stage0/fstar-lib/generated/FStar_Tactics_Arith.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Arith.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Arith.ml diff --git 
a/ocaml/fstar-lib/generated/FStar_Tactics_BV.ml b/stage0/fstar-lib/generated/FStar_Tactics_BV.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_BV.ml rename to stage0/fstar-lib/generated/FStar_Tactics_BV.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_BV_Lemmas.ml b/stage0/fstar-lib/generated/FStar_Tactics_BV_Lemmas.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_BV_Lemmas.ml rename to stage0/fstar-lib/generated/FStar_Tactics_BV_Lemmas.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_BreakVC.ml b/stage0/fstar-lib/generated/FStar_Tactics_BreakVC.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_BreakVC.ml rename to stage0/fstar-lib/generated/FStar_Tactics_BreakVC.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Canon.ml b/stage0/fstar-lib/generated/FStar_Tactics_Canon.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Canon.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Canon.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_CanonCommMonoid.ml b/stage0/fstar-lib/generated/FStar_Tactics_CanonCommMonoid.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_CanonCommMonoid.ml rename to stage0/fstar-lib/generated/FStar_Tactics_CanonCommMonoid.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple.ml b/stage0/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple.ml rename to stage0/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple_Equiv.ml b/stage0/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple_Equiv.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple_Equiv.ml rename to stage0/fstar-lib/generated/FStar_Tactics_CanonCommMonoidSimple_Equiv.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_CanonCommSemiring.ml b/stage0/fstar-lib/generated/FStar_Tactics_CanonCommSemiring.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_CanonCommSemiring.ml rename to stage0/fstar-lib/generated/FStar_Tactics_CanonCommSemiring.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_CanonCommSwaps.ml b/stage0/fstar-lib/generated/FStar_Tactics_CanonCommSwaps.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_CanonCommSwaps.ml rename to stage0/fstar-lib/generated/FStar_Tactics_CanonCommSwaps.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_CanonMonoid.ml b/stage0/fstar-lib/generated/FStar_Tactics_CanonMonoid.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_CanonMonoid.ml rename to stage0/fstar-lib/generated/FStar_Tactics_CanonMonoid.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Canon_Lemmas.ml b/stage0/fstar-lib/generated/FStar_Tactics_Canon_Lemmas.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Canon_Lemmas.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Canon_Lemmas.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_CheckLN.ml b/stage0/fstar-lib/generated/FStar_Tactics_CheckLN.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_CheckLN.ml rename to stage0/fstar-lib/generated/FStar_Tactics_CheckLN.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Derived.ml b/stage0/fstar-lib/generated/FStar_Tactics_Derived.ml similarity 
index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Derived.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Derived.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Effect.ml b/stage0/fstar-lib/generated/FStar_Tactics_Effect.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Effect.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Effect.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Logic.ml b/stage0/fstar-lib/generated/FStar_Tactics_Logic.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Logic.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Logic.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_MApply.ml b/stage0/fstar-lib/generated/FStar_Tactics_MApply.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_MApply.ml rename to stage0/fstar-lib/generated/FStar_Tactics_MApply.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_MApply0.ml b/stage0/fstar-lib/generated/FStar_Tactics_MApply0.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_MApply0.ml rename to stage0/fstar-lib/generated/FStar_Tactics_MApply0.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_MkProjectors.ml b/stage0/fstar-lib/generated/FStar_Tactics_MkProjectors.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_MkProjectors.ml rename to stage0/fstar-lib/generated/FStar_Tactics_MkProjectors.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_NamedView.ml b/stage0/fstar-lib/generated/FStar_Tactics_NamedView.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_NamedView.ml rename to stage0/fstar-lib/generated/FStar_Tactics_NamedView.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Names.ml b/stage0/fstar-lib/generated/FStar_Tactics_Names.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Names.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Names.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Parametricity.ml b/stage0/fstar-lib/generated/FStar_Tactics_Parametricity.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Parametricity.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Parametricity.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_PatternMatching.ml b/stage0/fstar-lib/generated/FStar_Tactics_PatternMatching.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_PatternMatching.ml rename to stage0/fstar-lib/generated/FStar_Tactics_PatternMatching.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Print.ml b/stage0/fstar-lib/generated/FStar_Tactics_Print.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Print.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Print.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_SMT.ml b/stage0/fstar-lib/generated/FStar_Tactics_SMT.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_SMT.ml rename to stage0/fstar-lib/generated/FStar_Tactics_SMT.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Simplifier.ml b/stage0/fstar-lib/generated/FStar_Tactics_Simplifier.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Simplifier.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Simplifier.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_SyntaxHelpers.ml b/stage0/fstar-lib/generated/FStar_Tactics_SyntaxHelpers.ml similarity index 100% rename from 
ocaml/fstar-lib/generated/FStar_Tactics_SyntaxHelpers.ml rename to stage0/fstar-lib/generated/FStar_Tactics_SyntaxHelpers.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_TypeRepr.ml b/stage0/fstar-lib/generated/FStar_Tactics_TypeRepr.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_TypeRepr.ml rename to stage0/fstar-lib/generated/FStar_Tactics_TypeRepr.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Typeclasses.ml b/stage0/fstar-lib/generated/FStar_Tactics_Typeclasses.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Typeclasses.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Typeclasses.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Util.ml b/stage0/fstar-lib/generated/FStar_Tactics_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Util.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Util.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V1_Derived.ml b/stage0/fstar-lib/generated/FStar_Tactics_V1_Derived.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V1_Derived.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V1_Derived.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V1_Logic.ml b/stage0/fstar-lib/generated/FStar_Tactics_V1_Logic.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V1_Logic.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V1_Logic.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V1_Logic_Lemmas.ml b/stage0/fstar-lib/generated/FStar_Tactics_V1_Logic_Lemmas.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V1_Logic_Lemmas.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V1_Logic_Lemmas.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V1_SyntaxHelpers.ml b/stage0/fstar-lib/generated/FStar_Tactics_V1_SyntaxHelpers.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V1_SyntaxHelpers.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V1_SyntaxHelpers.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V2_Derived.ml b/stage0/fstar-lib/generated/FStar_Tactics_V2_Derived.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V2_Derived.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V2_Derived.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V2_Logic.ml b/stage0/fstar-lib/generated/FStar_Tactics_V2_Logic.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V2_Logic.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V2_Logic.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V2_SyntaxCoercions.ml b/stage0/fstar-lib/generated/FStar_Tactics_V2_SyntaxCoercions.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V2_SyntaxCoercions.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V2_SyntaxCoercions.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_V2_SyntaxHelpers.ml b/stage0/fstar-lib/generated/FStar_Tactics_V2_SyntaxHelpers.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_V2_SyntaxHelpers.ml rename to stage0/fstar-lib/generated/FStar_Tactics_V2_SyntaxHelpers.ml diff --git a/ocaml/fstar-lib/generated/FStar_Tactics_Visit.ml b/stage0/fstar-lib/generated/FStar_Tactics_Visit.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Tactics_Visit.ml rename to stage0/fstar-lib/generated/FStar_Tactics_Visit.ml diff --git a/ocaml/fstar-lib/generated/FStar_UInt.ml 
b/stage0/fstar-lib/generated/FStar_UInt.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_UInt.ml rename to stage0/fstar-lib/generated/FStar_UInt.ml diff --git a/ocaml/fstar-lib/generated/FStar_UInt128.ml b/stage0/fstar-lib/generated/FStar_UInt128.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_UInt128.ml rename to stage0/fstar-lib/generated/FStar_UInt128.ml diff --git a/ocaml/fstar-lib/generated/FStar_UInt16.ml b/stage0/fstar-lib/generated/FStar_UInt16.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_UInt16.ml rename to stage0/fstar-lib/generated/FStar_UInt16.ml diff --git a/ocaml/fstar-lib/generated/FStar_UInt32.ml b/stage0/fstar-lib/generated/FStar_UInt32.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_UInt32.ml rename to stage0/fstar-lib/generated/FStar_UInt32.ml diff --git a/ocaml/fstar-lib/generated/FStar_UInt64.ml b/stage0/fstar-lib/generated/FStar_UInt64.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_UInt64.ml rename to stage0/fstar-lib/generated/FStar_UInt64.ml diff --git a/ocaml/fstar-lib/generated/FStar_Universe.ml b/stage0/fstar-lib/generated/FStar_Universe.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Universe.ml rename to stage0/fstar-lib/generated/FStar_Universe.ml diff --git a/ocaml/fstar-lib/generated/FStar_Universe_PCM.ml b/stage0/fstar-lib/generated/FStar_Universe_PCM.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Universe_PCM.ml rename to stage0/fstar-lib/generated/FStar_Universe_PCM.ml diff --git a/ocaml/fstar-lib/generated/FStar_WellFounded.ml b/stage0/fstar-lib/generated/FStar_WellFounded.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_WellFounded.ml rename to stage0/fstar-lib/generated/FStar_WellFounded.ml diff --git a/ocaml/fstar-lib/generated/FStar_WellFoundedRelation.ml b/stage0/fstar-lib/generated/FStar_WellFoundedRelation.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_WellFoundedRelation.ml rename to stage0/fstar-lib/generated/FStar_WellFoundedRelation.ml diff --git a/ocaml/fstar-lib/generated/FStar_WellFounded_Util.ml b/stage0/fstar-lib/generated/FStar_WellFounded_Util.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_WellFounded_Util.ml rename to stage0/fstar-lib/generated/FStar_WellFounded_Util.ml diff --git a/ocaml/fstar-lib/generated/FStar_Witnessed_Core.ml b/stage0/fstar-lib/generated/FStar_Witnessed_Core.ml similarity index 100% rename from ocaml/fstar-lib/generated/FStar_Witnessed_Core.ml rename to stage0/fstar-lib/generated/FStar_Witnessed_Core.ml diff --git a/ocaml/fstar-lib/generated/LowStar_Buffer.ml b/stage0/fstar-lib/generated/LowStar_Buffer.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Buffer.ml rename to stage0/fstar-lib/generated/LowStar_Buffer.ml diff --git a/ocaml/fstar-lib/generated/LowStar_BufferOps.ml b/stage0/fstar-lib/generated/LowStar_BufferOps.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_BufferOps.ml rename to stage0/fstar-lib/generated/LowStar_BufferOps.ml diff --git a/ocaml/fstar-lib/generated/LowStar_BufferView.ml b/stage0/fstar-lib/generated/LowStar_BufferView.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_BufferView.ml rename to stage0/fstar-lib/generated/LowStar_BufferView.ml diff --git a/ocaml/fstar-lib/generated/LowStar_BufferView_Down.ml b/stage0/fstar-lib/generated/LowStar_BufferView_Down.ml similarity index 100% rename from 
ocaml/fstar-lib/generated/LowStar_BufferView_Down.ml rename to stage0/fstar-lib/generated/LowStar_BufferView_Down.ml diff --git a/ocaml/fstar-lib/generated/LowStar_BufferView_Up.ml b/stage0/fstar-lib/generated/LowStar_BufferView_Up.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_BufferView_Up.ml rename to stage0/fstar-lib/generated/LowStar_BufferView_Up.ml diff --git a/ocaml/fstar-lib/generated/LowStar_Comment.ml b/stage0/fstar-lib/generated/LowStar_Comment.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Comment.ml rename to stage0/fstar-lib/generated/LowStar_Comment.ml diff --git a/ocaml/fstar-lib/generated/LowStar_ConstBuffer.ml b/stage0/fstar-lib/generated/LowStar_ConstBuffer.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_ConstBuffer.ml rename to stage0/fstar-lib/generated/LowStar_ConstBuffer.ml diff --git a/ocaml/fstar-lib/generated/LowStar_Endianness.ml b/stage0/fstar-lib/generated/LowStar_Endianness.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Endianness.ml rename to stage0/fstar-lib/generated/LowStar_Endianness.ml diff --git a/ocaml/fstar-lib/generated/LowStar_ImmutableBuffer.ml b/stage0/fstar-lib/generated/LowStar_ImmutableBuffer.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_ImmutableBuffer.ml rename to stage0/fstar-lib/generated/LowStar_ImmutableBuffer.ml diff --git a/ocaml/fstar-lib/generated/LowStar_Modifies.ml b/stage0/fstar-lib/generated/LowStar_Modifies.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Modifies.ml rename to stage0/fstar-lib/generated/LowStar_Modifies.ml diff --git a/ocaml/fstar-lib/generated/LowStar_ModifiesPat.ml b/stage0/fstar-lib/generated/LowStar_ModifiesPat.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_ModifiesPat.ml rename to stage0/fstar-lib/generated/LowStar_ModifiesPat.ml diff --git a/ocaml/fstar-lib/generated/LowStar_Monotonic_Buffer.ml b/stage0/fstar-lib/generated/LowStar_Monotonic_Buffer.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Monotonic_Buffer.ml rename to stage0/fstar-lib/generated/LowStar_Monotonic_Buffer.ml diff --git a/ocaml/fstar-lib/generated/LowStar_PrefixFreezableBuffer.ml b/stage0/fstar-lib/generated/LowStar_PrefixFreezableBuffer.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_PrefixFreezableBuffer.ml rename to stage0/fstar-lib/generated/LowStar_PrefixFreezableBuffer.ml diff --git a/ocaml/fstar-lib/generated/LowStar_RVector.ml b/stage0/fstar-lib/generated/LowStar_RVector.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_RVector.ml rename to stage0/fstar-lib/generated/LowStar_RVector.ml diff --git a/ocaml/fstar-lib/generated/LowStar_Regional.ml b/stage0/fstar-lib/generated/LowStar_Regional.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Regional.ml rename to stage0/fstar-lib/generated/LowStar_Regional.ml diff --git a/ocaml/fstar-lib/generated/LowStar_Regional_Instances.ml b/stage0/fstar-lib/generated/LowStar_Regional_Instances.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Regional_Instances.ml rename to stage0/fstar-lib/generated/LowStar_Regional_Instances.ml diff --git a/ocaml/fstar-lib/generated/LowStar_UninitializedBuffer.ml b/stage0/fstar-lib/generated/LowStar_UninitializedBuffer.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_UninitializedBuffer.ml rename to stage0/fstar-lib/generated/LowStar_UninitializedBuffer.ml diff --git 
a/ocaml/fstar-lib/generated/LowStar_Vector.ml b/stage0/fstar-lib/generated/LowStar_Vector.ml similarity index 100% rename from ocaml/fstar-lib/generated/LowStar_Vector.ml rename to stage0/fstar-lib/generated/LowStar_Vector.ml diff --git a/ocaml/fstar-lib/make_fstar_version.sh b/stage0/fstar-lib/make_fstar_version.sh similarity index 100% rename from ocaml/fstar-lib/make_fstar_version.sh rename to stage0/fstar-lib/make_fstar_version.sh diff --git a/ocaml/fstar-lib/prims.ml b/stage0/fstar-lib/prims.ml similarity index 100% rename from ocaml/fstar-lib/prims.ml rename to stage0/fstar-lib/prims.ml diff --git a/ocaml/fstar-tests/FStar_Tests_Main.ml b/stage0/fstar-tests/FStar_Tests_Main.ml similarity index 100% rename from ocaml/fstar-tests/FStar_Tests_Main.ml rename to stage0/fstar-tests/FStar_Tests_Main.ml diff --git a/ocaml/fstar-tests/dune b/stage0/fstar-tests/dune similarity index 100% rename from ocaml/fstar-tests/dune rename to stage0/fstar-tests/dune diff --git a/ocaml/fstar-tests/generated/FStarC_Tests_Data.ml b/stage0/fstar-tests/generated/FStarC_Tests_Data.ml similarity index 100% rename from ocaml/fstar-tests/generated/FStarC_Tests_Data.ml rename to stage0/fstar-tests/generated/FStarC_Tests_Data.ml diff --git a/ocaml/fstar-tests/generated/FStarC_Tests_Norm.ml b/stage0/fstar-tests/generated/FStarC_Tests_Norm.ml similarity index 100% rename from ocaml/fstar-tests/generated/FStarC_Tests_Norm.ml rename to stage0/fstar-tests/generated/FStarC_Tests_Norm.ml diff --git a/ocaml/fstar-tests/generated/FStarC_Tests_Pars.ml b/stage0/fstar-tests/generated/FStarC_Tests_Pars.ml similarity index 100% rename from ocaml/fstar-tests/generated/FStarC_Tests_Pars.ml rename to stage0/fstar-tests/generated/FStarC_Tests_Pars.ml diff --git a/ocaml/fstar-tests/generated/FStarC_Tests_Test.ml b/stage0/fstar-tests/generated/FStarC_Tests_Test.ml similarity index 100% rename from ocaml/fstar-tests/generated/FStarC_Tests_Test.ml rename to stage0/fstar-tests/generated/FStarC_Tests_Test.ml diff --git a/ocaml/fstar-tests/generated/FStarC_Tests_Unif.ml b/stage0/fstar-tests/generated/FStarC_Tests_Unif.ml similarity index 100% rename from ocaml/fstar-tests/generated/FStarC_Tests_Unif.ml rename to stage0/fstar-tests/generated/FStarC_Tests_Unif.ml diff --git a/ocaml/fstar-tests/generated/FStarC_Tests_Util.ml b/stage0/fstar-tests/generated/FStarC_Tests_Util.ml similarity index 100% rename from ocaml/fstar-tests/generated/FStarC_Tests_Util.ml rename to stage0/fstar-tests/generated/FStarC_Tests_Util.ml diff --git a/ocaml/fstar/dune b/stage0/fstar/dune similarity index 100% rename from ocaml/fstar/dune rename to stage0/fstar/dune diff --git a/ocaml/fstar/main.ml b/stage0/fstar/main.ml similarity index 100% rename from ocaml/fstar/main.ml rename to stage0/fstar/main.ml diff --git a/ulib/.gitignore b/stage0/ulib/.gitignore similarity index 100% rename from ulib/.gitignore rename to stage0/ulib/.gitignore diff --git a/stage0/ulib/FStar.Algebra.CommMonoid.Equiv.fst b/stage0/ulib/FStar.Algebra.CommMonoid.Equiv.fst new file mode 100644 index 00000000000..51515a0d264 --- /dev/null +++ b/stage0/ulib/FStar.Algebra.CommMonoid.Equiv.fst @@ -0,0 +1,76 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Algebra.CommMonoid.Equiv + +open FStar.Mul + +unopteq +type equiv (a:Type) = + | EQ : + eq:(a -> a -> Type0) -> + reflexivity:(x:a -> Lemma (x `eq` x)) -> + symmetry:(x:a -> y:a -> Lemma (requires (x `eq` y)) (ensures (y `eq` x))) -> + transitivity:(x:a -> y:a -> z:a -> Lemma (requires (x `eq` y /\ y `eq` z)) (ensures (x `eq` z))) -> + equiv a + +let elim_eq_laws #a (eq:equiv a) + : Lemma ( + (forall x.{:pattern (x `eq.eq` x)} x `eq.eq` x) /\ + (forall x y.{:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\ + (forall x y z.{:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z) + ) + = introduce forall x. x `eq.eq` x + with (eq.reflexivity x); + + introduce forall x y. x `eq.eq` y ==> y `eq.eq` x + with (introduce _ ==> _ + with _. eq.symmetry x y); + + introduce forall x y z. (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z + with (introduce _ ==> _ + with _. eq.transitivity x y z) + +let equality_equiv (a:Type) : equiv a = + EQ (fun x y -> x == y) (fun x -> ()) (fun x y -> ()) (fun x y z -> ()) + +unopteq +type cm (a:Type) (eq:equiv a) = + | CM : + unit:a -> + mult:(a -> a -> a) -> + identity : (x:a -> Lemma ((unit `mult` x) `EQ?.eq eq` x)) -> + associativity : (x:a -> y:a -> z:a -> + Lemma ((x `mult` y `mult` z) `EQ?.eq eq` (x `mult` (y `mult` z)))) -> + commutativity:(x:a -> y:a -> Lemma ((x `mult` y) `EQ?.eq eq` (y `mult` x))) -> + congruence:(x:a -> y:a -> z:a -> w:a -> Lemma (requires (x `EQ?.eq eq` z /\ y `EQ?.eq eq` w)) (ensures ((mult x y) `EQ?.eq eq` (mult z w)))) -> + cm a eq + + + +// temporarily fixing the universe of this lemma to u#1 because +// otherwise tactics for LowStar.Resource canonicalization fails +// by picking up an incorrect universe u#0 for resource type +let right_identity (#a:Type u#aa) (eq:equiv a) (m:cm a eq) (x:a) + : Lemma (x `CM?.mult m` (CM?.unit m) `EQ?.eq eq` x) = + CM?.commutativity m x (CM?.unit m); + CM?.identity m x; + EQ?.transitivity eq (x `CM?.mult m` (CM?.unit m)) ((CM?.unit m) `CM?.mult m` x) x + +let int_plus_cm : cm int (equality_equiv int) = + CM 0 (+) (fun _ -> ()) (fun _ _ _ -> ()) (fun _ _ -> ()) (fun _ _ _ _ -> ()) + +let int_multiply_cm : cm int (equality_equiv int) = + CM 1 ( * ) (fun _ -> ()) (fun _ _ _ -> ()) (fun _ _ -> ()) (fun _ _ _ _ -> ()) diff --git a/stage0/ulib/FStar.Algebra.CommMonoid.Fold.Nested.fst b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.Nested.fst new file mode 100644 index 00000000000..0d93db33faa --- /dev/null +++ b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.Nested.fst @@ -0,0 +1,93 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. 
Rozanov +*) + +(* + Here we reason about nested folds of functions over arbitrary + integer intervals. We call such functions generators. +*) + +module FStar.Algebra.CommMonoid.Fold.Nested + +module CF = FStar.Algebra.CommMonoid.Fold +module CE = FStar.Algebra.CommMonoid.Equiv + +open FStar.IntegerIntervals +open FStar.Matrix + +(* Auxiliary utility that casts (matrix c m n) to seq of length (m*n) *) +let matrix_seq #c #m #r (generator: matrix_generator c m r) = + seq_of_matrix (Matrix.init generator) + +(* + Most general form of nested fold swap theorem. Here we prove that we can + exchange the order of nested foldings over any suitable generator function. + + We use the previously proved weaker version (for zero-based indices) in + order to prove this, because this way the two proofs together are way shorter. + + I keep the argument types explicit in order to make the proof easier to read. +*) +let double_fold_transpose_lemma #c #eq + (#m0: int) (#mk: not_less_than m0) + (#n0: int) (#nk: not_less_than n0) + (cm: CE.cm c eq) + (offset_gen: ifrom_ito m0 mk -> ifrom_ito n0 nk -> c) + : Lemma (double_fold cm offset_gen + `eq.eq` + double_fold cm (transpose_generator offset_gen)) = + let m = interval_size (ifrom_ito m0 mk) in + let n = interval_size (ifrom_ito n0 nk) in + let gen : matrix_generator c m n = fun i j -> offset_gen (m0+i) (n0+j) in + let trans #c #a #b (f: matrix_generator c a b) = transposed_matrix_gen f in + let trans_ofs #c (#a1 #a2 #b1 #b2:int) (f: ifrom_ito a1 a2 -> ifrom_ito b1 b2 -> c) + = transpose_generator f in + // Here, F* agrees that (n-1) == (nk-n0). + // But, replace (n-1) with (nk-n0) below, and the proof will fail :) + let subfold_lhs_base0 (i: under m) = CF.fold cm 0 (n-1) (gen i) in + let subfold_rhs_base0 (j: under n) = CF.fold cm 0 (m-1) (trans gen j) in + let subfold_lhs_precise (i: ifrom_ito m0 mk) + = CF.fold cm n0 nk (offset_gen i) in + let subfold_rhs_precise (j: ifrom_ito n0 nk) + = CF.fold cm m0 mk (trans_ofs offset_gen j) in + let lhs = CF.fold cm m0 mk subfold_lhs_precise in + let rhs = CF.fold cm n0 nk subfold_rhs_precise in + let aux_lhs (i: under m) : Lemma + (CF.fold cm n0 nk (offset_gen (m0+i)) == CF.fold cm 0 (n-1) (gen i)) = + CF.fold_offset_irrelevance_lemma cm n0 nk (offset_gen (m0+i)) 0 (n-1) (gen i) in + let aux_rhs (j: under n) : Lemma + (CF.fold cm m0 mk (trans_ofs offset_gen (n0+j)) == + CF.fold cm 0 (m-1) (trans gen j)) + = CF.fold_offset_irrelevance_lemma cm m0 mk (trans_ofs offset_gen (n0+j)) + 0 (m-1) (trans gen j) in + FStar.Classical.forall_intro aux_lhs; + FStar.Classical.forall_intro aux_rhs; + FStar.Classical.forall_intro eq.reflexivity; + matrix_fold_equals_func_double_fold cm gen; + matrix_fold_equals_func_double_fold cm (trans gen); + let matrix_mn = matrix_seq gen in + let matrix_nm = matrix_seq (trans gen) in + CF.fold_offset_elimination_lemma cm m0 mk subfold_lhs_precise subfold_lhs_base0; + CF.fold_offset_elimination_lemma cm n0 nk subfold_rhs_precise subfold_rhs_base0; + FStar.Classical.forall_intro_2 (FStar.Classical.move_requires_2 eq.symmetry); + FStar.Classical.forall_intro_3 (FStar.Classical.move_requires_3 eq.transitivity); + matrix_fold_equals_fold_of_transpose cm gen; + matrix_fold_equals_func_double_fold cm gen; + matrix_fold_equals_func_double_fold cm (transposed_matrix_gen gen); + assert_norm (double_fold cm (transpose_generator offset_gen) == rhs); + eq.transitivity (FStar.Seq.Permutation.foldm_snoc cm matrix_mn) lhs rhs + diff --git a/stage0/ulib/FStar.Algebra.CommMonoid.Fold.Nested.fsti 
b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.Nested.fsti new file mode 100644 index 00000000000..2f247b99b11 --- /dev/null +++ b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.Nested.fsti @@ -0,0 +1,58 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. Rozanov +*) + +(* + Here we reason about nested folds of functions over arbitrary + integer intervals. We call such functions generators. +*) + +module FStar.Algebra.CommMonoid.Fold.Nested + +module CF = FStar.Algebra.CommMonoid.Fold +module CE = FStar.Algebra.CommMonoid.Equiv + +open FStar.IntegerIntervals + +(* This constructs a generator function that has its arguments in reverse + order. Useful when reasoning about nested folds, transposed matrices, etc. + + Note how this utility is more general than transposed_matrix_gen + found in FStar.Seq.Matrix -- but for zero-based domains, latter is + more convenient. *) +let transpose_generator #c (#m0 #mk: int) + (#n0 #nk: int) + (gen: ifrom_ito m0 mk -> ifrom_ito n0 nk -> c) + : (f: (ifrom_ito n0 nk -> ifrom_ito m0 mk -> c) { forall i j. f j i == gen i j }) + = fun j i -> gen i j + +let double_fold #c #eq #a0 (#ak: not_less_than a0) #b0 (#bk:not_less_than b0) + (cm: CE.cm c eq) + (g: ifrom_ito a0 ak -> ifrom_ito b0 bk -> c) = + CF.fold cm a0 ak (fun (i: ifrom_ito a0 ak) -> CF.fold cm b0 bk (g i)) + + +(* Most general form of nested fold swap theorem. Here we prove that we can + exchange the order of nested foldings over any suitable generator function. *) +val double_fold_transpose_lemma (#c:_) (#eq: _) + (#m0: int) (#mk: not_less_than m0) + (#n0: int) (#nk: not_less_than n0) + (cm: CE.cm c eq) + (offset_gen: ifrom_ito m0 mk -> ifrom_ito n0 nk -> c) + : Lemma (double_fold cm offset_gen + `eq.eq` + double_fold cm (transpose_generator offset_gen)) diff --git a/stage0/ulib/FStar.Algebra.CommMonoid.Fold.fst b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.fst new file mode 100644 index 00000000000..123f94356a0 --- /dev/null +++ b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.fst @@ -0,0 +1,122 @@ +(* + Copyright 2008-2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. Rozanov +*) + +module FStar.Algebra.CommMonoid.Fold +module CE = FStar.Algebra.CommMonoid.Equiv + +open FStar.Seq.Base +open FStar.Seq.Properties +open FStar.Seq.Permutation + +open FStar.IntegerIntervals + +(* Here we define the notion for big sums and big products for + arbitrary commutative monoids. 
We construct the folds from + an integer range and a function, then calculate the fold -- + a sum or a product, depending on the monoid operation. *) + +(* We refine multiplication a bit to make proofs smoothier *) + +open FStar.Mul + +let rec fold #c #eq + (cm: CE.cm c eq) + (a: int) (b: not_less_than a) + (expr: (ifrom_ito a b) -> c) + // some of the lemmas want (ensures (fun (x:c) -> ((nk = n0) ==> (x == expr nk)))) + : Tot (c) (decreases b-a) + = if b = a then expr b + else (fold cm a (b-1) expr) `cm.mult` expr b + +let rec fold_equality #c #eq (cm: CE.cm c eq) + (a: int) (b: not_less_than a) + (expr1 expr2: (ifrom_ito a b) -> c) + : Lemma (requires (forall (i: ifrom_ito a b). expr1 i == expr2 i)) + (ensures fold cm a b expr1 == fold cm a b expr2) + (decreases b - a) = + if b > a then fold_equality cm a (b - 1) expr1 expr2 + +let fold_singleton_lemma #c #eq cm a expr + : Lemma (fold #c #eq cm a a expr == expr a) = () + +let fold_snoc_decomposition #c #eq (cm: CE.cm c eq) a b expr + : Lemma (fold cm a b expr == fold cm a (b-1) expr `cm.mult` (expr b)) = () + + +let rec fold_equals_seq_foldm #c #eq (cm: CE.cm c eq) + (a: int) + (b: not_less_than a) + (expr: (ifrom_ito a b) -> c) + : Lemma (ensures fold cm a b expr `eq.eq` + foldm_snoc cm (init (closed_interval_size a b) + (init_func_from_expr expr a b))) + (decreases b-a) = + if (b=a) then + let ts = init (closed_interval_size a b) (init_func_from_expr expr a b) in + lemma_eq_elim (create 1 (expr b)) ts; + foldm_snoc_singleton cm (expr b); + eq.symmetry (foldm_snoc cm ts) (expr b); + eq.reflexivity (expr b); + eq.transitivity (fold cm a b expr) (expr b) (foldm_snoc cm ts) + else + let lhs = fold cm a b expr in + let subexpr : ifrom_ito a (b-1) -> c = expr in + let fullseq = init (b+1-a) (init_func_from_expr expr a b) in + let rhs = foldm_snoc cm fullseq in + let subseq = init (b-a) (init_func_from_expr subexpr a (b-1)) in + let subsum = fold cm a (b-1) expr in + let subfold = foldm_snoc cm subseq in + let last = expr b in + let op = cm.mult in + fold_equals_seq_foldm cm a (b-1) subexpr; + cm.commutativity last subfold; + eq.reflexivity last; + cm.congruence subsum last subfold last; + foldm_snoc_decomposition cm fullseq; + lemma_eq_elim subseq (fst (un_snoc fullseq)); + eq.symmetry rhs (subfold `op` last); + eq.transitivity lhs (subfold `op` last) rhs + +(* I keep the argument types explicitly stated here because it makes + the lemma easier to read. *) +let rec fold_offset_irrelevance_lemma #c #eq (cm: CE.cm c eq) + (m0: int) (mk: not_less_than m0) (expr1 : ifrom_ito m0 mk -> c) + (n0: int) (nk: not_less_than n0) (expr2 : ifrom_ito n0 nk -> c) + : Lemma (requires (((mk-m0) = (nk-n0)) /\ + (forall (i:under (closed_interval_size m0 mk)). + expr1 (i+m0) == expr2 (i+n0)))) + (ensures fold cm m0 mk expr1 == fold cm n0 nk expr2) + (decreases (mk-m0)) = + if (mk>m0 && nk>n0) then ( + fold_offset_irrelevance_lemma cm m0 (mk-1) expr1 n0 (nk-1) expr2; + assert (expr1 ((mk-m0)+m0) == expr2 ((nk-n0)+n0)) + ) else if (mk=m0) then ( + eq.reflexivity (expr1 m0); + assert (expr1 (0+m0) == expr2 (0+n0)); + assert (expr1 m0 == expr2 n0) + ) + +let fold_offset_elimination_lemma #c #eq (cm: CE.cm c eq) + (m0: int) (mk: not_less_than m0) + (expr1 : ifrom_ito m0 mk -> c) + (expr2 : under (closed_interval_size m0 mk) -> c) + : Lemma (requires ((forall (i:under (closed_interval_size m0 mk)). 
+ expr2 i == expr1 (i+m0)))) + (ensures fold cm m0 mk expr1 == fold cm 0 (mk-m0) expr2) + (decreases (mk-m0)) + = fold_offset_irrelevance_lemma cm m0 mk expr1 0 (mk-m0) expr2 diff --git a/stage0/ulib/FStar.Algebra.CommMonoid.Fold.fsti b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.fsti new file mode 100644 index 00000000000..c2124a561c7 --- /dev/null +++ b/stage0/ulib/FStar.Algebra.CommMonoid.Fold.fsti @@ -0,0 +1,114 @@ +(* + Copyright 2008-2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. Rozanov +*) + +module FStar.Algebra.CommMonoid.Fold +module CE = FStar.Algebra.CommMonoid.Equiv + +open FStar.Seq.Base +open FStar.Seq.Properties +open FStar.Seq.Permutation + +open FStar.IntegerIntervals +open FStar.Mul + + +(* Here we define the notion for big sums and big products for + arbitrary commutative monoids. We construct the folds from + an integer range and a function, then calculate the fold -- + a sum or a product, depending on the monoid operation. *) + +(* We refine multiplication a bit to make proofs smoothier *) + + +(* Notice how we can't just use a and b if we don't want to break + recursive calls with the same exprs *) +let init_func_from_expr #c (#n0: int) (#nk: not_less_than n0) + (expr: (ifrom_ito n0 nk) -> c) + (a: ifrom_ito n0 nk) (b: ifrom_ito a nk) + : (counter_for (ifrom_ito a b) -> c) + = fun (i: counter_for (ifrom_ito a b)) -> expr (n0 + i) + +(* + Fold (Big Sum or Big Product notation in mathematics) of an arbitrary + function expr defined over a finite range of integers. + + Notice how one should very strictly control the domains of + lambdas, otherwise the proofs easily fail. +*) + +val fold (#c:_) (#eq:_) (cm: CE.cm c eq) + (a: int) (b: not_less_than a) (expr: ifrom_ito a b -> c) : c + +(* This lemma establishes the provable equality of the fold + given said equality for all the values from the allowed range *) +val fold_equality (#c:_) (#eq:_) (cm: CE.cm c eq) + (a: int) (b: not_less_than a) + (expr1 expr2: (ifrom_ito a b) -> c) + : Lemma (requires (forall (i: ifrom_ito a b). expr1 i == expr2 i)) + (ensures fold cm a b expr1 == fold cm a b expr2) + +val fold_singleton_lemma (#c:_) (#eq:_) (cm:CE.cm c eq) + (a:int) (expr: ifrom_ito a a -> c) + : Lemma (fold cm a a expr == expr a) + +(* This lemma decomposes the big_sum into the sum of the first + (k-1) elements plus the remaining last one. + Obviously requires the argument range that is at least + 2 elements long. *) +val fold_snoc_decomposition (#c:_) (#eq:_) + (cm: CE.cm c eq) + (a: int) (b: greater_than a) + (expr: (ifrom_ito a b) -> c) + : Lemma (fold cm a b expr == fold cm a (b-1) expr `cm.mult` (expr b)) + +(* This lemma establishes the equality of fold over int range to its + seq-based foldm_snoc counterpart. 
*) +val fold_equals_seq_foldm (#c:_) (#eq:_) + (cm: CE.cm c eq) + (a: int) + (b: not_less_than a) + (expr: (ifrom_ito a b) -> c) + : Lemma (ensures fold cm a b expr `eq.eq` + foldm_snoc cm (init (closed_interval_size a b) + (init_func_from_expr expr a b))) + +(* This lemma proves that if we offset some function by some value, + fold of the function against its own domain will be equal to fold + of the offset function against the offset domain + + Notice how we make bounds explicit in order for the lemma to be + readily usable in subdomain reasoning, provided exprs are + compatible too *) +val fold_offset_irrelevance_lemma (#c:_) (#eq:_) (cm: CE.cm c eq) + (m0: int) (mk: not_less_than m0) + (expr1 : ifrom_ito m0 mk -> c) + (n0: int) (nk: not_less_than n0) + (expr2 : ifrom_ito n0 nk -> c) + : Lemma (requires (((mk-m0) = (nk-n0)) /\ + (forall (i:under (closed_interval_size m0 mk)). + expr1 (i+m0) == expr2 (i+n0)))) + (ensures fold cm m0 mk expr1 == fold cm n0 nk expr2) + +(* More specific version for zero-based domain *) +val fold_offset_elimination_lemma (#c:_) (#eq:_) (cm: CE.cm c eq) + (m0: int) (mk: not_less_than m0) + (expr1 : ifrom_ito m0 mk -> c) + (expr2 : under (closed_interval_size m0 mk) -> c) + : Lemma (requires ((forall (i:under (closed_interval_size m0 mk)). + expr2 i == expr1 (i+m0)))) + (ensures fold cm m0 mk expr1 == fold cm 0 (mk-m0) expr2) diff --git a/stage0/ulib/FStar.Algebra.CommMonoid.fst b/stage0/ulib/FStar.Algebra.CommMonoid.fst new file mode 100644 index 00000000000..427aaee3bc0 --- /dev/null +++ b/stage0/ulib/FStar.Algebra.CommMonoid.fst @@ -0,0 +1,39 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Algebra.CommMonoid + +open FStar.Mul + +unopteq +type cm (a:Type) = + | CM : + unit:a -> + mult:(a -> a -> a) -> + identity : (x:a -> Lemma (unit `mult` x == x)) -> + associativity : (x:a -> y:a -> z:a -> + Lemma (x `mult` y `mult` z == x `mult` (y `mult` z))) -> + commutativity:(x:a -> y:a -> Lemma (x `mult` y == y `mult` x)) -> + cm a + +let right_identity (#a:Type) (m:cm a) (x:a) : + Lemma (CM?.mult m x (CM?.unit m) == x) = + CM?.commutativity m x (CM?.unit m); CM?.identity m x + +let int_plus_cm : cm int = + CM 0 (+) (fun x -> ()) (fun x y z -> ()) (fun x y -> ()) + +let int_multiply_cm : cm int = + CM 1 ( * ) (fun x -> ()) (fun x y z -> ()) (fun x y -> ()) diff --git a/stage0/ulib/FStar.Algebra.Monoid.fst b/stage0/ulib/FStar.Algebra.Monoid.fst new file mode 100644 index 00000000000..90849fb3eec --- /dev/null +++ b/stage0/ulib/FStar.Algebra.Monoid.fst @@ -0,0 +1,214 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Algebra.Monoid + +open FStar.Classical +module PropExt = FStar.PropositionalExtensionality + +(* + * AR: 05/12: adding calls to equational lemmas from PropositionalExtensionality + * these should go away with proper prop support + * also see the comment in PropositionalExtensionality.fst + *) + +(** Definition of a monoid *) + +let right_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = + forall (x:m). x `mult` u == x + +let left_unitality_lemma (m:Type) (u:m) (mult:m -> m -> m) = + forall (x:m). u `mult` x == x + +let associativity_lemma (m:Type) (mult:m -> m -> m) = + forall (x y z:m). x `mult` y `mult` z == x `mult` (y `mult` z) + +unopteq +type monoid (m:Type) = + | Monoid : + unit:m -> + mult:(m -> m -> m) -> + right_unitality:squash (right_unitality_lemma m unit mult) -> + left_unitality:squash (left_unitality_lemma m unit mult) -> + associativity:squash (associativity_lemma m mult) -> + monoid m + + +let intro_monoid (m:Type) (u:m) (mult:m -> m -> m) + : Pure (monoid m) + (requires (right_unitality_lemma m u mult /\ left_unitality_lemma m u mult /\ associativity_lemma m mult)) + (ensures (fun mm -> Monoid?.unit mm == u /\ Monoid?.mult mm == mult)) += + Monoid u mult () () () + + +(** Some monoid structures *) + +let nat_plus_monoid : monoid nat = + let add (x y : nat) : nat = x + y in + intro_monoid nat 0 add + +let int_plus_monoid : monoid int = + intro_monoid int 0 (+) + +(* let int_mul_monoid : monoid int = *) +(* intro_monoid int 1 op_Multiply *) + +let conjunction_monoid : monoid prop = + let u : prop = singleton True in + let mult (p q : prop) : prop = p /\ q in + + let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = + assert ((u `mult` p) <==> p) ; + PropExt.apply (u `mult` p) p + in + + let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = + assert ((p `mult` u) <==> p) ; + PropExt.apply (p `mult` u) p + in + + let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = + assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; + PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) + in + + forall_intro right_unitality_helper ; + assert (right_unitality_lemma prop u mult) ; + forall_intro left_unitality_helper ; + assert (left_unitality_lemma prop u mult) ; + forall_intro_3 associativity_helper; + assert (associativity_lemma prop mult) ; + intro_monoid prop u mult + + +let disjunction_monoid : monoid prop = + let u : prop = singleton False in + let mult (p q : prop) : prop = p \/ q in + + let left_unitality_helper (p:prop) : Lemma ((u `mult` p) == p) = + assert ((u `mult` p) <==> p) ; + PropExt.apply (u `mult` p) p + in + + let right_unitality_helper (p:prop) : Lemma ((p `mult` u) == p) = + assert ((p `mult` u) <==> p) ; + PropExt.apply (p `mult` u) p + in + + let associativity_helper (p1 p2 p3 : prop) : Lemma (p1 `mult` p2 `mult` p3 == p1 `mult` (p2 `mult` p3)) = + assert (p1 `mult` p2 `mult` p3 <==> p1 `mult` (p2 `mult` p3)) ; + PropExt.apply (p1 `mult` p2 `mult` p3) (p1 `mult` (p2 `mult` p3)) + in + + forall_intro right_unitality_helper ; + assert 
(right_unitality_lemma prop u mult) ; + forall_intro left_unitality_helper ; + assert (left_unitality_lemma prop u mult) ; + forall_intro_3 associativity_helper; + assert (associativity_lemma prop mult) ; + intro_monoid prop u mult + +let bool_and_monoid : monoid bool = + let and_ b1 b2 = b1 && b2 in + intro_monoid bool true and_ + +let bool_or_monoid : monoid bool = + let or_ b1 b2 = b1 || b2 in + intro_monoid bool false or_ + +let bool_xor_monoid : monoid bool = + let xor b1 b2 = (b1 || b2) && not (b1 && b2) in + intro_monoid bool false xor + +let lift_monoid_option (#a:Type) (m:monoid a) : monoid (option a) = + let mult (x y:option a) = + match x, y with + | Some x0, Some y0 -> Some (m.mult x0 y0) + | _, _ -> None + in + intro_monoid (option a) (Some m.unit) mult + +(* Definition of a morphism of monoid *) + +let monoid_morphism_unit_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = + f (Monoid?.unit ma) == Monoid?.unit mb + +let monoid_morphism_mult_lemma (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = + forall (x y:a). Monoid?.mult mb (f x) (f y) == f (Monoid?.mult ma x y) + +type monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) = + | MonoidMorphism : + unit:squash (monoid_morphism_unit_lemma f ma mb) -> + mult:squash (monoid_morphism_mult_lemma f ma mb) -> + monoid_morphism f ma mb + +let intro_monoid_morphism (#a #b:Type) (f:a -> b) (ma:monoid a) (mb:monoid b) + : Pure (monoid_morphism f ma mb) + (requires (monoid_morphism_unit_lemma f ma mb /\ monoid_morphism_mult_lemma f ma mb)) + (ensures (fun _ -> True)) += + MonoidMorphism () () + +let embed_nat_int (n:nat) : int = n +let _ = intro_monoid_morphism embed_nat_int nat_plus_monoid int_plus_monoid + +let neg (p:prop) : prop = ~p +let _ = + assert (neg True <==> False) ; + PropExt.apply (neg True) False ; + let mult_lemma_helper (p q:prop) : Lemma (neg (p /\ q) == (neg p \/ neg q)) = + assert (neg (p /\ q) <==> (neg p \/ neg q)) ; + PropExt.apply (neg (p /\ q)) (neg p \/ neg q) + in + forall_intro_2 mult_lemma_helper ; + intro_monoid_morphism neg conjunction_monoid disjunction_monoid + +let _ = + assert (neg False <==> True) ; + PropExt.apply (neg False) True ; + let mult_lemma_helper (p q:prop) : Lemma (neg (p \/ q) == (neg p /\ neg q)) = + assert (neg (p \/ q) <==> (neg p /\ neg q)) ; + PropExt.apply (neg (p \/ q)) (neg p /\ neg q) + in + forall_intro_2 mult_lemma_helper ; + intro_monoid_morphism neg disjunction_monoid conjunction_monoid + +(* Definition of a left action *) + +let mult_act_lemma (m a:Type) (mult:m -> m -> m) (act:m -> a -> a) = + forall (x x':m) (y:a). (x `mult` x') `act` y == x `act` (x' `act` y) + +let unit_act_lemma (m a:Type) (u:m) (act:m -> a -> a) = + forall (y:a). u `act` y == y + +unopteq +type left_action (#m:Type) (mm:monoid m) (a:Type) = + | LAct : + act:(m -> a -> a) -> + mult_lemma: squash (mult_act_lemma m a (Monoid?.mult mm) act) -> + unit_lemma: squash (unit_act_lemma m a (Monoid?.unit mm) act) -> + left_action mm a + +let left_action_morphism + (#a #b #ma #mb:Type) + (f:a -> b) + (* mf ought to be a monoid morphism but we don't use this fact in the property *) + (mf: ma -> mb) + (#mma:monoid ma) + (#mmb:monoid mb) + (la:left_action mma a) + (lb:left_action mmb b) += forall (g:ma) (x:a). 
LAct?.act lb (mf g) (f x) == f (LAct?.act la g x) diff --git a/stage0/ulib/FStar.All.fsti b/stage0/ulib/FStar.All.fsti new file mode 100644 index 00000000000..750653d6651 --- /dev/null +++ b/stage0/ulib/FStar.All.fsti @@ -0,0 +1,43 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.All +open FStar.Heap +include FStar.ST +include FStar.Exn + +let all_pre = all_pre_h heap +let all_post' (a : Type) (pre:Type) = all_post_h' heap a pre +let all_post (a : Type) = all_post_h heap a +let all_wp (a : Type) = all_wp_h heap a +new_effect ALL = ALL_h heap + +unfold let lift_state_all (a : Type) (wp : st_wp a) (p : all_post a) = wp (fun a -> p (V a)) +sub_effect STATE ~> ALL { lift_wp = lift_state_all } + +unfold +let lift_exn_all (a : Type) (wp : ex_wp a) (p : all_post a) (h : heap) = wp (fun ra -> p ra h) +sub_effect EXN ~> ALL { lift_wp = lift_exn_all } + +effect All (a:Type) (pre:all_pre) (post:(h:heap -> Tot (all_post' a (pre h)))) = + ALL a + (fun (p : all_post a) (h : heap) -> pre h /\ (forall ra h1. post h ra h1 ==> p ra h1)) +effect ML (a:Type) = ALL a (fun (p:all_post a) (_:heap) -> forall (a:result a) (h:heap). p a h) + +val exit : int -> ML 'a +val try_with : (unit -> ML 'a) -> (exn -> ML 'a) -> ML 'a + +exception Failure of string +val failwith : string -> All 'a (fun h -> True) (fun h a h' -> Err? a /\ h == h') diff --git a/stage0/ulib/FStar.BV.fst b/stage0/ulib/FStar.BV.fst new file mode 100644 index 00000000000..e00496b4f9e --- /dev/null +++ b/stage0/ulib/FStar.BV.fst @@ -0,0 +1,139 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.BV + +module U = FStar.UInt +module B = FStar.BitVector +module S = FStar.Seq + +let bv_t (n : nat) = B.bv_t n + +let bv_uext #n #i a = + Seq.append (Seq.create i false) a + +let int2bv = U.to_vec +let bv2int = U.from_vec + +let int2bv_lemma_1 = U.to_vec_lemma_1 +let int2bv_lemma_2 = U.to_vec_lemma_2 +let inverse_vec_lemma = U.inverse_vec_lemma +let inverse_num_lemma = U.inverse_num_lemma + +(** Mapping an unbounded nat to a bitvector; only used for bvshl and bvshr + compatibility funs *) +let int2bv_nat (#n: pos) (num: nat): Tot (bv_t n) = U.to_vec (num % pow2 n) + +let int2bv_nat_lemma (#n: pos) (num: uint_t n) + : Lemma + (ensures (int2bv_nat #n num == int2bv #n num)) = + assert (num < pow2 n); + FStar.Math.Lemmas.modulo_lemma num (pow2 n); + assert (num % pow2 n = num) + +let list2bv #n l = S.seq_of_list l +let bv2list #n s = S.seq_to_list s +let list2bv_bij #n a = S.lemma_list_seq_bij a +let bv2list_bij #n a = S.lemma_seq_list_bij a + +let bvand = B.logand_vec +let int2bv_logand #n #x #y #z pf = + inverse_vec_lemma #n (bvand #n (int2bv #n x) (int2bv #n y)) + +let bvxor = B.logxor_vec +let int2bv_logxor #n #x #y #z pf = + inverse_vec_lemma #n (bvxor #n (int2bv x) (int2bv y)) + +let bvor = B.logor_vec +let int2bv_logor #n #x #y #z pf = + inverse_vec_lemma #n (bvor #n (int2bv x) (int2bv y)) + +let bvnot = B.lognot_vec +let int2bv_lognot #n #x #y pf = + inverse_vec_lemma #n (bvnot #n (int2bv x)) + +(*TODO: specify index functions? *) +let bvshl' (#n: pos) (a: bv_t n) (s: bv_t n): bv_t n = + B.shift_left_vec #n a (bv2int #n s) +let bvshl (#n: pos) (a: bv_t n) (s: nat): bv_t n = + bvshl' #n a (int2bv_nat #n s) + +let int2bv_shl' #n #x #y #z pf = + inverse_vec_lemma #n (bvshl' #n (int2bv #n x) (int2bv #n y)) +let int2bv_shl #n #x #y #z pf = + int2bv_nat_lemma #n y; + inverse_vec_lemma #n (bvshl #n (int2bv #n x) y) + +let bvshr' (#n: pos) (a: bv_t n) (s: bv_t n): bv_t n = + B.shift_right_vec #n a (bv2int #n s) +let bvshr (#n: pos) (a: bv_t n) (s: nat) : bv_t n = + bvshr' #n a (int2bv_nat #n s) +let int2bv_shr' #n #x #y #z pf = + inverse_vec_lemma #n (bvshr' #n (int2bv #n x) (int2bv #n y)) +let int2bv_shr #n #x #y #z pf = + int2bv_nat_lemma #n y; + inverse_vec_lemma #n (bvshr #n (int2bv #n x) y) + + + +let bvult #n a b = (bv2int #n a) < (bv2int #n b) + +let int2bv_lemma_ult_1 #n a b = + inverse_num_lemma #n a; + inverse_num_lemma #n b + +let int2bv_lemma_ult_2 #n a b = + inverse_num_lemma #n a; + inverse_num_lemma #n b + + +let bvadd #n a b = + int2bv #n (U.add_mod (bv2int #n a) (bv2int #n b)) +let int2bv_add #n #x #y #z pf = + inverse_vec_lemma #n (bvadd #n (int2bv #n x) (int2bv #n y)) + +let bvsub #n a b = + int2bv #n (U.sub_mod (bv2int #n a) (bv2int #n b)) +let int2bv_sub #n #x #y #z pf = + inverse_vec_lemma #n (bvsub #n (int2bv #n x) (int2bv #n y)) + +let bvdiv #n a b = + int2bv #n (U.udiv #n (bv2int #n a) b) +let int2bv_div #n #x #y #z pf = + inverse_vec_lemma #n (bvdiv #n (int2bv #n x) y) + +let bvdiv_unsafe #n a b = if (bv2int b <> 0) then bvdiv a (bv2int b) else int2bv 0 +let bvdiv_unsafe_sound #n #a #b b_nonzero_pf = () + + +let bvmod #n a b = + int2bv #n (U.mod #n (bv2int #n a) b) +let int2bv_mod #n #x #y #z pf = + inverse_vec_lemma #n (bvmod #n (int2bv #n x) y) + +let bvmod_unsafe #n a b = if (bv2int b <> 0) then bvmod a (bv2int b) else int2bv 0 +let bvmod_unsafe_sound #n #a #b b_nonzero_pf = () + +// Z3's bvmul is also modulo +let bvmul #n a b = + int2bv #n (U.mul_mod #n (bv2int #n a) b) +let int2bv_mul #n #x #y #z pf = + inverse_vec_lemma #n (bvmul #n (int2bv #n 
x) y) + +let bvmul' #n a b = + int2bv #n (U.mul_mod #n (bv2int #n a) (bv2int #n b)) + +let int2bv_mul' #n #x #y #z pf = + inverse_vec_lemma #n (bvmul' #n (int2bv #n x) (int2bv #n y)) diff --git a/stage0/ulib/FStar.BV.fsti b/stage0/ulib/FStar.BV.fsti new file mode 100644 index 00000000000..3c3bb88fe4e --- /dev/null +++ b/stage0/ulib/FStar.BV.fsti @@ -0,0 +1,304 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.BV + +/// This module defines an abstract type of length-indexed bit +/// vectors. The type and its operations are handled primitively in +/// F*'s SMT encoding, which maps them to the SMT sort of bit vectors +/// and operations on that sort. Note that this encoding only applies +/// when the length [n] is a syntactic literal: bit vectors with a +/// length referring to some variable, bound or otherwise, are encoded +/// as abstract sequences of bits. +/// +/// Because of this syntactic encoding, it is also often helpful to +/// explicitly specify the bit length on all operations -- for example +/// constructing a 64-bit vector with [int2bv #64 1]. These explicit +/// annotations ensure that the encoding uses the literal length 64, +/// rather than inferring some variable as the length. +/// +/// One way to use this module is in conjunction with +/// FStar.Tactics.BV. Its main tactic, [bv_tac], converts bitwise +/// operations on unsigned integers to operations on bit vectors and +/// back using the [int2bv / bv2int] isomorphism. This can be an +/// effective way of discharging such proof obligations for bitwise +/// operations on integers using the SMT solver's theory of +/// bitvectors. + +open FStar.UInt +// for now just opening this for logand, logxor, etc. but we need a better solution. 
+ +(** The main type of this module, bit vectors of length [n], with + decidable equality *) +val bv_t (n: nat) : eqtype + +(* Experimental: + Redefining basic type from UInt to avoid importing UInt + Reduces verification time by 50% in small examples +// let max_int (n:nat) : Tot int = pow2 n - 1 +// let min_int (n:nat) : Tot int = 0 +// let fits (x:int) (n:nat) : Tot bool = min_int n <= x && x <= max_int n +// let size (x:int) (n:nat) : Tot Type0 = b2t(fits x n) +// type uint_t' (n:nat) = x:int{size x n} +*) + +(** Extending a bit vector of length [n] to a larger vector of size + [m+n], filling the extra bits with 0 *) +val bv_uext (#n #m: pos) (a: bv_t n) : Tot (normalize (bv_t (m + n))) + +(**** Relating unsigned integers to bitvectors *) + +(** Mapping a bounded unsigned integer of size [< 2^n], to a n-length + bit vector *) +val int2bv (#n: pos) (num: uint_t n) : Tot (bv_t n) + +(** Mapping a bit vector back to a bounded unsigned integer of size [< + 2^n] *) +val bv2int (#n: pos) (vec: bv_t n) : Tot (uint_t n) + +val int2bv_lemma_1 (#n: pos) (a b: uint_t n) + : Lemma (requires a = b) (ensures (int2bv #n a = int2bv #n b)) + +val int2bv_lemma_2 (#n: pos) (a b: uint_t n) + : Lemma (requires (int2bv a = int2bv b)) (ensures a = b) + +val inverse_vec_lemma (#n: pos) (vec: bv_t n) + : Lemma (requires True) (ensures vec = (int2bv (bv2int vec))) [SMTPat (int2bv (bv2int vec))] + +val inverse_num_lemma (#n: pos) (num: uint_t n) + : Lemma (requires True) + (ensures num = bv2int #n (int2bv #n num)) + [SMTPat (bv2int #n (int2bv #n num))] + +(**** Relating lists to bitvectors *) + +(** Mapping a list of booleans to a bitvector *) +val list2bv (#n: pos) (l: list bool {List.length l = n}) : Tot (bv_t n) + +(** Mapping a bitvector to a list of booleans *) +val bv2list: #n: pos -> bv_t n -> Tot (l: list bool {List.length l = n}) + +val list2bv_bij (#n: pos) (a: list bool {List.length a = n}) + : Lemma (requires (True)) (ensures (bv2list (list2bv #n a) = a)) + +val bv2list_bij (#n: pos) (a: bv_t n) + : Lemma (requires (True)) (ensures (list2bv (bv2list #n a) = a)) + +(**** Bitwise logical operators *) + +(** Bitwise conjunction *) +val bvand (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +val int2bv_logand: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvand #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (logand #n x y) == z) + +(** Bitwise exclusive or *) +val bvxor (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +val int2bv_logxor: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvxor #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (logxor #n x y) == z) + +(** Bitwise disjunction *) +val bvor (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +val int2bv_logor: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvor #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (logor #n x y) == z) + +(** Bitwise negation *) +val bvnot (#n: pos) (a: bv_t n) : Tot (bv_t n) + +val int2bv_lognot: #n: pos -> #x: uint_t n -> #z: bv_t n -> squash (bvnot #n (int2bv #n x) == z) + -> Lemma (int2bv #n (lognot #n x) == z) + +(** Bitwise shift left: shift by bit-vector. + This variant directly corresponds to the SMT-LIB bvshl function. In some + cases, it may be more efficient to use this variant rather than the below + natural number [bvshl] variant, as the below requires a conversion from + unbounded integers. *) +val bvshl' (#n: pos) (a: bv_t n) (s: bv_t n) : Tot (bv_t n) + +(** Bitwise shift left: shift by integer. 
+ This variant uses an unbounded natural and exists for compatibility. *) +val bvshl (#n: pos) (a: bv_t n) (s: nat) : Tot (bv_t n) + +val int2bv_shl': + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvshl' #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (shift_left #n x y) == z) + +val int2bv_shl: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvshl #n (int2bv #n x) y == z) + -> Lemma (int2bv #n (shift_left #n x y) == z) + +(** Bitwise shift right: shift by bit-vector. + This variant directly corresponds to the SMT-LIB bvshr function. In some + cases, it may be more efficient to use this variant rather than the below + natural number [bvshr] variant, as the below requires a conversion from + unbounded integers. + *) +val bvshr' (#n: pos) (a: bv_t n) (s: bv_t n) : Tot (bv_t n) + +(** Bitwise shift right: shift by integer. + This variant uses an unbounded natural and exists for compatibility. *) +val bvshr (#n: pos) (a: bv_t n) (s: nat) : Tot (bv_t n) + +val int2bv_shr': + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvshr' #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (shift_right #n x y) == z) + +val int2bv_shr: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvshr #n (int2bv #n x) y == z) + -> Lemma (int2bv #n (shift_right #n x y) == z) + +(**** Arithmetic operations *) +unfold +let bv_zero #n = int2bv #n 0 + +(** Inequality on bitvectors *) +val bvult (#n: pos) (a b: bv_t n) : Tot (bool) + +val int2bv_lemma_ult_1 (#n: pos) (a b: uint_t n) + : Lemma (requires a < b) (ensures (bvult #n (int2bv #n a) (int2bv #n b))) + +val int2bv_lemma_ult_2 (#n: pos) (a b: uint_t n) + : Lemma (requires (bvult #n (int2bv #n a) (int2bv #n b))) (ensures a < b) + +(** Addition *) +val bvadd (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +val int2bv_add: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvadd #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (add_mod #n x y) == z) + +(** Subtraction *) +val bvsub (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +val int2bv_sub: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvsub #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (sub_mod #n x y) == z) + +(** Division *) +val bvdiv (#n: pos) (a: bv_t n) (b: uint_t n {b <> 0}) : Tot (bv_t n) + +val int2bv_div: + #n: pos -> + #x: uint_t n -> + #y: uint_t n {y <> 0} -> + #z: bv_t n -> + squash (bvdiv #n (int2bv #n x) y == z) + -> Lemma (int2bv #n (udiv #n x y) == z) + + +(** 'bvdiv_unsafe' is an uninterpreted function on 'bv_t n', + modeling the corresponding operator from SMT-LIB. + When its second argument is nonzero, the lemma below + says that it is equivalent to bvdiv. *) +val bvdiv_unsafe (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +(** 'bvdiv_unsafe' behaves as 'bvdiv' when denominator is nonzero *) +val bvdiv_unsafe_sound : + #n: pos -> + #a : bv_t n -> + #b : bv_t n -> + squash (bv2int b <> 0) + -> Lemma (bvdiv_unsafe #n a b = bvdiv a (bv2int b)) + + +(** Modulus *) +val bvmod (#n: pos) (a: bv_t n) (b: uint_t n {b <> 0}) : Tot (bv_t n) + +val int2bv_mod: + #n: pos -> + #x: uint_t n -> + #y: uint_t n {y <> 0} -> + #z: bv_t n -> + squash (bvmod #n (int2bv #n x) y == z) + -> Lemma (int2bv #n (mod #n x y) == z) + +(** 'bvmod_unsafe' is an uninterpreted function on 'bv_t n', + modeling the corresponding operator from SMT-LIB. + When its second argument is nonzero, the lemma below + says that it is equivalent to bvmod. 
*) +val bvmod_unsafe (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +(** 'bvmod_unsafe' behaves as 'bvmod' when denominator is nonzero *) +val bvmod_unsafe_sound : + #n: pos -> + #a : bv_t n -> + #b : bv_t n -> + squash (bv2int b <> 0) + -> Lemma (bvmod_unsafe #n a b = bvmod a (bv2int b)) + +(** Multiplication modulo*) +val bvmul (#n: pos) (a: bv_t n) (b: uint_t n) : Tot (bv_t n) + +val int2bv_mul: + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvmul #n (int2bv #n x) y == z) + -> Lemma (int2bv #n (mul_mod #n x y) == z) + +(** Bit-vector multiplication *) +val bvmul' (#n: pos) (a b: bv_t n) : Tot (bv_t n) + +val int2bv_mul': + #n: pos -> + #x: uint_t n -> + #y: uint_t n -> + #z: bv_t n -> + squash (bvmul' #n (int2bv #n x) (int2bv #n y) == z) + -> Lemma (int2bv #n (mul_mod #n x y) == z) + diff --git a/stage0/ulib/FStar.BigOps.fst b/stage0/ulib/FStar.BigOps.fst new file mode 100644 index 00000000000..552c224774c --- /dev/null +++ b/stage0/ulib/FStar.BigOps.fst @@ -0,0 +1,132 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.BigOps +module T = FStar.Tactics.V2 + +let normal_eq (#a:Type) (f:a) + = () + +//////////////////////////////////////////////////////////////////////////////// +let map_op'_nil + (#a:Type) (#b:Type) (#c:Type) + (op:b -> c -> GTot c) (f:a -> GTot b) (z:c) + : Lemma (map_op' op f [] z == z) + = () + +let map_op'_cons #a #b #c (op:b -> c -> GTot c) (f:a -> GTot b) (hd:a) (tl:list a) (z:c) + : Lemma (map_op' op f (hd::tl) z == f hd `op` map_op' op f tl z) + = () + +//////////////////////////////////////////////////////////////////////////////// +let big_and'_nil (#a:Type) (f:a -> Type) + = assert (big_and' f [] == True) by (T.compute()) + +let big_and'_cons (#a:Type) (f:a -> Type) (hd:a) (tl:list a) + = assert (big_and' f (hd :: tl) == (f hd /\ big_and' f tl)) by (T.compute()) + +let big_and'_prop (#a:Type) (f:a -> Type) (l:list a) + = match l with + | [] -> big_and'_nil f + | hd::tl -> big_and'_cons f hd tl + +let rec big_and'_forall (#a:Type) (f:a -> Type) (l:list a) + = match l with + | [] -> big_and'_nil f; () + | hd::tl -> big_and'_cons f hd tl; big_and'_forall f tl + +//////////////////////////////////////////////////////////////////////////////// +let big_or'_nil (#a:Type) (f:a -> Type) + = assert (big_or' f [] == False) by (T.compute()) + +let big_or'_cons (#a:Type) (f:a -> Type) (hd:a) (tl:list a) + = assert (big_or' f (hd :: tl) == (f hd \/ big_or' f tl)) by (T.compute()) + +let big_or'_prop (#a:Type) (f:a -> Type) (l:list a) + = match l with + | [] -> big_or'_nil f + | hd::tl -> big_or'_cons f hd tl + +let rec big_or'_exists (#a:Type) (f:a -> Type) (l:list a) + = match l with + | [] -> big_or'_nil f; () + | hd::tl -> big_or'_cons f hd tl; big_or'_exists f tl + +//////////////////////////////////////////////////////////////////////////////// +let pairwise_and'_nil (#a:Type) (f:a -> a -> Type0) + = assert (pairwise_and' f [] == True) by (T.compute()) + +let pairwise_and'_cons (#a:Type) (f:a -> a -> 
Type) (hd:a) (tl:list a) + = assert (pairwise_and' f (hd::tl) == (big_and' (f hd) tl /\ pairwise_and' f tl)) + by (T.trefl()) + +let pairwise_and'_prop (#a:Type) (f:a -> a -> Type) (l:list a) + = match l with + | [] -> pairwise_and'_nil f + | hd::tl -> pairwise_and'_cons f hd tl + +(* Note, this is good example of where the difference between + the implicitly and explicitly reducing variants of the definitions + makes a difference. + + Proving this lemma directly on the `pairwise_and` is much harder + since one has to reason about many partially reduced forms. + + Instead, we first prove the lemma on the non-reducing primed + version of the definition, and then obtain the lemma we want + at the end using `normal_eq` *) +let rec pairwise_and'_forall (#a:Type) (f: a -> a -> Type) (l:list a) + = match l with + | [] -> pairwise_and'_nil f + | hd::tl -> + pairwise_and'_cons f hd tl; + pairwise_and'_forall f tl; + big_and'_forall (f hd) tl + +let rec pairwise_and'_forall_no_repeats (#a:Type) (f: a -> a -> Type) (l:list a) + = match l with + | [] -> pairwise_and'_nil f + | hd::tl -> + pairwise_and'_cons f hd tl; + pairwise_and'_forall_no_repeats f tl; + big_and'_forall (f hd) tl +//////////////////////////////////////////////////////////////////////////////// + +let pairwise_or'_nil (#a:Type) (f:a -> a -> Type0) + = assert (pairwise_or' f [] == False) by (T.compute()) + +let pairwise_or'_cons (#a:Type) (f:a -> a -> Type) (hd:a) (tl:list a) + = assert (pairwise_or' f (hd::tl) == (big_or' (f hd) tl \/ pairwise_or' f tl)) + +let pairwise_or'_prop (#a:Type) (f:a -> a -> Type) (l:list a) + = match l with + | [] -> pairwise_or'_nil f + | hd::tl -> pairwise_or'_cons f hd tl + +let rec pairwise_or'_exists (#a:Type) (f: a -> a -> Type) (l:list a) + = match l with + | [] -> pairwise_or'_nil f + | hd::tl -> + pairwise_or'_cons f hd tl; + pairwise_or'_exists f tl; + big_or'_exists (f hd) tl + +let rec pairwise_or'_exists_no_repeats (#a:Type) (f: a -> a -> Type) (l:list a) + = match l with + | [] -> pairwise_or'_nil f + | hd::tl -> + pairwise_or'_cons f hd tl; + pairwise_or'_exists_no_repeats f tl; + big_or'_exists (f hd) tl diff --git a/stage0/ulib/FStar.BigOps.fsti b/stage0/ulib/FStar.BigOps.fsti new file mode 100644 index 00000000000..b3a307a7fac --- /dev/null +++ b/stage0/ulib/FStar.BigOps.fsti @@ -0,0 +1,274 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.BigOps + +/// This library provides propositional connectives over finite sets +/// expressed as lists, aka "big operators", in analogy with LaTeX +/// usage for \bigand, \bigor, etc. +/// +/// The library is designed with a dual usage in mind: +/// +/// 1. Normalization: When applied to a list literal, we want +/// {[big_and f [a;b;c]]} to implicilty reduce to [f a /\ f b /\ f c] +/// +/// 2. Symbolic manipulation: We provide lemmas of the form +/// +/// [big_and f l <==> forall x. 
L.memP x l ==> f x] +/// +/// In this latter form, partially computing [big_and] as a fold over +/// a list is cumbersome for proof. So, we provide variants [big_and'] +/// etc., that do not reduce implicitly. + +module L = FStar.List.Tot.Base + +(** We control reduction using the [delta_attr] feature of the + normalizer. See FStar.Pervasives for how that works. Every term + that is to be reduced is with the [__reduce__] attribute *) +let __reduce__ = () + +(** We wrap [norm] with a module-specific custom usage, triggering + specific reduction steps *) + +[@@ __reduce__] +unfold +let normal (#a: Type) (x: a) : a = + FStar.Pervasives.norm [ + iota; + zeta; + delta_only [`%L.fold_right_gtot; `%L.map_gtot]; + delta_attr [`%__reduce__]; + primops; + simplify + ] + x + +(** A useful lemma to relate terms to their implicilty reducing variants *) +val normal_eq (#a: Type) (f: a) : Lemma (f == normal f) + +(**** Map and fold *) + +(** A utility that combines map and fold: [map_op' op f l z] maps each + element of [l] by [f] and then combines them using [op] *) +[@@ __reduce__] +let map_op' #a #b #c (op: (b -> c -> GTot c)) (f: (a -> GTot b)) (l: list a) (z: c) : GTot c = + L.fold_right_gtot #a #c l (fun x acc -> (f x) `op` acc) z + +(** Equations for [map_op'] showing how it folds over the empty list *) +val map_op'_nil (#a #b #c: Type) (op: (b -> c -> GTot c)) (f: (a -> GTot b)) (z: c) + : Lemma (map_op' op f [] z == z) + +(** Equations for [map_op'] showing how it folds over a cons cell *) +val map_op'_cons + (#a #b #c: Type) + (op: (b -> c -> GTot c)) + (f: (a -> GTot b)) + (hd: a) + (tl: list a) + (z: c) + : Lemma (map_op' op f (hd :: tl) z == (f hd) `op` (map_op' op f tl z)) + +(**** Conjunction *) + +(** [big_and' f l] = [/\_{x in l} f x] *) +[@@ __reduce__] +let big_and' #a (f: (a -> Type)) (l: list a) : Type = map_op' l_and f l True + +(** Equations for [big_and'] showing it to be trivial over the empty list *) +val big_and'_nil (#a: Type) (f: (a -> Type)) : Lemma (big_and' f [] == True) + +(** Equations for [big_and'] showing it to be a fold over a list with [/\] *) +val big_and'_cons (#a: Type) (f: (a -> Type)) (hd: a) (tl: list a) + : Lemma (big_and' f (hd :: tl) == (f hd /\ big_and' f tl)) + +(** [big_and' f l] is a [prop], i.e., it is proof irrelevant. + + Note: defining `big_and'` to intrinsically be in `prop` + is also possible, but it's much more tedious in proofs. + + This is in part because the [/\] is not defined in prop, + though one can prove that [a /\ b] is a prop. + + The discrepancy means that I preferred to prove these + operators in [prop] extrinsically. +*) +val big_and'_prop (#a: Type) (f: (a -> Type)) (l: list a) : Lemma ((big_and' f l) `subtype_of` unit) + +(** Interpreting the finite conjunction [big_and f l] + as an infinite conjunction [forall] *) +val big_and'_forall (#a: Type) (f: (a -> Type)) (l: list a) + : Lemma (big_and' f l <==> (forall x. 
L.memP x l ==> f x)) + +(** [big_and f l] is an implicitly reducing variant of [big_and'] + It is defined in [prop] *) + +[@@ __reduce__] +unfold +let big_and #a (f: (a -> Type)) (l: list a) : prop = + big_and'_prop f l; + normal (big_and' f l) + +(**** Disjunction *) + +(** [big_or f l] = [\/_{x in l} f x] *) +[@@ __reduce__] +let big_or' #a (f: (a -> Type)) (l: list a) : Type = map_op' l_or f l False + +(** Equations for [big_or] showing it to be [False] on the empty list *) +val big_or'_nil (#a: Type) (f: (a -> Type)) : Lemma (big_or' f [] == False) + +(** Equations for [big_or] showing it to fold over a list *) +val big_or'_cons (#a: Type) (f: (a -> Type)) (hd: a) (tl: list a) + : Lemma (big_or' f (hd :: tl) == (f hd \/ big_or' f tl)) + +(** [big_or f l] is a `prop` + See the remark above on the style of proof for prop *) +val big_or'_prop (#a: Type) (f: (a -> Type)) (l: list a) : Lemma ((big_or' f l) `subtype_of` unit) + +(** Interpreting the finite disjunction [big_or f l] + as an infinite disjunction [exists] *) +val big_or'_exists (#a: Type) (f: (a -> Type)) (l: list a) + : Lemma (big_or' f l <==> (exists x. L.memP x l /\ f x)) + +(** [big_or f l] is an implicitly reducing variant of [big_or'] + It is defined in [prop] *) + +[@@ __reduce__] +unfold +let big_or #a (f: (a -> Type)) (l: list a) : prop = + big_or'_prop f l; + normal (big_or' f l) + +(**** Pairwise operators *) + +/// We provide functions to apply a reflexive, symmetric binary +/// operator to elements in a list [l] pairwise, in a triangle of +/// elements in the square matrix of [l X l]. To illustrate, for a +/// list of [n] elements, we fold the operator over the pairwise +/// elements of the list in top-down, left-to-right order of the +/// diagram below +/// +/// +/// {[ +/// 0 1 2 3 ... n +/// 0 +/// 1 x +/// 2 x x +/// 3 x x x +/// . x x x x +/// n x x x x ]} + +(** Mapping pairs of elements of [l] using [f] and combining them with + [op]. *) +[@@ __reduce__] +let rec pairwise_op' #a #b (op: (b -> b -> GTot b)) (f: (a -> a -> b)) (l: list a) (z: b) : GTot b = + match l with + | [] -> z + | hd :: tl -> (map_op' op (f hd) tl z) `op` (pairwise_op' op f tl z) + +(** [f] is a symmetric relation *) +let symmetric (#a: Type) (f: (a -> a -> Type)) = forall x y. f x y <==> f y x + +(** [f] is a reflexive relation *) +let reflexive (#a: Type) (f: (a -> a -> Type)) = forall x. f x x + +(** [f] is a anti-reflexive relation *) +let anti_reflexive (#a: Type) (f: (a -> a -> Type)) = forall x. 
~(f x x) + +(**** Pairwise conjunction *) + +(** [pairwise_and f l] conjoins [f] on all pairs excluding the diagonal + i.e., + + {[ pairwise_and f [a; b; c] = f a b /\ f a c /\ f b c ]} *) +[@@ __reduce__] +let pairwise_and' #a (f: (a -> a -> Type)) (l: list a) : Type = pairwise_op' l_and f l True + +(** Equations for [pairwise_and] showing it to be a fold with [big_and] *) +val pairwise_and'_nil (#a: Type) (f: (a -> a -> Type0)) : Lemma (pairwise_and' f [] == True) + +(** Equations for [pairwise_and] showing it to be a fold with [big_and] *) +val pairwise_and'_cons (#a: Type) (f: (a -> a -> Type0)) (hd: a) (tl: list a) + : Lemma (pairwise_and' f (hd :: tl) == (big_and' (f hd) tl /\ pairwise_and' f tl)) + +(** [pairwise_and' f l] is a prop + See the remark above on the style of proof for prop *) +val pairwise_and'_prop (#a: Type) (f: (a -> a -> Type)) (l: list a) + : Lemma ((pairwise_and' f l) `subtype_of` unit) + +(** [pairwise_and' f l] for symmetric reflexive relations [f] + is interpreted as universal quantification over pairs of list elements **) +val pairwise_and'_forall (#a: Type) (f: (a -> a -> Type)) (l: list a) + : Lemma (requires symmetric f /\ reflexive f) + (ensures (pairwise_and' f l <==> (forall x y. L.memP x l /\ L.memP y l ==> f x y))) + +(** [pairwise_and' f l] for symmetric relations [f] interpreted as + universal quantification over pairs of list of unique elements *) +val pairwise_and'_forall_no_repeats (#a: Type) (f: (a -> a -> Type)) (l: list a) + : Lemma (requires symmetric f /\ L.no_repeats_p l) + (ensures (pairwise_and' f l <==> (forall x y. L.memP x l /\ L.memP y l /\ x =!= y ==> f x y))) + +(** [pairwise_and f l] is an implicitly reducing variant of [pairwise_and'] + It is defined in [prop] *) + +[@@ __reduce__] +unfold +let pairwise_and #a (f: (a -> a -> Type)) (l: list a) : prop = + pairwise_and'_prop f l; + normal (pairwise_and' f l) + +(**** Pairwise disjunction *) + +(** [pairwise_or f l] disjoins [f] on all pairs excluding the diagonal + i.e., [pairwise_or f [a; b; c] = f a b \/ f a c \/ f b c] *) +[@@ __reduce__] +let pairwise_or' #a (f: (a -> a -> Type)) (l: list a) : Type = pairwise_op' l_or f l False + +(** Equations for [pairwise_or'] showing it to be a fold with [big_or'] *) +val pairwise_or'_nil (#a: Type) (f: (a -> a -> Type0)) : Lemma (pairwise_or' f [] == False) + +(** Equations for [pairwise_or'] showing it to be a fold with [big_or'] *) +val pairwise_or'_cons (#a: Type) (f: (a -> a -> Type0)) (hd: a) (tl: list a) + : Lemma (pairwise_or' f (hd :: tl) == (big_or' (f hd) tl \/ pairwise_or' f tl)) + +(** [pairwise_or' f l] is a prop + See the remark above on the style of proof for prop *) +val pairwise_or'_prop (#a: Type) (f: (a -> a -> Type)) (l: list a) + : Lemma ((pairwise_or' f l) `subtype_of` unit) + +(** [pairwise_or' f l] for symmetric, anti-reflexive relations [f] + interpreted as existential quantification over + pairs of list elements *) +val pairwise_or'_exists (#a: Type) (f: (a -> a -> Type)) (l: list a) + : Lemma (requires symmetric f /\ anti_reflexive f) + (ensures (pairwise_or' f l <==> (exists x y. L.memP x l /\ L.memP y l /\ f x y))) + +(** [pairwise_or' f l] for symmetric, anti-reflexive relations [f] + interpreted as existential quantification over + pairs of list elements *) +val pairwise_or'_exists_no_repeats (#a: Type) (f: (a -> a -> Type)) (l: list a) + : Lemma (requires symmetric f /\ L.no_repeats_p l) + (ensures (pairwise_or' f l <==> (exists x y. 
L.memP x l /\ L.memP y l /\ x =!= y /\ f x y))) + +(** [pairwise_or f l] is an implicitly reducing variant of [pairwise_or'] + It is defined in [prop] *) + +[@@ __reduce__] +unfold +let pairwise_or #a (f: (a -> a -> Type)) (l: list a) : prop = + pairwise_or'_prop f l; + normal (pairwise_or' f l) + diff --git a/stage0/ulib/FStar.BitVector.fst b/stage0/ulib/FStar.BitVector.fst new file mode 100644 index 00000000000..c75e87c7c1e --- /dev/null +++ b/stage0/ulib/FStar.BitVector.fst @@ -0,0 +1,118 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.BitVector + +/// This module defines a bit vector as a sequence of booleans of a +/// given length, and provides various utilities. +/// +/// NOTE: THE TYPE [bv_t] DEFINED IS UNRELATED TO THE SMT SOLVER'S +/// THEORY OF BIT VECTORS. SEE [FStar.BV] FOR THAT. +/// +/// TODO: We might rename this module to FStar.Seq.Boolean? + +open FStar.Mul +open FStar.Seq + +(** [logand] defined in terms of its indexing behavior *) +let rec logand_vec_definition (#n: pos) (a b: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (logand_vec #n a b) i = (index a i && index b i)) + [SMTPat (index (logand_vec #n a b) i)] = + if i = 0 then () else logand_vec_definition #(n - 1) (slice a 1 n) (slice b 1 n) (i - 1) + +(** [logxor] defined in terms of its indexing behavior *) +let rec logxor_vec_definition (#n: pos) (a b: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (logxor_vec #n a b) i = (index a i <> index b i)) + [SMTPat (index (logxor_vec #n a b) i)] = + if i = 0 then () else logxor_vec_definition #(n - 1) (slice a 1 n) (slice b 1 n) (i - 1) + + +(** [logor] defined in terms of its indexing behavior *) +let rec logor_vec_definition (#n: pos) (a b: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (logor_vec #n a b) i = (index a i || index b i)) + [SMTPat (index (logor_vec #n a b) i)] = + if i = 0 then () else logor_vec_definition #(n - 1) (slice a 1 n) (slice b 1 n) (i - 1) + +(** [lognot] defined in terms of its indexing behavior *) +let rec lognot_vec_definition (#n: pos) (a: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (lognot_vec #n a) i = not (index a i)) + [SMTPat (index (lognot_vec #n a) i)] = + if i = 0 then () else lognot_vec_definition #(n - 1) (slice a 1 n) (i - 1) + +(* Bitwise lemmas *) + +(** If both [x] and [y] are false at a given index [i], then so is they logical xor at [i] *) +let lemma_xor_bounded (m: pos) (n: nat) (x y: bv_t m) + : Lemma + (requires + (forall (i: nat). + (i < m /\ i >= n) ==> + (Seq.index x (m - 1 - i) = false /\ Seq.index y (m - 1 - i) = false))) + (ensures + (forall (i: nat). (i < m /\ i >= n) ==> (Seq.index (logxor_vec x y) (m - 1 - i) = false))) = + () + +(** Proves that the subset property is conserved in subslices. 
*) +let lemma_slice_subset_vec (#n: pos) (a b: bv_t n) (i: nat) (j: nat{i < j && j <= n}) + : Lemma (requires is_subset_vec a b) + (ensures + (match n with + | 1 -> True + | _ -> is_subset_vec #(j - i) (slice a i j) (slice b i j))) = () + +(** Proves that the superset property is conserved in subslices. *) +let lemma_slice_superset_vec (#n: pos) (a b: bv_t n) (i: nat) (j: nat{i < j && j <= n}) + : Lemma (requires is_superset_vec a b) + (ensures + (match n with + | 1 -> True + | _ -> is_superset_vec #(j - i) (slice a i j) (slice b i j))) = () + +(**** Shift operators *) + +(** The fill bits of a shift left are zero *) +let shift_left_vec_lemma_1 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i >= n - s}) + : Lemma (ensures index (shift_left_vec #n a s) i = false) + [SMTPat (index (shift_left_vec #n a s) i)] = () + +(** Relating the indexes of the shifted vector to the original *) +let shift_left_vec_lemma_2 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i < n - s}) + : Lemma (ensures index (shift_left_vec #n a s) i = index a (i + s)) + [SMTPat (index (shift_left_vec #n a s) i)] = () + +(** The fill bits of a shift right are zero *) +let shift_right_vec_lemma_1 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i < s}) + : Lemma (ensures index (shift_right_vec #n a s) i = false) + [SMTPat (index (shift_right_vec #n a s) i)] = () + +(** Relating the indexes of the shifted vector to the original *) +let shift_right_vec_lemma_2 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i >= s}) + : Lemma (ensures index (shift_right_vec #n a s) i = index a (i - s)) + [SMTPat (index (shift_right_vec #n a s) i)] = () + +(** Arithmetic shift right of [a], interpreting position [0] as the + most-significant bit, and using its value to fill *) +(** The fill bits of arithmetic shift right is the value of its + most-significant bit (position zero) *) +let shift_arithmetic_right_vec_lemma_1 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i < s}) + : Lemma (ensures index (shift_arithmetic_right_vec #n a s) i = index a 0) + [SMTPat (index (shift_arithmetic_right_vec #n a s) i)] = () + +(** Relating the indexes of the shifted vector to the original *) +let shift_arithmetic_right_vec_lemma_2 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i >= s}) + : Lemma (ensures index (shift_arithmetic_right_vec #n a s) i = index a (i - s)) + [SMTPat (index (shift_arithmetic_right_vec #n a s) i)] = () + diff --git a/stage0/ulib/FStar.BitVector.fsti b/stage0/ulib/FStar.BitVector.fsti new file mode 100644 index 00000000000..909161e5398 --- /dev/null +++ b/stage0/ulib/FStar.BitVector.fsti @@ -0,0 +1,176 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.BitVector + +/// This module defines a bit vector as a sequence of booleans of a +/// given length, and provides various utilities. +/// +/// NOTE: THE TYPE [bv_t] DEFINED IS UNRELATED TO THE SMT SOLVER'S +/// THEORY OF BIT VECTORS. SEE [FStar.BV] FOR THAT. +/// +/// TODO: We might rename this module to FStar.Seq.Boolean? 
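+///
+/// A small usage sketch (purely illustrative; [bv_t], [zero_vec],
+/// [ones_vec] and [logand_vec] are declared below in this interface,
+/// and the assertions are expected to follow from their
+/// [SMTPat]-indexed lemmas):
+///
+/// {[
+///   let _demo (a: bv_t 4) =
+///     assert (index (zero_vec #4) 2 = false);
+///     assert (index (logand_vec a (ones_vec #4)) 0 = index a 0)
+/// ]}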
+ +open FStar.Mul +open FStar.Seq.Base + +(** [bv_t n] is just a sequence of booleans of length [n] *) +[@@do_not_unrefine] +type bv_t (n: nat) = vec: seq bool {length vec = n} + +(**** Common constants *) + +(** A length [n] zero vector *) +let zero_vec (#n: pos) : Tot (bv_t n) = create n false + +(** A vector of length [n] whose [i]th bit is set, only *) +let elem_vec (#n: pos) (i: nat{i < n}) : Tot (bv_t n) = upd (create n false) i true + +(** A length [n] vector all of whose bits are set *) +let ones_vec (#n: pos) : Tot (bv_t n) = create n true + +(** Bitwise logical and *) +let rec logand_vec (#n: pos) (a b: bv_t n) : Tot (bv_t n) = + if n = 1 + then create 1 (index a 0 && index b 0) + else append (create 1 (index a 0 && index b 0)) (logand_vec #(n - 1) (slice a 1 n) (slice b 1 n)) + +(** [logand] defined in terms of its indexing behavior *) +val logand_vec_definition (#n: pos) (a b: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (logand_vec #n a b) i = (index a i && index b i)) + [SMTPat (index (logand_vec #n a b) i)] + +(** Bitwise logical exclusive or *) +let rec logxor_vec (#n: pos) (a b: bv_t n) : Tot (bv_t n) = + if n = 1 + then create 1 (index a 0 <> index b 0) + else append (create 1 (index a 0 <> index b 0)) (logxor_vec #(n - 1) (slice a 1 n) (slice b 1 n)) + +(** [logxor] defined in terms of its indexing behavior *) +val logxor_vec_definition (#n: pos) (a b: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (logxor_vec #n a b) i = (index a i <> index b i)) + [SMTPat (index (logxor_vec #n a b) i)] + +(** Bitwise logical or *) +let rec logor_vec (#n: pos) (a b: bv_t n) : Tot (bv_t n) = + if n = 1 + then create 1 (index a 0 || index b 0) + else append (create 1 (index a 0 || index b 0)) (logor_vec #(n - 1) (slice a 1 n) (slice b 1 n)) + +(** [logor] defined in terms of its indexing behavior *) +val logor_vec_definition (#n: pos) (a b: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (logor_vec #n a b) i = (index a i || index b i)) + [SMTPat (index (logor_vec #n a b) i)] + +(** Bitwise negation *) +let rec lognot_vec (#n: pos) (a: bv_t n) : Tot (bv_t n) = + if n = 1 + then create 1 (not (index a 0)) + else append (create 1 (not (index a 0))) (lognot_vec #(n - 1) (slice a 1 n)) + +(** [lognot] defined in terms of its indexing behavior *) +val lognot_vec_definition (#n: pos) (a: bv_t n) (i: nat{i < n}) + : Lemma (ensures index (lognot_vec #n a) i = not (index a i)) + [SMTPat (index (lognot_vec #n a) i)] + +(* Bitwise lemmas *) + +(** If both [x] and [y] are false at a given index [i], then so is they logical xor at [i] *) +val lemma_xor_bounded (m: pos) (n: nat) (x y: bv_t m) + : Lemma + (requires + (forall (i: nat). + (i < m /\ i >= n) ==> + (index x (m - 1 - i) = false /\ index y (m - 1 - i) = false))) + (ensures + (forall (i: nat). (i < m /\ i >= n) ==> (index (logxor_vec x y) (m - 1 - i) = false))) + +(** The property that the zero bits of b are also zero in a. + I.e. that a is a subset of b. *) +let is_subset_vec (#n: pos) (a b: bv_t n) = + forall (i: nat). i < n ==> index b i = false ==> index a i = false + +(** The property that the non-zero bits of b are also non-zero in a. + I.e. that a is a superset of b. *) +let is_superset_vec (#n: pos) (a b: bv_t n) = + forall (i: nat). i < n ==> index b i = true ==> index a i = true + +(** Proves that the subset property is conserved in subslices. 
*) +val lemma_slice_subset_vec (#n: pos) (a b: bv_t n) (i: nat) (j: nat{i < j && j <= n}) + : Lemma (requires is_subset_vec a b) + (ensures + (match n with + | 1 -> True + | _ -> is_subset_vec #(j - i) (slice a i j) (slice b i j))) + +(** Proves that the superset property is conserved in subslices. *) +val lemma_slice_superset_vec (#n: pos) (a b: bv_t n) (i: nat) (j: nat{i < j && j <= n}) + : Lemma (requires is_superset_vec a b) + (ensures + (match n with + | 1 -> True + | _ -> is_superset_vec #(j - i) (slice a i j) (slice b i j))) + +(**** Shift operators *) + +(* Note: the shift amount is extracted as a bitvector + NS: Not sure what this remark means. *) + +(** Shift [a] left by [s] bits, filling with zeroes *) +let shift_left_vec (#n: pos) (a: bv_t n) (s: nat) : Tot (bv_t n) = + if s >= n then zero_vec #n else if s = 0 then a else append (slice a s n) (zero_vec #s) + +(** The fill bits of a shift left are zero *) +val shift_left_vec_lemma_1 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i >= n - s}) + : Lemma (ensures index (shift_left_vec #n a s) i = false) + [SMTPat (index (shift_left_vec #n a s) i)] + +(** Relating the indexes of the shifted vector to the original *) +val shift_left_vec_lemma_2 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i < n - s}) + : Lemma (ensures index (shift_left_vec #n a s) i = index a (i + s)) + [SMTPat (index (shift_left_vec #n a s) i)] + +(** Shift [a] right by [s] bits, filling with zeroes *) +let shift_right_vec (#n: pos) (a: bv_t n) (s: nat) : Tot (bv_t n) = + if s >= n then zero_vec #n else if s = 0 then a else append (zero_vec #s) (slice a 0 (n - s)) + +(** The fill bits of a shift right are zero *) +val shift_right_vec_lemma_1 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i < s}) + : Lemma (ensures index (shift_right_vec #n a s) i = false) + [SMTPat (index (shift_right_vec #n a s) i)] + +(** Relating the indexes of the shifted vector to the original *) +val shift_right_vec_lemma_2 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i >= s}) + : Lemma (ensures index (shift_right_vec #n a s) i = index a (i - s)) + [SMTPat (index (shift_right_vec #n a s) i)] + +(** Arithmetic shift right of [a], interpreting position [0] as the + most-significant bit, and using its value to fill *) +let shift_arithmetic_right_vec (#n: pos) (a: bv_t n) (s: nat) : Tot (bv_t n) = + if index a 0 + then if s >= n then ones_vec #n else if s = 0 then a else append (ones_vec #s) (slice a 0 (n - s)) + else shift_right_vec a s + +(** The fill bits of arithmetic shift right is the value of its + most-significant bit (position zero) *) +val shift_arithmetic_right_vec_lemma_1 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i < s}) + : Lemma (ensures index (shift_arithmetic_right_vec #n a s) i = index a 0) + [SMTPat (index (shift_arithmetic_right_vec #n a s) i)] + +(** Relating the indexes of the shifted vector to the original *) +val shift_arithmetic_right_vec_lemma_2 (#n: pos) (a: bv_t n) (s: nat) (i: nat{i < n && i >= s}) + : Lemma (ensures index (shift_arithmetic_right_vec #n a s) i = index a (i - s)) + [SMTPat (index (shift_arithmetic_right_vec #n a s) i)] diff --git a/stage0/ulib/FStar.Bytes.fsti b/stage0/ulib/FStar.Bytes.fsti new file mode 100644 index 00000000000..5092aa1e02e --- /dev/null +++ b/stage0/ulib/FStar.Bytes.fsti @@ -0,0 +1,316 @@ +(* + Copyright 2008-2017 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(* +A standard library for manipulation of value bytes. + +This model is realized by Bytes.bytes in OCaml and by +struct {uintX_t size; char *bytes} (or similar) in C. + +This file is essentially a specialized version of FStar.Seq, +with lemmas and refinements taylored for typical operations on +bytes, and with support for machine integers and C-extractible versions +(which Seq does not provide.) + +@summary Value bytes standard library +*) +module FStar.Bytes + +module S = FStar.Seq +module U = FStar.UInt +module U8 = FStar.UInt8 +module U16 = FStar.UInt16 +module U32 = FStar.UInt32 +module U64 = FStar.UInt64 +module Str = FStar.String +module Chr = FStar.Char + +unfold let u8 = U8.t +unfold let u16 = U16.t +unfold let u32 = U32.t + +(** Realized by uint8_t in C and int in OCaml (char does not have necessary operators...) *) +unfold type byte = u8 + +(** Realized in C by a pair of a length field and uint8_t* in C + Realized in OCaml by a string *) +val bytes : t:Type0{hasEq t} +val len : bytes -> u32 + +unfold let length b = FStar.UInt32.v (len b) + +(** representation for specs that need lemmas not defined here. *) +val reveal: + bytes + -> GTot (S.seq byte) + +val length_reveal: + x:bytes + -> Lemma (ensures (S.length (reveal x) = length x)) + [SMTPatOr [[SMTPat (S.length (reveal x))]; + [SMTPat (len x)]]] + +val hide: + s:S.seq byte{S.length s < pow2 32} + -> GTot bytes + +val hide_reveal: + x:bytes + -> Lemma (ensures (hide (reveal x) = x)) + [SMTPat (reveal x)] + +val reveal_hide: + x:S.seq byte{S.length x < pow2 32} + -> Lemma (ensures (reveal (hide x) == x)) + [SMTPat (hide x)] + +type lbytes (l:nat) = b:bytes{length b = l} +type kbytes (k:nat) = b:bytes{length b < pow2 k} + +let lbytes32 (l:UInt32.t) = b:bytes{len b = l} + +val empty_bytes : lbytes 0 +val empty_unique: + b:bytes + -> Lemma (length b = 0 ==> b = empty_bytes) + [SMTPat (len b)] + +(** If you statically know the length, it is OK to read at arbitrary indexes *) +val get: + b:bytes + -> pos:u32{U32.v pos < length b} + -> Pure byte + (requires True) + (ensures (fun y -> y == S.index (reveal b) (U32.v pos))) + +unfold let op_String_Access = get + +unfold let index (b:bytes) (i:nat{i < length b}) = get b (U32.uint_to_t i) + +let equal b1 b2 = + length b1 = length b2 /\ + (forall (i:u32{U32.v i < length b1}).{:pattern (b1.[i]); (b2.[i])} b1.[i] == b2.[i]) + +val extensionality: + b1:bytes + -> b2:bytes + -> Lemma (requires (equal b1 b2)) + (ensures (b1 = b2)) + +(** creating byte values **) +val create: + len:u32 + -> v:byte + -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == v} + +unfold +let create_ (n:nat{FStar.UInt.size n U32.n}) v = create (U32.uint_to_t n) v + +val init: + len:u32 + -> f:(i:u32{U32.(i <^ len)} -> byte) + -> b:lbytes (U32.v len){forall (i:u32{U32.(i <^ len)}).{:pattern b.[i]} b.[i] == f i} + +// this is a hack JROESCH +val abyte (b:byte) : lbytes 1 + (* admit () create 1ul b *) + +val twobytes (b:byte&byte) : lbytes 2 + // init 2ul (fun i -> if i = 0ul then fst b else snd b) + +(** appending bytes **) +val append: + b1:bytes + -> b2:bytes + -> Pure bytes + (requires 
(UInt.size (length b1 + length b2) U32.n)) + (ensures (fun b -> reveal b == S.append (reveal b1) (reveal b2))) +unfold let op_At_Bar = append + +val slice: + b:bytes + -> s:u32 + -> e:u32{U32.(s <=^ e) /\ U32.v e <= length b} + -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v e)} +let slice_ b (s:nat) (e:nat{s <= e /\ e <= length b}) = slice b (U32.uint_to_t s) (U32.uint_to_t e) + +val sub: + b:bytes + -> s:u32 + -> l:u32{U32.v s + U32.v l <= length b} + -> r:bytes{reveal r == Seq.slice (reveal b) (U32.v s) (U32.v s + U32.v l)} + +val split: + b:bytes + -> k:u32{U32.v k <= length b} + -> p:(bytes&bytes){ + let x, y = p in + (reveal x, reveal y) == Seq.split (reveal b) (U32.v k)} + +unfold let split_ b (k:nat{FStar.UInt.size k U32.n /\ k < length b}) = split b (U32.uint_to_t k) + +(** Interpret a sequence of bytes as a mathematical integer encoded in big endian **) +let fits_in_k_bytes (n:nat) (k:nat) = FStar.UInt.size n (op_Multiply 8 k) +type uint_k (k:nat) = n:nat{fits_in_k_bytes n k} + +(** repr_bytes n: The number of bytes needed to represent a nat **) +val repr_bytes: + n:nat + -> k:pos{fits_in_k_bytes n k} + +val lemma_repr_bytes_values: + n:nat + -> Lemma (ensures ( let k = repr_bytes n in + if n < 256 then k==1 + else if n < 65536 then k==2 + else if n < 16777216 then k==3 + else if n < 4294967296 then k==4 + else if n < 1099511627776 then k==5 + else if n < 281474976710656 then k==6 + else if n < 72057594037927936 then k==7 + else if n < 18446744073709551616 then k==8 + else True )) + [SMTPat (repr_bytes n)] + +val repr_bytes_size: + k:nat + -> n:uint_k k + -> Lemma (ensures (repr_bytes n <= k)) + [SMTPat (fits_in_k_bytes n k)] + +val int_of_bytes: + b:bytes + -> Tot (uint_k (length b)) + +val bytes_of_int: + k:nat + -> n:nat{repr_bytes n <= k /\ k < pow2 32} + -> lbytes k + +val int_of_bytes_of_int: + #k:nat{k <= 32} + -> n:uint_k k + -> Lemma (ensures (int_of_bytes (bytes_of_int k n) == n)) + [SMTPat (bytes_of_int k n)] + +val bytes_of_int_of_bytes: + b:bytes{length b <= 32} + -> Lemma (ensures (bytes_of_int (length b) (int_of_bytes b) == b)) + [SMTPat (int_of_bytes b)] + +//18-02-25 use [uint32] instead of [int32] etc? 
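+
+// An illustrative sketch of how the lemmas above combine (not part of the
+// original interface; the proof is expected to go through via the SMT
+// patterns, though extra hints may be needed):
+//
+//   let _example () : Lemma (int_of_bytes (bytes_of_int 2 256) == 256) =
+//     lemma_repr_bytes_values 256 // gives repr_bytes 256 == 2, so the call typechecks
+//
+// with the final equality coming from the SMT pattern of [int_of_bytes_of_int].
+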
+val int32_of_bytes: + b:bytes{length b <= 4} + -> n:u32{U32.v n == int_of_bytes b} + +val int16_of_bytes: + b:bytes{length b <= 2} + -> n:u16{U16.v n == int_of_bytes b} + +val int8_of_bytes: + b:bytes{length b = 1} + -> n:u8{U8.v n = int_of_bytes b} + +val bytes_of_int32: + n:U32.t + -> b:lbytes 4{b == bytes_of_int 4 (U32.v n)} + +val bytes_of_int16: + n:U16.t + -> b:lbytes 2{b == bytes_of_int 2 (U16.v n)} + +val bytes_of_int8: + n:U8.t + -> b:lbytes 1{b == bytes_of_int 1 (U8.v n)} + +//////////////////////////////////////////////////////////////////////////////// +type minbytes (n:nat) = b:bytes{length b >= n} + +val xor: + n:u32 + -> b1:minbytes (U32.v n) + -> b2:minbytes (U32.v n) + -> b:bytes{len b = n} + +unfold let xor_ (#n:nat{FStar.UInt.size n U32.n}) (b1:minbytes n) (b2:minbytes n) = xor (U32.uint_to_t n) b1 b2 + +val xor_commutative: + n:u32 + -> b1:minbytes (U32.v n) + -> b2:minbytes (U32.v n) + -> Lemma (ensures (xor n b1 b2 == xor n b2 b1)) + [SMTPat (xor n b1 b2)] + +val xor_append: + b1:bytes + -> b2:bytes{FStar.UInt.size (length b1 + length b2) U32.n} + -> x1:bytes{len x1 = len b1} + -> x2:bytes{len x2 = len b2} + -> Lemma (ensures (xor U32.(len b1 +^ len b2) + (b1 @| b2) + (x1 @| x2) + == + xor (len b1) b1 x1 @| xor (len b2) b2 x2)) + +val xor_idempotent: + n:u32 + -> b1:lbytes (U32.v n) + -> b2:lbytes (U32.v n) + -> Lemma (ensures (xor n (xor n b1 b2) b2 == b1)) + +val utf8_encode: + s:string{Str.maxlen s (pow2 30)} + -> b:bytes{length b <= op_Multiply 4 (Str.length s)} + +val iutf8_opt: + m:bytes + -> (option (s:string{Str.maxlen s (pow2 30) /\ utf8_encode s == m})) + +val string_of_hex: string -> Tot string + +// missing post on the length of the results (exact on constant arguments) +val bytes_of_hex: string -> Tot bytes +val hex_of_string: string -> Tot string +val hex_of_bytes: bytes -> Tot string +val print_bytes: bytes -> Tot string +val bytes_of_string: string -> bytes //abytes + +(** A better implementation of BufferBytes, formerly found in miTLS *) + +module B = LowStar.Buffer +module M = LowStar.Modifies + +open FStar.HyperStack.ST + +type lbuffer (l:UInt32.t) = b:B.buffer UInt8.t {B.length b == U32.v l} + +val of_buffer (l:UInt32.t) (#p #q:_) (buf:B.mbuffer UInt8.t p q{B.length buf == U32.v l}) + : Stack (b:bytes{length b = UInt32.v l}) + (requires fun h0 -> + B.live h0 buf) + (ensures fun h0 b h1 -> + B.(modifies loc_none h0 h1) /\ + b = hide (B.as_seq h0 buf)) + +val store_bytes: src:bytes { length src <> 0 } -> + dst:lbuffer (len src) -> + Stack unit + (requires (fun h0 -> B.live h0 dst)) + (ensures (fun h0 r h1 -> + M.(modifies (loc_buffer dst) h0 h1) /\ + Seq.equal (reveal src) (B.as_seq h1 dst))) + +(* JP: let's not add from_bytes here because we want to leave it up to the +caller to allocate on the stack or on the heap *) diff --git a/stage0/ulib/FStar.Calc.fst b/stage0/ulib/FStar.Calc.fst new file mode 100644 index 00000000000..b61c27b27f1 --- /dev/null +++ b/stage0/ulib/FStar.Calc.fst @@ -0,0 +1,63 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. + + Authors: Guido Martinez, Aseem Rastogi, Nikhil Swamy +*) + +module FStar.Calc + +open FStar.Squash +open FStar.Preorder + +noeq +type calc_chain #a : list (relation a) -> a -> a -> Type = + | CalcRefl : #x:a -> calc_chain [] x x + | CalcStep : + rs:(list (relation a)) -> #p:(relation a) -> + #x:a -> #y:a -> #z:a -> calc_chain rs x y -> squash (p y z) -> calc_chain (p::rs) x z + +let rec elim_calc_chain #a (rs:list (relation a)) (#x #y:a) (pf:calc_chain rs x y) + : Lemma (ensures (calc_chain_related rs x y)) + = let steps = [delta_only [`%calc_chain_related]; iota; zeta] in + let t = norm steps (calc_chain_related rs x y) in + norm_spec steps (calc_chain_related rs x y); + match pf with + | CalcRefl -> () + | CalcStep tl pf _ -> elim_calc_chain tl pf + +let _calc_init (#a:Type) (x:a) : calc_chain [] x x = CalcRefl + +let calc_init #a x = return_squash (_calc_init x) + +let _calc_step (#t:Type) (#rs:list (relation t)) (#x #y:t) + (p:relation t) + (z:t) + (pf:calc_chain rs x y) + (j:squash (p y z)) + : GTot (calc_chain (p::rs) x z) + = CalcStep rs pf j + +let calc_step #a #x #y p z #rs pf j = + bind_squash (pf ()) (fun pk -> return_squash (_calc_step p z pk (j ()))) + +let calc_finish #a p #x #y #rs pf = + let steps = [delta_only [`%calc_chain_related]; iota; zeta] in + let t = norm steps (calc_chain_related rs x y) in + norm_spec steps (calc_chain_related rs x y); + let _ : squash (p x y) = bind_squash (pf ()) (fun pk -> elim_calc_chain rs pk) in + () + +let calc_push_impl #p #q f = + Classical.arrow_to_impl f diff --git a/stage0/ulib/FStar.Calc.fsti b/stage0/ulib/FStar.Calc.fsti new file mode 100644 index 00000000000..af34be0e4cb --- /dev/null +++ b/stage0/ulib/FStar.Calc.fsti @@ -0,0 +1,92 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: Guido Martinez, Aseem Rastogi, Nikhil Swamy +*) + +module FStar.Calc + +open FStar.Preorder + +/// This module provides calculational proofs support +/// +/// Client programs need not use it directly, +/// instead F* provides convenient syntax for writing calculational proofs +/// +/// See examples/calc for some examples + +/// The main type for the calc proof chain + +val calc_chain (#a:Type u#a) (rs:list (relation a)) (x y:a) : Type u#(max 1 a) + +/// Definition of when a calc chain is sound + +[@@"opaque_to_smt"] +let rec calc_chain_related (#a:Type) (rs:list (relation a)) (x y:a) + : Tot Type0 + = match rs with + | [] -> x == y + (* GM: The `:t` annotation below matters a lot for compactness of the formula! *) + | r1::rs -> exists (w:a). calc_chain_related rs x w /\ r1 w y + +[@@"opaque_to_smt"] +let calc_chain_compatible (#t:Type) (rs:list (relation t)) (p:relation t) + : Tot Type0 + = forall (x y:t). 
calc_chain_related rs x y ==> p x y + +/// A proof irrelevant type for the calc chains + +type calc_pack (#a:Type) (rs:list (relation a)) (x y:a) = + squash (calc_chain rs x y) + +/// Initializing a calc chain + +val calc_init (#a:Type) (x:a) : Tot (calc_pack [] x x) + +/// A single step of the calc chain +/// +/// Note the list of relations is reversed +/// calc_chain_compatible accounts for it + +val calc_step + (#a:Type) + (#x #y:a) + (p:relation a) (* Relation for this step *) + (z:a) (* Next expression *) + (#rs:list (relation a)) + (pf:unit -> Tot (calc_pack rs x y)) (* Rest of the proof *) + (j:unit -> Tot (squash (p y z))) (* Justification *) + : Tot (calc_pack (p::rs) x z) + +/// Finishing a calc proof, +/// providing the top-level relation as the postcondition + +val calc_finish + (#a:Type) + (p:relation a) + (#x #y:a) + (#rs:list (relation a)) + (pf:unit -> Tot (calc_pack rs x y)) + : Lemma + (requires (norm [delta_only [`%calc_chain_compatible; `%calc_chain_related]; + iota; + zeta] + (Range.labeled (range_of pf) + "Could not prove that this calc-chain is compatible" + (calc_chain_compatible rs p)))) + (ensures (p x y)) + +val calc_push_impl (#p #q:Type) (f:squash p -> GTot (squash q)) + : Tot (squash (p ==> q)) diff --git a/stage0/ulib/FStar.Cardinality.Cantor.fst b/stage0/ulib/FStar.Cardinality.Cantor.fst new file mode 100644 index 00000000000..4f2ee440157 --- /dev/null +++ b/stage0/ulib/FStar.Cardinality.Cantor.fst @@ -0,0 +1,24 @@ +module FStar.Cardinality.Cantor + +open FStar.Functions + +let no_surj_powerset (a : Type) (f : a -> powerset a) : Lemma (~(is_surj f)) = + let aux () : Lemma (requires is_surj f) (ensures False) = + (* Cantor's proof: given a supposed surjective f, + we define a set s that cannot be in the image of f. Namely, + the set of x:a such that x is not in f(x). *) + let s : powerset a = fun x -> not (f x x) in + let aux (x : a) : Lemma (requires f x == s) (ensures False) = + // We have f x == s, which means that f x x == not (f x x), contradiction + assert (f x x) // this triggers the SMT appropriately + in + Classical.forall_intro (Classical.move_requires aux) + in + Classical.move_requires aux () + +let no_inj_powerset (a : Type) (f : powerset a -> a) : Lemma (~(is_inj f)) = + let aux () : Lemma (requires is_inj f) (ensures False) = + let g : a -> powerset a = inverse_of_inj f (fun _ -> false) in + no_surj_powerset a g + in + Classical.move_requires aux () diff --git a/stage0/ulib/FStar.Cardinality.Cantor.fsti b/stage0/ulib/FStar.Cardinality.Cantor.fsti new file mode 100644 index 00000000000..af749e8bedc --- /dev/null +++ b/stage0/ulib/FStar.Cardinality.Cantor.fsti @@ -0,0 +1,13 @@ +module FStar.Cardinality.Cantor + +(* Cantor's theorem: there is no surjection from a set to its +powerset, and therefore also no injection from the powerset to the +set. *) + +open FStar.Functions + +val no_surj_powerset (a : Type) (f : a -> powerset a) + : Lemma (~(is_surj f)) + +val no_inj_powerset (a : Type) (f : powerset a -> a) + : Lemma (~(is_inj f)) diff --git a/stage0/ulib/FStar.Cardinality.Universes.fst b/stage0/ulib/FStar.Cardinality.Universes.fst new file mode 100644 index 00000000000..146820b0387 --- /dev/null +++ b/stage0/ulib/FStar.Cardinality.Universes.fst @@ -0,0 +1,42 @@ +module FStar.Cardinality.Universes + +open FStar.Functions +open FStar.Cardinality.Cantor + +(* This type is an injection of all powersets of Type u (i.e. 
Type u -> bool
+functions) into Type (u+1) *)
+noeq
+type type_powerset : (Type u#a -> bool) -> Type u#(max (a+1) b) =
+ | Mk : f:(Type u#a -> bool) -> type_powerset f
+
+let aux_inj_type_powerset (f1 f2 : powerset (Type u#a))
+ : Lemma (requires type_powerset u#a u#b f1 == type_powerset u#a u#b f2)
+ (ensures f1 == f2)
+=
+ assert (type_powerset f1 == type_powerset f2);
+ let xx1 : type_powerset f1 = Mk f1 in
+ let xx2 : type_powerset f2 = coerce_eq () xx1 in
+ assert (xx1 === xx2);
+ assert (f1 == f2)
+
+let inj_type_powerset () : Lemma (is_inj type_powerset) =
+ Classical.forall_intro_2 (fun f1 -> Classical.move_requires (aux_inj_type_powerset f1))
+
+(* let u' > u be universes. (u' = max(a+1, b), u=a below)
+ The general structure of this proof is:
+ 1- We know there is an injection of powerset(Type(u)) into Type(u') (see type_powerset above)
+ 2- We know there is NO injection from powerset(Type(u)) into Type(u) (see no_inj_powerset)
+ 3- Therefore, there cannot be an injection from Type(u') into Type(u), otherwise we would
+ compose it with the first injection and obtain the second impossible injection.
+*)
+let no_inj_universes (f : Type u#(max (a+1) b) -> Type u#a) : Lemma (~(is_inj f)) =
+ let aux () : Lemma (requires is_inj f) (ensures False) =
+ let comp : powerset (Type u#a) -> Type u#a = fun x -> f (type_powerset x) in
+ inj_type_powerset ();
+ inj_comp type_powerset f;
+ no_inj_powerset _ comp
+ in
+ Classical.move_requires aux ()
+
+let no_inj_universes_suc (f : Type u#(a+1) -> Type u#a) : Lemma (~(is_inj f)) =
+ no_inj_universes f
diff --git a/stage0/ulib/FStar.Cardinality.Universes.fsti b/stage0/ulib/FStar.Cardinality.Universes.fsti
new file mode 100644
index 00000000000..4a4baaa14a0
--- /dev/null
+++ b/stage0/ulib/FStar.Cardinality.Universes.fsti
@@ -0,0 +1,14 @@
+module FStar.Cardinality.Universes
+
+open FStar.Functions
+open FStar.Cardinality.Cantor
+
+(* Prove that there can be no injection from a universe into a strictly smaller
+universe. We use `max (a+1) b` to represent an arbitrary universe strictly larger
+than `a` as we cannot write sums of universe levels. *)
+val no_inj_universes (f : Type u#(max (a+1) b) -> Type u#a)
+ : Lemma (~(is_inj f))
+
+(* A simpler version for the +1 case. *)
+val no_inj_universes_suc (f : Type u#(a+1) -> Type u#a)
+ : Lemma (~(is_inj f))
diff --git a/stage0/ulib/FStar.Char.fsti b/stage0/ulib/FStar.Char.fsti
new file mode 100644
index 00000000000..8e87b73ad3d
--- /dev/null
+++ b/stage0/ulib/FStar.Char.fsti
@@ -0,0 +1,75 @@
+(*
+ Copyright 2008-2018 Microsoft Research
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*)
+
+module FStar.Char
+
+/// This module provides the [char] type, an abstract type
+/// representing UTF-8 characters.
+///
+/// UTF-8 characters are represented in a variable-length encoding of
+/// between 1 and 4 bytes, with a maximum of 21 bits used to represent
+/// a code. 
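+/// For instance (an illustration; [char_of_int] is defined further
+/// below in this interface), [char_of_int 0x41] is the one-byte
+/// character 'A', while [char_of_int 0x20AC] is '€', which UTF-8
+/// encodes in three bytes.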
+/// +/// See https://en.wikipedia.org/wiki/UTF-8 and +/// https://erratique.ch/software/uucp/doc/unicode.html + +module U32 = FStar.UInt32 + +(** [char] is a new primitive type with decidable equality *) +new +val char:eqtype + +(** A [char_code] is the representation of a UTF-8 char code in + an unsigned 32-bit integer whose value is at most 0x110000, + and not between 0xd800 and 0xe000 *) +type char_code = n: U32.t{U32.v n < 0xd7ff \/ (U32.v n >= 0xe000 /\ U32.v n <= 0x10ffff)} + +(** A primitive to extract the [char_code] of a [char] *) +val u32_of_char: char -> Tot char_code + +(** A primitive to promote a [char_code] to a [char] *) +val char_of_u32: char_code -> Tot char + +(** Encoding and decoding from [char] to [char_code] is the identity *) +val char_of_u32_of_char (c: char) + : Lemma (ensures (char_of_u32 (u32_of_char c) == c)) [SMTPat (u32_of_char c)] + +(** Encoding and decoding from [char] to [char_code] is the identity *) +val u32_of_char_of_u32 (c: char_code) + : Lemma (ensures (u32_of_char (char_of_u32 c) == c)) [SMTPat (char_of_u32 c)] + +(** A couple of utilities to use mathematical integers rather than [U32.t] + to represent a [char_code] *) +let int_of_char (c: char) : nat = U32.v (u32_of_char c) +let char_of_int (i: nat{i < 0xd7ff \/ (i >= 0xe000 /\ i <= 0x10ffff)}) : char = char_of_u32 (U32.uint_to_t i) + +(** Case conversion *) +val lowercase: char -> Tot char +val uppercase: char -> Tot char + +#set-options "--admit_smt_queries true" + +(** This private primitive is used internally by the compiler to + translate character literals with a desugaring-time check of the + size of the number, rather than an expensive verification check. + Since it is marked private, client programs cannot call it + directly Since it is marked unfold, it eagerly reduces, + eliminating the verification overhead of the wrapper *) + +private unfold +let __char_of_int (x: int) : char = char_of_int x +#reset-options + diff --git a/stage0/ulib/FStar.Class.Add.fst b/stage0/ulib/FStar.Class.Add.fst new file mode 100644 index 00000000000..e98b4c94fbb --- /dev/null +++ b/stage0/ulib/FStar.Class.Add.fst @@ -0,0 +1,40 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Class.Add + +(* A class for (additive, whatever that means) monoids *) +class additive a = { + zero : a; + plus : a -> a -> a; +} + +val (++) : #a:_ -> {| additive a |} -> a -> a -> a +let (++) = plus + +instance add_int : additive int = { + zero = 0; + plus = (+); +} + +instance add_bool : additive bool = { + zero = false; + plus = ( || ); +} + +instance add_list #a : additive (list a) = { + zero = []; + plus = List.Tot.Base.append; +} diff --git a/stage0/ulib/FStar.Class.Embeddable.fst b/stage0/ulib/FStar.Class.Embeddable.fst new file mode 100644 index 00000000000..7731fe42f49 --- /dev/null +++ b/stage0/ulib/FStar.Class.Embeddable.fst @@ -0,0 +1,28 @@ +module FStar.Class.Embeddable + +open FStar.Reflection.V2 + +instance embeddable_string : embeddable string = { + embed = (fun s -> pack_ln (Tv_Const (C_String s))); + typ = (`string); +} + +instance embeddable_bool : embeddable bool = { + embed = (fun b -> pack_ln (Tv_Const (if b then C_True else C_False))); + typ = (`bool); +} + +instance embeddable_int : embeddable int = { + embed = (fun i -> pack_ln (Tv_Const (C_Int i))); + typ = (`int); +} + +let rec e_list #a {| ea : embeddable a |} (xs : list a) : term = + match xs with + | [] -> `(Nil #(`# ea.typ)) + | x::xs -> `(Cons #(`#(ea.typ)) (`#(embed x)) (`#(e_list xs))) + +instance embeddable_list (a:Type) (ea : embeddable a) : embeddable (list a) = { + embed = e_list; + typ = (`list (`#ea.typ)); +} diff --git a/stage0/ulib/FStar.Class.Embeddable.fsti b/stage0/ulib/FStar.Class.Embeddable.fsti new file mode 100644 index 00000000000..84da9e806b3 --- /dev/null +++ b/stage0/ulib/FStar.Class.Embeddable.fsti @@ -0,0 +1,14 @@ +module FStar.Class.Embeddable + +open FStar.Reflection.V2 + +class embeddable (a:Type) = { + embed : a -> Tot term; + typ : term; +} + +instance val embeddable_string : embeddable string +instance val embeddable_bool : embeddable bool +instance val embeddable_int : embeddable int + +instance val embeddable_list (a:Type) (ea : embeddable a) : embeddable (list a) diff --git a/stage0/ulib/FStar.Class.Eq.Raw.fst b/stage0/ulib/FStar.Class.Eq.Raw.fst new file mode 100644 index 00000000000..b032eda328d --- /dev/null +++ b/stage0/ulib/FStar.Class.Eq.Raw.fst @@ -0,0 +1,57 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Class.Eq.Raw + +class deq a = { + eq : a -> a -> bool; +} + +let eq_instance_of_eqtype (#a:eqtype) : deq a = { + eq = (fun x y -> x = y) +} + +// FIXME: It would be easier to have a single eqtype instance, +// but resolution will sometimes use for any type, even though +// it should not. 
+instance int_has_eq : deq int = eq_instance_of_eqtype +instance unit_has_eq : deq unit = eq_instance_of_eqtype +instance bool_has_eq : deq bool = eq_instance_of_eqtype +instance string_has_eq : deq string = eq_instance_of_eqtype + +let rec eqList #a (eq : a -> a -> bool) (xs ys : list a) : Tot bool = + match xs, ys with + | [], [] -> true + | x::xs, y::ys -> eq x y && eqList eq xs ys + | _, _ -> false + +instance eq_list (_ : deq 'a) : deq (list 'a) = { + eq = eqList eq; +} + +instance eq_pair (_ : deq 'a) (_ : deq 'b) : deq ('a & 'b) = { + eq = (fun (a,b) (c,d) -> eq a c && eq b d) +} + +instance eq_option (_ : deq 'a) : deq (option 'a) = { + eq = (fun o1 o2 -> + match o1, o2 with + | None, None -> true + | Some x, Some y -> eq x y + | _, _ -> false); +} + +val (=) : #a:Type -> {| deq a |} -> a -> a -> bool +let (=) = eq diff --git a/stage0/ulib/FStar.Class.Eq.fst b/stage0/ulib/FStar.Class.Eq.fst new file mode 100644 index 00000000000..e1a3dcbb159 --- /dev/null +++ b/stage0/ulib/FStar.Class.Eq.fst @@ -0,0 +1,72 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Class.Eq + +open FStar.Tactics.Typeclasses +module Raw = FStar.Class.Eq.Raw + +let decides_eq (#a:Type) (f : a -> a -> bool) : prop = + forall x y. f x y <==> x == y + +class deq a = { + raw : Raw.deq a; + eq_dec : squash (decides_eq raw.eq); +} + +(* Superclass *) +instance deq_raw_deq (a:Type) (d:deq a) : Raw.deq a = d.raw + +let eq (#a:Type) {| d : deq a |} (x y : a) : bool = + d.raw.eq x y + +let eq_instance_of_eqtype (#a:eqtype) {| Raw.deq a |} : deq a = { + raw = Raw.eq_instance_of_eqtype #a; + eq_dec = (); +} + +instance int_has_eq : deq int = eq_instance_of_eqtype +instance unit_has_eq : deq unit = eq_instance_of_eqtype +instance bool_has_eq : deq bool = eq_instance_of_eqtype +instance string_has_eq : deq string = eq_instance_of_eqtype + +let eqList_ok (#a:Type) (d : deq a) : Lemma (decides_eq #(list a) (Raw.eqList d.raw.eq)) = + let rec aux (xs ys : list a) : Lemma (Raw.eqList d.raw.eq xs ys <==> xs == ys) = + match xs, ys with + | x::xs, y::ys -> + aux xs ys + | [], [] -> () + | _ -> () + in + Classical.forall_intro_2 aux; + () + +instance eq_list (d : deq 'a) : deq (list 'a) = { + raw = Raw.eq_list d.raw; + eq_dec = eqList_ok d; +} + +instance eq_pair (_ : deq 'a) (_ : deq 'b) : deq ('a & 'b) = { + raw = solve; + eq_dec = (); +} + +instance eq_option (_ : deq 'a) : deq (option 'a) = { + raw = solve; + eq_dec = (); +} + +val (=) : #a:Type -> {| deq a |} -> a -> a -> bool +let (=) = eq diff --git a/stage0/ulib/FStar.Class.Printable.fst b/stage0/ulib/FStar.Class.Printable.fst new file mode 100644 index 00000000000..8f781012915 --- /dev/null +++ b/stage0/ulib/FStar.Class.Printable.fst @@ -0,0 +1,266 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: Brian G. Milnes +*) + +module FStar.Class.Printable + +open FStar.String +open FStar.Seq.Properties + +class printable (a:Type) = +{ + to_string : a -> string +} + +(* First the prim types. *) + +instance printable_unit : printable unit = +{ + to_string = (fun _ -> "()") +} + +instance printable_bool : printable bool = +{ + to_string = Prims.string_of_bool +} + +instance printable_nat : printable nat = +{ + to_string = Prims.string_of_int +} + +instance printable_int : printable int = +{ + to_string = Prims.string_of_int +} + +(* An instance for refinements, they can be printed as long +as the base type is printable. This allows to print [nat], +for instance. *) +instance printable_ref #a #p (d : printable a) : printable (x:a{p x}) = +{ + to_string = d.to_string +} + +instance printable_list (#a:Type) (x:printable a) : printable (list a) = +{ + to_string = (fun l -> "[" ^ FStar.String.concat "; " (List.Tot.map to_string l) ^ "]") +} + +instance printable_string : printable string = +{ + to_string = fun x -> "\"" ^ x ^ "\"" +} + +instance printable_option #a {| printable a |} : printable (option a) = +{ + to_string = (function None -> "None" | Some x -> "(Some " ^ to_string x ^ ")") +} + +instance printable_either #a #b {| printable a |} {| printable b |} : printable (either a b) = +{ + to_string = + (function Inl x -> "(Inl " ^ to_string x ^ ")" | + Inr x -> "(Inr " ^ to_string x ^ ")") +} + +(* Then the base types. *) + +instance printable_char : printable FStar.Char.char = +{ + to_string = string_of_char +} + +(* Floats are not yet well implemented, so these are placeholders.*) +(* +instance printable_float : printable FStar.Float.float = +{ + to_string = FStar.Float.to_string +} + +instance printable_double : printable FStar.Float.double = +{ + to_string = FStar.Float.to_string +} +*) + +instance printable_byte : printable FStar.UInt8.byte = +{ + to_string = FStar.UInt8.to_string +} + +instance printable_int8 : printable FStar.Int8.t = +{ + to_string = FStar.Int8.to_string +} + +instance printable_uint8 : printable FStar.UInt8.t = +{ + to_string = FStar.UInt8.to_string +} + +instance printable_int16 : printable FStar.Int16.t = +{ + to_string = FStar.Int16.to_string +} + +instance printable_uint16 : printable FStar.UInt16.t = +{ + to_string = FStar.UInt16.to_string +} + +instance printable_int32 : printable FStar.Int32.t = +{ + to_string = FStar.Int32.to_string +} + +instance printable_uint32 : printable FStar.UInt32.t = +{ + to_string = FStar.UInt32.to_string +} + +instance printable_int64 : printable FStar.Int64.t = +{ + to_string = FStar.Int64.to_string +} + +instance printable_uint64 : printable FStar.UInt64.t = +{ + to_string = FStar.UInt64.to_string +} + +(* Placeholders in case someone build a 128 bit integer printer. +instance printable_int128 : printable FStar.Int128.t = +{ + to_string = FStar.Int128.to_string +} + +instance printable_uint128 : printable FStar.UInt128.t = +{ + to_string = FStar.UInt128.to_string +} +*) + +(* Up to 7 sized tuples, anything more and why are you using tuples? 
*) +instance printable_tuple2 #a #b {| printable a |} {| printable b |} : printable (a & b) = +{ + to_string = (fun (x, y) -> "(" ^ to_string x ^ ", " ^ to_string y ^ ")") +} + +instance printable_tuple3 + #t0 #t1 #t2 + {| printable t0 |} {| printable t1 |} {| printable t2 |} +: printable (tuple3 t0 t1 t2) = +{ + to_string = + (fun (v0,v1,v2) -> + "(" ^ + to_string v0 ^ ", " ^ + to_string v1 ^ ", " ^ + to_string v2 ^ ")" ) +} + +instance printable_tuple4 + #t0 #t1 #t2 #t3 + {| printable t0 |} {| printable t1 |} {| printable t2 |} {| printable t3 |} +: printable (tuple4 t0 t1 t2 t3) = +{ + to_string = + (fun (v0,v1,v2,v3) -> + "(" ^ + to_string v0 ^ ", " ^ + to_string v1 ^ ", " ^ + to_string v2 ^ ", " ^ + to_string v3 ^ ")" ) +} + +instance printable_tuple5 + #t0 #t1 #t2 #t3 #t4 + {| printable t0 |} {| printable t1 |} {| printable t2 |} {| printable t3 |} + {| printable t4 |} +: printable (tuple5 t0 t1 t2 t3 t4) = +{ + to_string = + (fun (v0,v1,v2,v3,v4) -> + "(" ^ + to_string v0 ^ ", " ^ + to_string v1 ^ ", " ^ + to_string v2 ^ ", " ^ + to_string v3 ^ ", " ^ + to_string v4 ^ ")" ) +} + +instance printable_tuple6 + #t0 #t1 #t2 #t3 #t4 #t5 + {| printable t0 |} {| printable t1 |} {| printable t2 |} {| printable t3 |} + {| printable t4 |} {| printable t5 |} +: printable (tuple6 t0 t1 t2 t3 t4 t5) = +{ + to_string = + (fun (v0,v1,v2,v3,v4,v5) -> + "(" ^ + to_string v0 ^ ", " ^ + to_string v1 ^ ", " ^ + to_string v2 ^ ", " ^ + to_string v3 ^ ", " ^ + to_string v4 ^ ", " ^ + to_string v5 ^ ")" ) +} + + +instance printable_tuple7 + #t0 #t1 #t2 #t3 #t4 #t5 #t6 + {| printable t0 |} {| printable t1 |} {| printable t2 |} {| printable t3 |} + {| printable t4 |} {| printable t5 |} {| printable t6 |} +: printable (tuple7 t0 t1 t2 t3 t4 t5 t6) = +{ + to_string = + (fun (v0,v1,v2,v3,v4,v5,v6) -> + "(" ^ + to_string v0 ^ ", " ^ + to_string v1 ^ ", " ^ + to_string v2 ^ ", " ^ + to_string v3 ^ ", " ^ + to_string v4 ^ ", " ^ + to_string v5 ^ ", " ^ + to_string v6 ^ ")" ) +} + +(* Sequences, with a <...> syntax. *) + +(* +instance printable_seq (#a:Type) (x:printable a) : printable (Seq.seq a) = +{ + to_string = + (fun l -> "<" ^ + FStar.String.concat "; " (List.Tot.map to_string (Seq.seq_to_list l)) ^ + ">") +} + +*) + + +instance printable_seq (#b:Type) (x:printable b) : printable (Seq.seq b) = +{ + to_string = + (fun s -> + let strings_of_b = map_seq to_string s + in + "<" ^ + FStar.String.concat "; " (Seq.seq_to_list strings_of_b) + ^ ">") +} diff --git a/stage0/ulib/FStar.Class.TotalOrder.Raw.fst b/stage0/ulib/FStar.Class.TotalOrder.Raw.fst new file mode 100644 index 00000000000..ec055f2a041 --- /dev/null +++ b/stage0/ulib/FStar.Class.TotalOrder.Raw.fst @@ -0,0 +1,88 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Class.TotalOrder.Raw + +open FStar.Order + +let flip = function + | Lt -> Gt + | Eq -> Eq + | Gt -> Lt + +let raw_comparator (a:Type) = a -> a -> order + +class totalorder (a:Type) = { + compare : raw_comparator a; +} + +val (<) : #t:Type -> {|totalorder t|} -> t -> t -> bool +let (<) x y = compare x y = Lt + +val (>) : #t:Type -> {|totalorder t|} -> t -> t -> bool +let (>) x y = compare x y = Gt + +val (=) : #t:Type -> {|totalorder t|} -> t -> t -> bool +let (=) x y = compare x y = Eq + +val (<=) : #t:Type -> {|totalorder t|} -> t -> t -> bool +let (<=) x y = compare x y <> Gt + +val (>=) : #t:Type -> {|totalorder t|} -> t -> t -> bool +let (>=) x y = compare x y <> Lt + +val (<>) : #t:Type -> {|totalorder t|} -> t -> t -> bool +let (<>) x y = compare x y <> Eq + +instance _ : totalorder int = { + compare = Order.compare_int; +} + +instance _ : totalorder bool = { + compare = (fun b1 b2 -> match b1, b2 with | false, false | true, true -> Eq | false, _ -> Lt | _ -> Gt); +} + +(* Lex order on tuples *) +instance totalorder_pair #a #b (d1 : totalorder a) (d2 : totalorder b) : totalorder (a & b) = { + compare = (fun (xa,xb) (ya, yb) -> + match compare xa ya with + | Lt -> Lt + | Gt -> Gt + | Eq -> compare xb yb); +} + +instance totalorder_option #a (d : totalorder a) : totalorder (option a) = { + compare = (fun o1 o2 -> match o1, o2 with + | None, None -> Eq + | None, Some _ -> Lt + | Some _, None -> Gt + | Some a1, Some a2 -> compare a1 a2); +} + +let rec raw_compare_lists #a (d : totalorder a) : raw_comparator (list a) = + fun l1 l2 -> + match l1, l2 with + | [], [] -> Eq + | [], _::_ -> Lt + | _::_, [] -> Gt + | x::xs, y::ys -> + match compare x y with + | Lt -> Lt + | Gt -> Gt + | Eq -> raw_compare_lists d xs ys + +instance totalorder_list #a (d : totalorder a) : totalorder (list a) = { + compare = raw_compare_lists d; +} diff --git a/stage0/ulib/FStar.Classical.Sugar.fst b/stage0/ulib/FStar.Classical.Sugar.fst new file mode 100644 index 00000000000..fab73510859 --- /dev/null +++ b/stage0/ulib/FStar.Classical.Sugar.fst @@ -0,0 +1,146 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Classical.Sugar +/// This module provides a few combinators that are targeted +/// by the desugaring phase of the F* front end + +let forall_elim + (#a:Type) + (#p:a -> Type) + (v:a) + (f:squash (forall (x:a). 
p x)) + : Tot (squash (p v)) + = () + +let exists_elim #t #p #q s_ex_p f + = let open FStar.Squash in + bind_squash s_ex_p (fun ex_p -> + bind_squash ex_p (fun (sig_p: (x:t & p x)) -> + let (| x, px |) = sig_p in + f x (return_squash px))) + +let or_elim_simple + (p:Type) + (q:Type) + (r:Type) + (x:squash (p \/ q)) + (f:squash p -> Tot (squash r)) + (g:squash q -> Tot (squash r)) + : Tot (squash r) + = let open FStar.Squash in + bind_squash x (fun p_or_q -> + bind_squash p_or_q (fun p_cor_q -> + match p_cor_q with + | Prims.Left p -> + f (return_squash p) + | Prims.Right q -> + g (return_squash q))) + +let or_elim + (p:Type) + (q:squash (~p) -> Type) + (r:Type) + (p_or:squash (p \/ q())) + (left:squash p -> Tot (squash r)) + (right:squash (~p) -> squash (q()) -> Tot (squash r)) + : Tot (squash r) + = or_elim_simple p (~p) r () + (fun (s:squash p) -> left s) + (fun (np:squash (~p)) -> + or_elim_simple p (q ()) r p_or + (fun (pf_p:squash p) -> left pf_p) + (fun (pf_q:squash (q())) -> right np pf_q)) + +let and_elim (p:Type) + (q:squash p -> Type) + (r:Type) + (x:squash (p /\ q())) + (f:squash p -> squash (q()) -> Tot (squash r)) + : Tot (squash r) + = let open FStar.Squash in + bind_squash x (fun p_and_q -> + bind_squash p_and_q (fun (Prims.Pair p q) -> + f (return_squash p) (return_squash q))) + +let forall_intro + (a:Type) + (p:a -> Type) + (f: (x:a -> Tot (squash (p x)))) + : Tot (squash (forall x. p x)) + = let open FStar.Squash in + let f' (x:a) + : GTot (squash (p x)) + = f x + in + return_squash (squash_double_arrow (return_squash f')) + +let exists_intro_simple + (a:Type) + (p:a -> Type) + (v:a) + (f: squash (p v)) + : Tot (squash (exists x. p x)) + = let open FStar.Squash in + let p = (| v, f |) in + squash_double_sum (return_squash p) + +let exists_intro + (a:Type) + (p:a -> Type) + (v:a) + (f: unit -> Tot (squash (p v))) + : Tot (squash (exists x. p x)) + = exists_intro_simple a p v (f()) + + +let implies_intro + (p:Type) + (q:squash p -> Type) + (f:(squash p -> Tot (squash (q())))) + : Tot (squash (p ==> q())) + = let open FStar.Squash in + let f' (x:p) + : GTot (squash (q ())) + = f (return_squash x) + in + return_squash (squash_double_arrow (return_squash f')) + +let or_intro_left + (p:Type) + (q:squash (~p) -> Type) + (f:unit -> Tot (squash p)) + : Tot (squash (p \/ q())) + = f() + +let or_intro_right + (p:Type) + (q:squash (~p) -> Type) + (f:squash (~p) -> Tot (squash (q()))) + : Tot (squash (p \/ q())) + = or_elim_simple p (~p) + (p \/ q()) + () + (fun s_p -> or_intro_left p q (fun _ -> s_p)) + (fun s_np -> f s_np) + +let and_intro + (p:Type) + (q:squash p -> Type) + (f:unit -> Tot (squash p)) + (g:squash p -> Tot (squash (q()))) + : Tot (squash (p /\ q())) + = let _ = f() in g() diff --git a/stage0/ulib/FStar.Classical.Sugar.fsti b/stage0/ulib/FStar.Classical.Sugar.fsti new file mode 100644 index 00000000000..ba660277058 --- /dev/null +++ b/stage0/ulib/FStar.Classical.Sugar.fsti @@ -0,0 +1,157 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +module FStar.Classical.Sugar + +/// This module provides a few combinators that are targeted +/// by the desugaring phase of the F* front end +/// +/// The combinators it provides are fairly standard, except for one +/// subtlety. In F*, the typechecking of terms formed using the +/// logical connectives is biased from left to right. That is: +/// +/// * In [p /\ q] and [p ==> q], the well-typedness of [q] is in a +/// context assuming [squash p] +/// +/// * In [p \/ q], the well-typedness of [q] is in a context assuming +/// [squash (~p)] +/// +/// So, many of these combinators reflect that bias by taking as +/// instantiations for [q] functions that depend on [squash p] or +/// [squash (~p)]. +/// +/// The other subtlety is that the when using these combinators, we +/// encapsulate any proof terms provided by the caller within a +/// thunk. This is to ensure that if, for instance, the caller simply +/// admits a goal, that they do not inadvertently discard any proof +/// obligations in the remainder of their programs. +/// +/// For example, consider the difference between +/// +/// 1. exists_intro a p v (admit()); rest +/// +/// and +/// +/// 2. exists_intro a p v (fun _ -> admit()); rest +/// +/// In (1) the proof of rest is admitted also. + + +(** Eliminate a universal quantifier by providing an instantiation *) +val forall_elim + (#a:Type) + (#p:a -> Type) + (v:a) + (f:squash (forall (x:a). p x)) + : Tot (squash (p v)) + +(** Eliminate an existential quantifier into a proof of a goal [q] *) +val exists_elim + (#t:Type) + (#p:t -> Type) + (#q:Type) + ($s_ex_p: squash (exists (x:t). p x)) + (f: (x:t -> squash (p x) -> Tot (squash q))) + : Tot (squash q) + +(** Eliminate an implication, by providing a proof of the hypothesis + Note, the proof is thunked *) +let implies_elim + (p:Type) + (q:Type) + (_:squash (p ==> q)) + (f:unit -> Tot (squash p)) + : squash q + = f() + +(** Eliminate a disjunction + - The type of q can depend on the ~p + - The right proof can assume both ~p and q +*) +val or_elim + (p:Type) + (q:squash (~p) -> Type) + (r:Type) + (p_or:squash (p \/ q())) + (left:squash p -> Tot (squash r)) + (right:squash (~p) -> squash (q()) -> Tot (squash r)) + : Tot (squash r) + +(** Eliminate a conjunction + - The type of q can depend on p +*) +val and_elim + (p:Type) + (q:squash p -> Type) + (r:Type) + (_:squash (p /\ q())) + (f:squash p -> squash (q()) -> Tot (squash r)) + : Tot (squash r) + +(** Introduce a universal quantifier *) +val forall_intro + (a:Type) + (p:a -> Type) + (f: (x:a -> Tot (squash (p x)))) + : Tot (squash (forall x. p x)) + +(** Introduce an existential quantifier *) +val exists_intro + (a:Type) + (p:a -> Type) + (v:a) + (x: unit -> Tot (squash (p v))) + : Tot (squash (exists x. 
p x)) + +(** Introduce an implication + - The type of q can depend on p + *) +val implies_intro + (p:Type) + (q:squash p -> Type) + (f:(squash p -> Tot (squash (q())))) + : Tot (squash (p ==> q())) + +(** Introduce an disjunction on the left + - The type of q can depend on ~p + - The proof is thunked to avoid polluting the continuation + *) +val or_intro_left + (p:Type) + (q:squash (~p) -> Type) + (f:unit -> Tot (squash p)) + : Tot (squash (p \/ q())) + +(** Introduce an disjunction on the right + - The type of q can depend on ~p + - The proof can assume ~p too + *) +val or_intro_right + (p:Type) + (q:squash (~p) -> Type) + (f:squash (~p) -> Tot (squash (q()))) + : Tot (squash (p \/ q())) + +(** Introduce a conjunction + - The type of q can depend on p + - The proof in the right case can also assume p + *) +val and_intro + (p:Type) + (q:squash p -> Type) + (left:unit -> Tot (squash p)) + (right:squash p -> Tot (squash (q()))) + : Tot (squash (p /\ q())) diff --git a/stage0/ulib/FStar.Classical.fst b/stage0/ulib/FStar.Classical.fst new file mode 100644 index 00000000000..dc7197a0995 --- /dev/null +++ b/stage0/ulib/FStar.Classical.fst @@ -0,0 +1,190 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Classical + +open FStar.Squash + +let give_witness #a x = return_squash x + +let give_witness_from_squash #a x = x + +let lemma_to_squash_gtot #a #p f x = + f x; + get_proof (p x) + +val get_squashed (#b a: Type) : Pure a (requires (a /\ a == squash b)) (ensures (fun _ -> True)) + +#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true" +[@@ noextract_to "FSharp"] +let get_squashed #b a = + let p = get_proof a in + join_squash #b p +#pop-options + +let get_equality #t a b = get_squashed #(equals a b) (a == b) + +let impl_to_arrow #a #b impl sx = + bind_squash #(a -> GTot b) impl (fun f -> bind_squash sx (fun x -> return_squash (f x))) + +let arrow_to_impl #a #b f = squash_double_arrow (return_squash (fun x -> f (return_squash x))) + +let impl_intro_gtot #p #q f = return_squash f + +let impl_intro_tot #p #q f = return_squash #(p -> GTot q) f + +let impl_intro #p #q f = + give_witness #(p ==> q) (squash_double_arrow (return_squash (lemma_to_squash_gtot f))) + +let move_requires #a #p #q f x = + give_proof (bind_squash (get_proof (l_or (p x) (~(p x)))) + (fun (b: l_or (p x) (~(p x))) -> + bind_squash b + (fun (b': Prims.sum (p x) (~(p x))) -> + match b' with + | Prims.Left hp -> + give_witness hp; + f x; + get_proof (p x ==> q x) + | Prims.Right hnp -> give_witness hnp))) + +let move_requires_2 #a #b #p #q f x y = move_requires (f x) y + +let move_requires_3 #a #b #c #p #q f x y z = move_requires (f x y) z + +let move_requires_4 #a #b #c #d #p #q f x y z w = move_requires (f x y z) w + +// Thanks KM, CH and SZ +let impl_intro_gen #p #q f = + let g () : Lemma (requires p) (ensures (p ==> q ())) = give_proof #(q ()) (f (get_proof p)) in + move_requires g () + +(*** Universal quantification *) +let get_forall #a p = + let t = (forall 
(x:a). p x) in + assert (norm [delta; delta_only [`%l_Forall]] t == (squash (x:a -> GTot (p x)))); + norm_spec [delta; delta_only [`%l_Forall]] t; + get_squashed #(x: a -> GTot (p x)) (forall (x: a). p x) + +(* TODO: Maybe this should move to FStar.Squash.fst *) +let forall_intro_gtot #a #p f = + let id (#a: Type) (x: a) = x in + let h:(x: a -> GTot (id (p x))) = fun x -> f x in + return_squash #(forall (x: a). id (p x)) () + +let lemma_forall_intro_gtot #a #p f = give_witness (forall_intro_gtot #a #p f) + +let gtot_to_lemma #a #p f x = give_proof #(p x) (return_squash (f x)) + +let forall_intro_squash_gtot #a #p f = + bind_squash #(x: a -> GTot (p x)) + #(forall (x: a). p x) + (squash_double_arrow #a #p (return_squash f)) + (fun f -> lemma_forall_intro_gtot #a #p f) + +let forall_intro_squash_gtot_join #a #p f = + join_squash (bind_squash #(x: a -> GTot (p x)) + #(forall (x: a). p x) + (squash_double_arrow #a #p (return_squash f)) + (fun f -> lemma_forall_intro_gtot #a #p f)) + +let forall_intro #a #p f = give_witness (forall_intro_squash_gtot (lemma_to_squash_gtot #a #p f)) + +let forall_intro_with_pat #a #c #p pat f = forall_intro #a #p f + +let forall_intro_sub #a #p f = forall_intro f + +(* Some basic stuff, should be moved to FStar.Squash, probably *) +let forall_intro_2 #a #b #p f = + let g: x: a -> Lemma (forall (y: b x). p x y) = fun x -> forall_intro (f x) in + forall_intro g + +let forall_intro_2_with_pat #a #b #c #p pat f = forall_intro_2 #a #b #p f + +let forall_intro_3 #a #b #c #p f = + let g: x: a -> Lemma (forall (y: b x) (z: c x y). p x y z) = fun x -> forall_intro_2 (f x) in + forall_intro g + +let forall_intro_3_with_pat #a #b #c #d #p pat f = forall_intro_3 #a #b #c #p f + +let forall_intro_4 #a #b #c #d #p f = + let g: x: a -> Lemma (forall (y: b x) (z: c x y) (w: d x y z). p x y z w) = + fun x -> forall_intro_3 (f x) + in + forall_intro g + + +let forall_impl_intro #a #p #q f = + let f' (x: a) : Lemma (requires (p x)) (ensures (q x)) = f x (get_proof (p x)) in + forall_intro (move_requires f') + + +let ghost_lemma #a #p #q f = + let lem: x: a -> Lemma (p x ==> q x ()) = + (fun x -> + (* basically, the same as above *) + give_proof (bind_squash (get_proof (l_or (p x) (~(p x)))) + (fun (b: l_or (p x) (~(p x))) -> + bind_squash b + (fun (b': Prims.sum (p x) (~(p x))) -> + match b' with + | Prims.Left hp -> + give_witness hp; + f x; + get_proof (p x ==> q x ()) + | Prims.Right hnp -> give_witness hnp)))) + in + forall_intro lem + +(*** Existential quantification *) +let exists_intro #a p witness = () + +#push-options "--warn_error -271" //local SMT pattern misses variables +let exists_intro_not_all_not (#a:Type) (#p:a -> Type) + ($f: (x:a -> Lemma (~(p x))) -> Lemma False) + : Lemma (exists x. p x) + = let open FStar.Squash in + let aux () + : Lemma (requires (forall x. ~(p x))) + (ensures False) + [SMTPat ()] + = bind_squash + (get_proof (forall x. ~ (p x))) + (fun (g: (forall x. 
~ (p x))) -> + bind_squash #(x:a -> GTot (~(p x))) #Prims.empty g + (fun (h:(x:a -> GTot (~(p x)))) -> f h)) + in + () +#pop-options + +let forall_to_exists #a #p #r f = forall_intro f + +let forall_to_exists_2 #a #p #b #q #r f = forall_intro_2 f + +let exists_elim goal #a #p have f = + bind_squash #_ + #goal + (join_squash have) + (fun (| x , pf |) -> + return_squash pf; + f x) + +(*** Disjunction *) +let or_elim #l #r #goal hl hr = + impl_intro_gen #l #(fun _ -> goal ()) hl; + impl_intro_gen #r #(fun _ -> goal ()) hr + +let excluded_middle (p: Type) = () diff --git a/stage0/ulib/FStar.Classical.fsti b/stage0/ulib/FStar.Classical.fsti new file mode 100644 index 00000000000..e58b28d8b03 --- /dev/null +++ b/stage0/ulib/FStar.Classical.fsti @@ -0,0 +1,405 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Classical + +/// This module provides various utilities to manipulate the squashed +/// logical connectives [==>], [/\], [\/], [forall], [exists] and [==], +/// defined in Prims in terms of the [squash] type. See Prims and +/// FStar.Squash for basic explanations of the [squash] type. +/// +/// In summary: +/// +/// - [squash p] is proof-irrelevant proof of [p], expressed as a unit +/// refinement. +/// +/// - [Lemma p] is also a proof-irrelevant proof of [p], expressed as +/// a postcondition of a unit-returning Ghost computation. +/// +/// We provide several utilities to turn proofs of various +/// propositions with non-trivial proof terms into proof-irrelevant, +/// classical proofs. + +(** [give_witness x] transforms a constructive proof [x:a] into a + proof-irrelevant postcondition. It is similar to + [FStar.Squash.return_squash] *) +val give_witness (#a: Type) (_: a) : Lemma (ensures a) + +(** [give_witness_from_squash s] moves from a unit-refinement to a + postcondition. It is similar to [FStar.Squash.give_proof] *) +val give_witness_from_squash (#a: Type) (_: squash a) : Lemma (ensures a) + +(** This turns a proof-irrelevant postcondition into a squashed proof *) +val lemma_to_squash_gtot (#a: Type) (#p: (a -> GTot Type)) ($_: (x: a -> Lemma (p x))) (x: a) + : GTot (squash (p x)) + +(**** Equality *) + +(** Turning an equality precondition into returned squash proof, + similar to [FStar.Squash.get_proof], but avoiding an extra squash, + since [==] is already squashed. *) +val get_equality (#t: Type) (a b: t) : Pure (a == b) (requires (a == b)) (ensures (fun _ -> True)) + +(**** Implication *) + +(** Turning an [a ==> b] into a [squash a -> squash b]. Note [a ==> b] is + defined as [squash (a -> b)], so this distributes the squash over the arrow. 
*) +val impl_to_arrow (#a #b: Type0) (_: (a ==> b)) (_: squash a) : Tot (squash b) + +(** The converse of [impl_to_arrow] *) +val arrow_to_impl (#a #b: Type0) (_: (squash a -> GTot (squash b))) : GTot (a ==> b) + +(** Similar to [arrow_to_impl], but without squashing proofs on the left *) +val impl_intro_gtot (#p #q: Type0) ($_: (p -> GTot q)) : GTot (p ==> q) + +(** Similar to [impl_intro_gtot], but for a Tot arrow *) +val impl_intro_tot (#p #q: Type0) ($_: (p -> Tot q)) : Tot (p ==> q) + +(** Similar to [arrow_to_impl], but not squashing the proof of [p] on the LHS. *) +val impl_intro (#p #q: Type0) ($_: (p -> Lemma q)) : Lemma (p ==> q) + +(** A lemma with a precondition can also be treated as a proof a quantified implication. + + See the remark at the top of this section comparing nested lemmas + with SMT pattern to [move_requires] and [forall_intro] *) +val move_requires + (#a: Type) + (#p #q: (a -> Type)) + ($_: (x: a -> Lemma (requires (p x)) (ensures (q x)))) + (x: a) + : Lemma (p x ==> q x) + +(** The arity 2 version of [move_requires] *) +val move_requires_2 + (#a: Type) + (#b: (a -> Type)) + (#p #q: (x: a -> b x -> Type)) + ($_: (x: a -> y: b x -> Lemma (requires (p x y)) (ensures (q x y)))) + (x: a) + (y: b x) + : Lemma (p x y ==> q x y) + +(** The arity 3 version of [move_requires] *) +val move_requires_3 + (#a: Type) + (#b: (a -> Type)) + (#c: (x: a -> y: b x -> Type)) + (#p #q: (x: a -> y: b x -> c x y -> Type)) + ($_: (x: a -> y: b x -> z: c x y -> Lemma (requires (p x y z)) (ensures (q x y z)))) + (x: a) + (y: b x) + (z: c x y) + : Lemma (p x y z ==> q x y z) + +(** The arity 4 version of [move_requires] *) +val move_requires_4 + (#a: Type) + (#b: (a -> Type)) + (#c: (x: a -> y: b x -> Type)) + (#d: (x: a -> y: b x -> z: c x y -> Type)) + (#p #q: (x: a -> y: b x -> z: c x y -> w: d x y z -> Type)) + ($_: (x: a -> y: b x -> z: c x y -> w: d x y z -> Lemma (requires (p x y z w)) (ensures (q x y z w)))) + (x: a) + (y: b x) + (z: c x y) + (w: d x y z) + : Lemma (p x y z w ==> q x y z w) + +(** When proving predicate [q] whose well-formedness depends on the + predicate [p], it is convenient to have [q] appear only under a + context where [p] is know to be valid. *) +val impl_intro_gen (#p: Type0) (#q: (squash p -> Tot Type0)) (_: (squash p -> Lemma (q ()))) + : Lemma (p ==> q ()) + +(**** Universal quantification *) + +/// Many of the utilities for universal quantification are designed to +/// help in the proofs of lemmas that ensure quantified +/// postconditions. For example, in order to prove [Lemma (forall +/// (x:a). p x)] it is often useful to "get your hands" on a freshly +/// introduced variable [x] and to prove [p x] for it, i.e., to prove +/// [x:a -> Lemma (p x)] and to turn this into a proof for [forall +/// x. p x]. Functions like [forall_intro] in this module let you do +/// just that. +/// +/// That said, it may often be more convenient to prove such +/// properties using local lemmas in inner scopes. For example, here +/// are two proof sketches for [forall x. p x]. +/// +/// {[ +/// assume +/// val p : nat -> prop +/// +/// let proof1 = +/// let lem (x:nat) +/// : Lemma (ensures p x) +/// = admit() +/// in +/// forall_intro lem; +/// assert (forall x. p x) +/// +/// let proof2 = +/// let lem (x:nat) +/// : Lemma (ensures p x) +/// [SMTPat (p x)] +/// = admit() +/// in +/// assert (forall x. p x) +/// ]} +/// +/// In [proof1], we prove an auxiliary lemma [lem] and then use +/// [forall_intro] to turn it into a proof of [forall x. p x]. 
+/// +/// In [proof2], we simply decorate [lem] with an SMT pattern to +/// allow the solver to use that lemma to prove [forall x. p x] +/// directly. +/// +/// The style of [proof2] is often more robust for several reasons: +/// +/// - [forall_intro] only works with lemmas that do not have +/// preconditions. E.g., if you wanted to prove [forall x. q x ==> +/// p x], you would have had to prove [lem] with the type [x:nat -> +/// Lemma (q x ==> p x)]. In contrast, in the style of [proof2], +/// you could have proven [x:nat -> Lemma (requires q x) (ensures p +/// x)], which is easier, since you can assume the precondition [q +/// x]. To use this style of lemma-with-precondition with +/// [forall_intro], one typically must also use [move_requires] to +/// coerce a lemma with a precondition into a lemma proving an +/// implication, or to use [ghost_lemma]. +/// +/// - [forall_intro] introduces a quantifier without an SMT +/// pattern. This can pollute the local context with an unguarded +/// quantifier, leading to inefficient proofs. Note, the variants +/// [forall_intro_with_pat] help with this somewhat, but they only +/// support a single pattern, rather than conjunctive and +/// disjunctive patterns. +/// +/// - [forall_intro] and its variants are available for only a fixed +/// arity up to 4. The nested SMTPat lemma style of [proof2] works +/// are arbitrary arity. +/// +/// That said, there may still be cases where [forall_intro] etc. are +/// more suitable. + +(** Turning an universally quantified precondition into returned + squash proof, similar to [FStar.Squash.get_proof], but avoiding an + extra squash, since [forall] is already squashed. *) +val get_forall (#a: Type) (p: (a -> GTot Type0)) + : Pure (forall (x: a). p x) (requires (forall (x: a). p x)) (ensures (fun _ -> True)) + +(** This introduces a squash proof of a universal + quantifier. [forall_intro_gtot f] is equivalent to [return_squash + (return_squash f)]. + + TODO: Perhaps remove this? It seems redundant *) +val forall_intro_gtot (#a: Type) (#p: (a -> GTot Type)) ($_: (x: a -> GTot (p x))) + : Tot (squash (forall (x: a). p x)) + +(** This turns a dependent arrow into a proof-irrelevant postcondition + of a universal quantifier. *) +val lemma_forall_intro_gtot (#a: Type) (#p: (a -> GTot Type)) ($_: (x: a -> GTot (p x))) + : Lemma (forall (x: a). p x) + +(** This turns a dependent arrow producing a proof a [p] into a lemma + ensuring [p], effectively squashing the proof of [p], while still + retaining the arrow. *) +val gtot_to_lemma (#a: Type) (#p: (a -> GTot Type)) ($_: (x: a -> GTot (p x))) (x: a) : Lemma (p x) + +(** This is the analog of [lemma_forall_intro_gtot] but with squashed + proofs on both sides, including a redundant extra squash on the result. + + TODO: perhaps remove this? *) +val forall_intro_squash_gtot (#a: Type) (#p: (a -> GTot Type)) ($_: (x: a -> GTot (squash (p x)))) + : Tot (squash (forall (x: a). p x)) + +(** This is the analog of [lemma_forall_intro_gtot] but with squashed + proofs on both sides *) +val forall_intro_squash_gtot_join + (#a: Type) + (#p: (a -> GTot Type)) + ($_: (x: a -> GTot (squash (p x)))) + : Tot (forall (x: a). p x) + +(** The main workhorse for introducing universally quantified postconditions, at arity 1. + + See the remark at the start of this section for guidelines on its + use. You may prefer to use a local lemma with an SMT pattern. *) +val forall_intro (#a: Type) (#p: (a -> GTot Type)) ($_: (x: a -> Lemma (p x))) + : Lemma (forall (x: a). 
p x) + +(** The main workhorse for introducing universally quantified + postconditions, at arity 1, including a provision for a single + pattern. + + See the remark at the start of this section for guidelines on its + use. You may prefer to use a local lemma with an SMT pattern. *) +val forall_intro_with_pat + (#a: Type) + (#c: (x: a -> Type)) + (#p: (x: a -> GTot Type0)) + ($pat: (x: a -> Tot (c x))) + ($_: (x: a -> Lemma (p x))) + : Lemma (forall (x: a). {:pattern (pat x)} p x) + +(** This function is almost identical to [forall_intro]. The only + difference is that rather in [forall_intro f] the type of [f] is + _unified_ with expected type of that argument, leading to better + resolution of implicit variables. + + However, sometimes it is convenient to introduce a quantifier from + a lemma while relying on subtyping---[forall_intro_sub f] allows + the use of subtyping when comparing the type of [f] to the + expected type of the argument. This will likely mean that the + implicit arguments, notably [p], will have to be provided + explicilty. *) +val forall_intro_sub (#a: Type) (#p: (a -> GTot Type)) (_: (x: a -> Lemma (p x))) + : Lemma (forall (x: a). p x) + +(** The arity 2 version of [forall_intro] *) +val forall_intro_2 + (#a: Type) + (#b: (a -> Type)) + (#p: (x: a -> b x -> GTot Type0)) + ($_: (x: a -> y: b x -> Lemma (p x y))) + : Lemma (forall (x: a) (y: b x). p x y) + +(** The arity 2 version of [forall_intro_with_pat] *) +val forall_intro_2_with_pat + (#a: Type) + (#b: (a -> Type)) + (#c: (x: a -> y: b x -> Type)) + (#p: (x: a -> b x -> GTot Type0)) + ($pat: (x: a -> y: b x -> Tot (c x y))) + ($_: (x: a -> y: b x -> Lemma (p x y))) + : Lemma (forall (x: a) (y: b x). {:pattern (pat x y)} p x y) + +(** The arity 3 version of [forall_intro] *) +val forall_intro_3 + (#a: Type) + (#b: (a -> Type)) + (#c: (x: a -> y: b x -> Type)) + (#p: (x: a -> y: b x -> z: c x y -> Type0)) + ($_: (x: a -> y: b x -> z: c x y -> Lemma (p x y z))) + : Lemma (forall (x: a) (y: b x) (z: c x y). p x y z) + +(** The arity 3 version of [forall_intro_with_pat] *) +val forall_intro_3_with_pat + (#a: Type) + (#b: (a -> Type)) + (#c: (x: a -> y: b x -> Type)) + (#d: (x: a -> y: b x -> z: c x y -> Type)) + (#p: (x: a -> y: b x -> z: c x y -> GTot Type0)) + ($pat: (x: a -> y: b x -> z: c x y -> Tot (d x y z))) + ($_: (x: a -> y: b x -> z: c x y -> Lemma (p x y z))) + : Lemma (forall (x: a) (y: b x) (z: c x y). {:pattern (pat x y z)} p x y z) + +(** The arity 4 version of [forall_intro] *) +val forall_intro_4 + (#a: Type) + (#b: (a -> Type)) + (#c: (x: a -> y: b x -> Type)) + (#d: (x: a -> y: b x -> z: c x y -> Type)) + (#p: (x: a -> y: b x -> z: c x y -> w: d x y z -> Type0)) + ($_: (x: a -> y: b x -> z: c x y -> w: d x y z -> Lemma (p x y z w))) + : Lemma (forall (x: a) (y: b x) (z: c x y) (w: d x y z). p x y z w) + +(** This combines th use of [arrow_to_impl] with [forall_intro]. + + TODO: Seems overly specific; could be removed? *) +val forall_impl_intro + (#a: Type) + (#p #q: (a -> GTot Type)) + ($_: (x: a -> squash (p x) -> Lemma (q x))) + : Lemma (forall x. p x ==> q x) + +(** This is similar to [forall_intro], but with a lemma that has a precondition. + + Note: It's unclear why [q] has an additional [unit] argument. + *) +val ghost_lemma + (#a: Type) + (#p: (a -> GTot Type0)) + (#q: (a -> unit -> GTot Type0)) + ($_: (x: a -> Lemma (requires p x) (ensures (q x ())))) + : Lemma (forall (x: a). 
p x ==> q x ()) + + +(**** Existential quantification *) + +(** The most basic way to introduce a squashed existential quantifier + [exists x. p x] is to present a witness [w] such that [p w]. + + While [exists_intro] is very explicit, as with universal + quantification and [forall_intro], it is only available for a + fixed arity. + + However, unlike with we do not yet provide any conveniences for + higher arities. One workaround is to tuple witnesses together, + e.g., instead of proving [exists x y. p x y] to prove instead + [exists xy. p (fst xy) (snd xy)] and to allow the SMT solver to convert + the latter to the former. *) +val exists_intro (#a: Type) (p: (a -> Type)) (witness: a) + : Lemma (requires (p witness)) (ensures (exists (x: a). p x)) + +(** Introducing an exists via its classical correspondence with a negated universal quantifier *) +val exists_intro_not_all_not + (#a: Type) + (#p: (a -> Type)) + ($f: ((x: a -> Lemma (~(p x))) -> Lemma False)) + : Lemma (exists x. p x) + +(** If [r] is true for all [x:a{p x}], then one can use + [forall_to_exists] to establish [(exists x. p x) ==> r]. *) +val forall_to_exists (#a: Type) (#p: (a -> Type)) (#r: Type) ($_: (x: a -> Lemma (p x ==> r))) + : Lemma ((exists (x: a). p x) ==> r) + +(** The arity two variant of [forall_to_exists] for two separate + existentially quantified hypotheses. + + TODO: overly specific, remove? *) +val forall_to_exists_2 + (#a: Type) + (#p: (a -> Type)) + (#b: Type) + (#q: (b -> Type)) + (#r: Type) + ($f: (x: a -> y: b -> Lemma ((p x /\ q y) ==> r))) + : Lemma (((exists (x: a). p x) /\ (exists (y: b). q y)) ==> r) + +(** An eliminator for squashed existentials: If every witness can be + eliminated into a squashed proof of the [goal], then the [goal] + postcondition is valid. *) +val exists_elim + (goal #a: Type) + (#p: (a -> Type)) + (_: squash (exists (x: a). p x)) + (_: (x: a{p x} -> GTot (squash goal))) + : Lemma goal + + +(*** Disjunction *) + +(** Eliminating [l \/ r] into a [goal] whose well-formedness depends on + [l \/ r] *) +val or_elim + (#l #r: Type0) + (#goal: (squash (l \/ r) -> Tot Type0)) + (hl: (squash l -> Lemma (goal ()))) + (hr: (squash r -> Lemma (goal ()))) + : Lemma ((l \/ r) ==> goal ()) + +(** The law of excluded middle: squashed types are classical *) +val excluded_middle (p: Type) : Lemma (requires (True)) (ensures (p \/ ~p)) diff --git a/stage0/ulib/FStar.Date.fsti b/stage0/ulib/FStar.Date.fsti new file mode 100644 index 00000000000..8b47742be0f --- /dev/null +++ b/stage0/ulib/FStar.Date.fsti @@ -0,0 +1,31 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +module FStar.Date + +/// A module providing primitives for dates and times +new +val dateTime:Type0 +new +val timeSpan:Type0 + +(** EXT marks an external function *) +val now: unit -> EXT dateTime +val secondsFromDawn: unit -> EXT (n: nat{n < pow2 32}) +val newTimeSpan: int -> int -> int -> int -> Tot timeSpan +val addTimeSpan: dateTime -> timeSpan -> Tot dateTime +val greaterDateTime: dateTime -> dateTime -> Tot bool + diff --git a/stage0/ulib/FStar.DependentMap.fst b/stage0/ulib/FStar.DependentMap.fst new file mode 100644 index 00000000000..d9f7e12a48a --- /dev/null +++ b/stage0/ulib/FStar.DependentMap.fst @@ -0,0 +1,155 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.DependentMap + +module F = FStar.FunctionalExtensionality +noeq +type t (key: eqtype) (value: (key -> Type)) = { mappings:F.restricted_t key value } + +let create (#key: eqtype) (#value: (key -> Tot Type)) (f: (k: key -> Tot (value k))) + : Tot (t key value) = { mappings = F.on_domain key f } + +let sel (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) (k: key) : Tot (value k) = + m.mappings k + +let sel_create (#key: eqtype) (#value: (key -> Tot Type)) (f: (k: key -> Tot (value k))) (k: key) + : Lemma (requires True) + (ensures (sel #key #value (create f) k == f k)) + [SMTPat (sel #key #value (create f) k)] = () + +let upd (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) (k: key) (v: value k) + : Tot (t key value) = + { mappings = F.on_domain key (fun k' -> if k' = k then v else m.mappings k') } + +let sel_upd_same (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) (k: key) (v: value k) = + () + +let sel_upd_other + (#key: eqtype) + (#value: (key -> Tot Type)) + (m: t key value) + (k: key) + (v: value k) + (k': key) + = () + +let equal (#key: eqtype) (#value: (key -> Tot Type)) (m1 m2: t key value) = + forall k. 
sel m1 k == sel m2 k + +let equal_intro (#key: eqtype) (#value: (key -> Tot Type)) (m1 m2: t key value) = () + +let equal_refl (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) = () + +let equal_elim (#key: eqtype) (#value: (key -> Tot Type)) (m1 m2: t key value) = + F.extensionality key value m1.mappings m2.mappings + +let restrict (#key: eqtype) (#value: (key -> Tot Type)) (p: (key -> Tot Type0)) (m: t key value) = + { mappings = F.on_domain (k: key{p k}) m.mappings } + +let sel_restrict + (#key: eqtype) + (#value: (key -> Tot Type)) + (p: (key -> Tot Type0)) + (m: t key value) + (k: key{p k}) + = () + +let concat_mappings + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (#key2: eqtype) + (#value2: (key2 -> Tot Type)) + (m1: (k1: key1 -> Tot (value1 k1))) + (m2: (k2: key2 -> Tot (value2 k2))) + (k: either key1 key2) + : concat_value value1 value2 k = + match k with + | Inl k1 -> m1 k1 + | Inr k2 -> m2 k2 + +let concat + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (#key2: eqtype) + (#value2: (key2 -> Tot Type)) + (m1: t key1 value1) + (m2: t key2 value2) + : Tot (t (either key1 key2) (concat_value value1 value2)) = + { mappings = F.on_domain (either key1 key2) (concat_mappings m1.mappings m2.mappings) } + +let sel_concat_l + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (#key2: eqtype) + (#value2: (key2 -> Tot Type)) + (m1: t key1 value1) + (m2: t key2 value2) + (k1: key1) + = () + +let sel_concat_r + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (#key2: eqtype) + (#value2: (key2 -> Tot Type)) + (m1: t key1 value1) + (m2: t key2 value2) + (k2: key2) + = () + +let rename + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (m: t key1 value1) + (#key2: eqtype) + (ren: (key2 -> Tot key1)) + : Tot (t key2 (rename_value value1 ren)) = + { mappings = F.on_domain key2 (fun k2 -> m.mappings (ren k2)) } + +let sel_rename + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (m: t key1 value1) + (#key2: eqtype) + (ren: (key2 -> Tot key1)) + (k2: key2) + : Lemma (ensures (sel (rename m ren) k2 == sel m (ren k2))) = () + +let map + (#key: eqtype) + (#value1 #value2: (key -> Tot Type)) + (f: (k: key -> value1 k -> Tot (value2 k))) + (m: t key value1) + : Tot (t key value2) = { mappings = F.on_domain key (fun k -> f k (sel m k)) } + +let sel_map + (#key: eqtype) + (#value1 #value2: (key -> Tot Type)) + (f: (k: key -> value1 k -> Tot (value2 k))) + (m: t key value1) + (k: key) + = () + +let map_upd + (#key: eqtype) + (#value1 #value2: (key -> Tot Type)) + (f: (k: key -> value1 k -> Tot (value2 k))) + (m: t key value1) + (k: key) + (v: value1 k) + = equal_elim #key #value2 (map f (upd m k v)) (upd (map f m) k (f k v)) + diff --git a/stage0/ulib/FStar.DependentMap.fsti b/stage0/ulib/FStar.DependentMap.fsti new file mode 100644 index 00000000000..f86d7f297da --- /dev/null +++ b/stage0/ulib/FStar.DependentMap.fsti @@ -0,0 +1,242 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +module FStar.DependentMap + +/// This module provides an abstract type of maps whose co-domain +/// depends on the value of each key. i.e., it is an encapsulation +/// of [x:key -> value x], where [key] supports decidable equality. +/// +/// The main constructors of the type are: +/// * [create]: To create the whole map from a function +/// * [upd]: To update a map at a point +/// * [restrict]: To restrict the domain of a map +/// * [concat]: To concatenate maps by taking the union of their key spaces +/// * [rename]: To rename the keys of a map +/// * [map]: To map a function over the values of a map +/// +/// The main eliminators are: +/// * [sel]: To query the map for its value at a point +/// +/// The interface is specified in a style that describes the action of +/// each eliminator over each of the constructors +/// +/// The map also supports an extensional equality principle. + +(** Abstract type of dependent maps, with universe polymorphic values + and keys in universe 0 with decidable equality *) +val t (key: eqtype) ([@@@strictly_positive] value: (key -> Type u#v)) : Type u#v + +(** Creating a new map from a function *) +val create (#key: eqtype) (#value: (key -> Tot Type)) (f: (k: key -> Tot (value k))) + : Tot (t key value) + +(** Querying the map for its value at a given key *) +val sel (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) (k: key) : Tot (value k) + +(** Relating [create] to [sel] *) +val sel_create (#key: eqtype) (#value: (key -> Tot Type)) (f: (k: key -> Tot (value k))) (k: key) + : Lemma (ensures (sel #key #value (create f) k == f k)) [SMTPat (sel #key #value (create f) k)] + +(** Updating a map at a point *) +val upd (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) (k: key) (v: value k) + : Tot (t key value) + +(** The action of selecting a key [k] a map with an updated value [v] + at [k] + + This is one of the classic McCarthy select/update axioms in the + setting of a dependent map. + *) +val sel_upd_same (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) (k: key) (v: value k) + : Lemma (ensures (sel (upd m k v) k == v)) [SMTPat (sel (upd m k v) k)] + +(** The action of selecting a key [k] a map with an updated value [v] + at a different key [k'] + + This is one of the classic McCarthy select/update axioms in the + setting of a dependent map. + *) +val sel_upd_other + (#key: eqtype) + (#value: (key -> Tot Type)) + (m: t key value) + (k: key) + (v: value k) + (k': key) + : Lemma (requires (k' <> k)) + (ensures (sel (upd m k v) k' == sel m k')) + [SMTPat (sel (upd m k v) k')] + +(** Extensional propositional equality on maps *) +val equal (#key: eqtype) (#value: (key -> Tot Type)) (m1 m2: t key value) : prop + +(** Introducing extensional equality by lifting equality on the map, pointwise *) +val equal_intro (#key: eqtype) (#value: (key -> Tot Type)) (m1 m2: t key value) + : Lemma (requires (forall k. 
sel m1 k == sel m2 k)) + (ensures (equal m1 m2)) + [SMTPat (equal m1 m2)] + +(** [equal] is reflexive *) +val equal_refl (#key: eqtype) (#value: (key -> Tot Type)) (m: t key value) + : Lemma (ensures (equal m m)) [SMTPat (equal m m)] + +(** [equal] can be eliminated into standard propositional equality + (==), also proving that it is an equivalence relation *) +val equal_elim (#key: eqtype) (#value: (key -> Tot Type)) (m1 m2: t key value) + : Lemma (requires (equal m1 m2)) (ensures (m1 == m2)) [SMTPat (equal m1 m2)] + +(**** Restricting the domain of a map *) + +(** Restricts the domain of the map to those keys satisfying [p] *) +val restrict (#key: eqtype) (#value: (key -> Tot Type)) (p: (key -> Tot Type0)) (m: t key value) + : Tot (t (k: key{p k}) value) + +(** The action of [sel] on [restrict] : the contents of the map isn't changed *) +val sel_restrict + (#key: eqtype) + (#value: (key -> Tot Type)) + (p: (key -> Tot Type0)) + (m: t key value) + (k: key{p k}) + : Lemma (ensures (sel (restrict p m) k == sel m k)) + +(**** Concatenating maps *) + +/// Concatenating [t k1 v1] and [t k2 v2] produces a map +/// [t (either k1 k2) (concat_value v1 v2)] +/// +/// I.e., the key space varies contravariantly, to take the union of +/// the component key spaces. The co-domain is the dependent product +/// of the co-domains of the original map + +(** The key space of a concatenated map is the product of the key spaces *) +let concat_value + (#key1: eqtype) + (value1: (key1 -> Tot Type)) + (#key2: eqtype) + (value2: (key2 -> Tot Type)) + (k: either key1 key2) + : Tot Type = + match k with + | Inl k1 -> value1 k1 + | Inr k2 -> value2 k2 + +(** Concatenating maps *) +val concat + (#key1: eqtype) + (#value1: (key1 -> Tot (Type u#v))) + (#key2: eqtype) + (#value2: (key2 -> Tot (Type u#v))) + (m1: t key1 value1) + (m2: t key2 value2) + : Tot (t (either key1 key2) (concat_value value1 value2)) + +(** The action of [sel] on [concat], for a key on the left picks a + value from the left map *) +val sel_concat_l + (#key1: eqtype) + (#value1: (key1 -> Tot (Type u#v))) + (#key2: eqtype) + (#value2: (key2 -> Tot (Type u#v))) + (m1: t key1 value1) + (m2: t key2 value2) + (k1: key1) + : Lemma (ensures (sel (concat m1 m2) (Inl k1) == sel m1 k1)) + +(** The action of [sel] on [concat], for a key on the right picks a + value from the right map *) +val sel_concat_r + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (#key2: eqtype) + (#value2: (key2 -> Tot Type)) + (m1: t key1 value1) + (m2: t key2 value2) + (k2: key2) + : Lemma (ensures (sel (concat m1 m2) (Inr k2) == sel m2 k2)) + +(**** Renamings *) + +/// Given a map from [key2] to [key1], we can revise a map from [t +/// key1 v] to a map [t key2 v], by composing the maps. 
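(* A client-side sketch of the composition just described (the names
   [bool_as_int], [string_table] and [renamed_table] are made up for
   illustration; [rename] and [sel_rename] are the operations specified
   further below in this interface). The renamed map returns, at each new
   key, whatever the original map held at that key's image. *)
let bool_as_int (b: bool) : int = if b then 1 else 0

let string_table : t int (fun _ -> string) = create (fun i -> string_of_int i)

let renamed_table = rename string_table bool_as_int

(* [sel_rename] pins down what the renamed map returns at each new key. *)
let renamed_table_at_true ()
  : Lemma (sel renamed_table true == sel string_table 1)
  = sel_rename string_table bool_as_int true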
+ +(** The type of the co-domain of the renamed map also involves + transformation along the renaming function *) +let rename_value + (#key1: eqtype) + (value1: (key1 -> Tot Type)) + (#key2: eqtype) + (ren: (key2 -> Tot key1)) + (k: key2) + : Tot Type = value1 (ren k) + +(** Renaming the keys of a map *) +val rename + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (m: t key1 value1) + (#key2: eqtype) + (ren: (key2 -> Tot key1)) + : Tot (t key2 (rename_value value1 ren)) + +(** The action of [sel] on [rename] *) +val sel_rename + (#key1: eqtype) + (#value1: (key1 -> Tot Type)) + (m: t key1 value1) + (#key2: eqtype) + (ren: (key2 -> Tot key1)) + (k2: key2) + : Lemma (ensures (sel (rename m ren) k2 == sel m (ren k2))) + +(**** Mapping a function over a dependent map *) + +(** [map f m] applies f to each value in [m]'s co-domain *) +val map + (#key: eqtype) + (#value1 #value2: (key -> Tot Type)) + (f: (k: key -> value1 k -> Tot (value2 k))) + (m: t key value1) + : Tot (t key value2) + +(** The action of [sel] on [map] *) +val sel_map + (#key: eqtype) + (#value1 #value2: (key -> Tot Type)) + (f: (k: key -> value1 k -> Tot (value2 k))) + (m: t key value1) + (k: key) + : Lemma (ensures (sel (map f m) k == f k (sel m k))) + [SMTPat (sel #key #value2 (map #key #value1 #value2 f m) k)] + +(** [map] explained in terms of its action on [upd] *) +val map_upd + (#key: eqtype) + (#value1 #value2: (key -> Tot Type)) + (f: (k: key -> value1 k -> Tot (value2 k))) + (m: t key value1) + (k: key) + (v: value1 k) + : Lemma (ensures (map f (upd m k v) == upd (map f m) k (f k v))) + [ + //AR: wanted to write an SMTPatOr, but gives some error + SMTPat (map #key #value1 #value2 f (upd #key #value1 m k v)) + ] + + +/// We seem to miss lemmas that relate map to the other constructors, +/// including create, restrict etc. diff --git a/stage0/ulib/FStar.Dyn.fst b/stage0/ulib/FStar.Dyn.fst new file mode 100644 index 00000000000..662b2717a74 --- /dev/null +++ b/stage0/ulib/FStar.Dyn.fst @@ -0,0 +1,41 @@ +(* + Copyright 2024 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Dyn + +// Note: this file is only a reference implementation showing that the API can +// be implemented safely, we have a separate handcrafted ML implementation using +// `magic`. +// Extracting this file directly results in extra indirections, since `mkdyn` +// would allocate a closure and `undyn` would allocate a closure and a heap +// cell. + +noeq type value_type_bundle = { t: Type0; x: t } + +let raw_dyn (t: Type u#a) : Type0 = unit -> Dv (b:value_type_bundle {b.t == (unit -> Dv t)}) +let to_raw_dyn (#t: Type u#a) (x: t) : raw_dyn t = fun _ -> { t = unit -> Dv t; x = fun _ -> x } + +let dyn : Type0 = unit -> Dv value_type_bundle +let mkdyn' (#t: Type u#a) (x: t) : dyn = to_raw_dyn x +let dyn_has_ty (y: dyn) (t: Type u#a) = exists (x: t). 
y == mkdyn' x +let mkdyn #t x = mkdyn' #t x + +let elim_subtype_of s (t: Type { subtype_of s t }) (x: s): t = x + +let undyn (#t: Type u#a) (y: dyn { dyn_has_ty y t }) : Dv t = + let y : raw_dyn t = elim_subtype_of _ _ y in + let b = y () in + let c : unit -> Dv t = b.x in + c () diff --git a/stage0/ulib/FStar.Dyn.fsti b/stage0/ulib/FStar.Dyn.fsti new file mode 100644 index 00000000000..66b9cd89c1e --- /dev/null +++ b/stage0/ulib/FStar.Dyn.fsti @@ -0,0 +1,31 @@ +(* + Copyright 2008-2024 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Dyn + +/// Dynamic casts + +val dyn : Type0 + +(** [dyn_has_ty d a] is true if [d] was promoted from type [a] *) +val dyn_has_ty (d: dyn) (a: Type u#a) : prop + +(** Promoting a value of type [a] to [dyn] *) +val mkdyn (#a: Type u#a) (x: a) : d:dyn { dyn_has_ty d a } + +(** This coerces a value of type [dyn] to its original type [a], + with [dyn_has_ty d a] as precondition *) +val undyn (#a: Type u#a) (d: dyn { dyn_has_ty d a }) : Dv a diff --git a/stage0/ulib/FStar.Endianness.fst b/stage0/ulib/FStar.Endianness.fst new file mode 100644 index 00000000000..337146ec377 --- /dev/null +++ b/stage0/ulib/FStar.Endianness.fst @@ -0,0 +1,366 @@ +module FStar.Endianness + +/// A library of lemmas for reasoning about sequences of machine integers and +/// their (little|big)-endian representation as a sequence of bytes. +/// +/// The functions in this module aim to be as generic as possible, in order to +/// facilitate compatibility with: +/// - Vale's model of machine integers (nat64 et al.), which does not rely on +/// FStar's machine integers +/// - HACL*'s Lib.IntTypes module, which exposes a universal indexed integer +/// type but uses F* machine integers under the hood. +/// +/// To achieve maximum compatibility, we try to state most lemmas using nat +/// rather than UIntX. +/// +/// .. note:: +/// +/// This module supersedes the poorly-named ``FStar.Krml.Endianness``. + +module U8 = FStar.UInt8 +module U32 = FStar.UInt32 +module U64 = FStar.UInt64 +module Math = FStar.Math.Lemmas +module S = FStar.Seq + +open FStar.Mul + +/// Definition of little and big-endianness +/// --------------------------------------- + +let rec le_to_n b + : Tot nat (decreases (S.length b)) + = if S.length b = 0 then 0 + else U8.v (S.head b) + pow2 8 * le_to_n (S.tail b) + +let rec be_to_n b + : Tot nat (decreases (S.length b)) + = if S.length b = 0 then 0 + else U8.v (S.last b) + pow2 8 * be_to_n (S.slice b 0 (S.length b - 1)) + +let reveal_le_to_n _ = () + +let reveal_be_to_n _ = () + +/// Inverse operations +/// ------------------ + +/// TODO: move to FStar.Math.Lemmas? +private val lemma_euclidean_division: r:nat -> b:nat -> q:pos -> Lemma + (requires (r < q)) + (ensures (r + q * b < q * (b+1))) +let lemma_euclidean_division r b q = () + +/// TODO: move to FStar.Math.Lemmas? US spelling? 
+private val lemma_factorise: a:nat -> b:nat -> Lemma (a + a * b == a * (b + 1)) +let lemma_factorise a b = () + +let rec lemma_le_to_n_is_bounded b = + if Seq.length b = 0 then () + else + begin + let s = Seq.slice b 1 (Seq.length b) in + assert(Seq.length s = Seq.length b - 1); + lemma_le_to_n_is_bounded s; + assert(UInt8.v (Seq.index b 0) < pow2 8); + assert(le_to_n s < pow2 (8 * Seq.length s)); + assert(le_to_n b < pow2 8 + pow2 8 * pow2 (8 * (Seq.length b - 1))); + lemma_euclidean_division (UInt8.v (Seq.index b 0)) (le_to_n s) (pow2 8); + assert(le_to_n b <= pow2 8 * (le_to_n s + 1)); + assert(le_to_n b <= pow2 8 * pow2 (8 * (Seq.length b - 1))); + Math.Lemmas.pow2_plus 8 (8 * (Seq.length b - 1)); + lemma_factorise 8 (Seq.length b - 1) + end + +let rec lemma_be_to_n_is_bounded b = + if Seq.length b = 0 then () + else + begin + let s = Seq.slice b 0 (Seq.length b - 1) in + assert(Seq.length s = Seq.length b - 1); + lemma_be_to_n_is_bounded s; + assert(UInt8.v (Seq.last b) < pow2 8); + assert(be_to_n s < pow2 (8 * Seq.length s)); + assert(be_to_n b < pow2 8 + pow2 8 * pow2 (8 * (Seq.length b - 1))); + lemma_euclidean_division (UInt8.v (Seq.last b)) (be_to_n s) (pow2 8); + assert(be_to_n b <= pow2 8 * (be_to_n s + 1)); + assert(be_to_n b <= pow2 8 * pow2 (8 * (Seq.length b - 1))); + Math.Lemmas.pow2_plus 8 (8 * (Seq.length b - 1)); + lemma_factorise 8 (Seq.length b - 1) + end + +let rec n_to_le len n = + if len = 0 then + S.empty + else + let len = len - 1 in + let byte = U8.uint_to_t (n % 256) in + let n' = n / 256 in + Math.pow2_plus 8 (8 * len); + assert(n' < pow2 (8 * len )); + let b' = n_to_le len n' in + let b = S.cons byte b' in + S.lemma_eq_intro b' (S.tail b); + b + +let rec n_to_be len n = + if len = 0 then + S.empty + else + let len = len - 1 in + let byte = U8.uint_to_t (n % 256) in + let n' = n / 256 in + Math.pow2_plus 8 (8 * len); + let b' = n_to_be len n' in + let b'' = S.create 1 byte in + let b = S.append b' b'' in + S.lemma_eq_intro b' (S.slice b 0 len); + b + +/// Injectivity +/// ----------- + +// this lemma easily follows from le_to_n . (n_to_le len) == id, the inversion +// proof in the spec for n_to_le +let n_to_le_inj len n1 n2 = () + +let n_to_be_inj len n1 n2 = () + +let rec be_to_n_inj b1 b2 = + if Seq.length b1 = 0 + then () + else begin + be_to_n_inj (Seq.slice b1 0 (Seq.length b1 - 1)) (Seq.slice b2 0 (Seq.length b2 - 1)); + Seq.lemma_split b1 (Seq.length b1 - 1); + Seq.lemma_split b2 (Seq.length b2 - 1) + end + +let rec le_to_n_inj b1 b2 = + if Seq.length b1 = 0 + then () + else begin + le_to_n_inj (Seq.slice b1 1 (Seq.length b1)) (Seq.slice b2 1 (Seq.length b2)); + Seq.lemma_split b1 1; + Seq.lemma_split b2 1 + end + +/// Roundtripping +/// ------------- + +let n_to_be_be_to_n len s = + lemma_be_to_n_is_bounded s; + be_to_n_inj s (n_to_be len (be_to_n s)) + +let n_to_le_le_to_n len s = + lemma_le_to_n_is_bounded s; + le_to_n_inj s (n_to_le len (le_to_n s)) + + +/// Reasoning over sequences of integers +/// ------------------------------------ +/// +/// TODO: should these be sequences of nats instead? then re-use these lemmas to +/// export a variant (which we need for, say, hashes) specialized to F* machine +/// integers? 
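(* A client-side sketch of how the functions defined just below get used
   (the [block] and [words] names are made up, in the style of a hash
   function's message block): a 64-byte block read as sixteen little-endian
   32-bit words, and a word sequence flattened back to bytes. *)
let words_of_block (block: bytes { S.length block = 64 })
  : s:S.seq U32.t { S.length s = 16 }
  = seq_uint32_of_le 16 block

let block_of_words (words: S.seq U32.t { S.length words = 16 })
  : b:bytes { S.length b = 64 }
  = le_of_seq_uint32 words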
+ +let rec seq_uint32_of_le l b = + if S.length b = 0 then + S.empty + else + let hd, tl = Seq.split b 4 in + S.cons (uint32_of_le hd) (seq_uint32_of_le (l - 1) tl) + +let rec le_of_seq_uint32 s = + if S.length s = 0 then + S.empty + else + S.append (le_of_uint32 (S.head s)) (le_of_seq_uint32 (S.tail s)) + +let rec seq_uint32_of_be l b = + if S.length b = 0 then + S.empty + else + let hd, tl = Seq.split b 4 in + S.cons (uint32_of_be hd) (seq_uint32_of_be (l - 1) tl) + +let rec be_of_seq_uint32 s = + if S.length s = 0 then + S.empty + else + S.append (be_of_uint32 (S.head s)) (be_of_seq_uint32 (S.tail s)) + +let rec seq_uint64_of_le l b = + if S.length b = 0 then + S.empty + else + let hd, tl = Seq.split b 8 in + S.cons (uint64_of_le hd) (seq_uint64_of_le (l - 1) tl) + +let rec le_of_seq_uint64 s = + if S.length s = 0 then + S.empty + else + S.append (le_of_uint64 (S.head s)) (le_of_seq_uint64 (S.tail s)) + +let rec seq_uint64_of_be l b = + if S.length b = 0 then + S.empty + else + let hd, tl = Seq.split b 8 in + S.cons (uint64_of_be hd) (seq_uint64_of_be (l - 1) tl) + +let rec be_of_seq_uint64 s = + if S.length s = 0 then + S.empty + else + S.append (be_of_uint64 (S.head s)) (be_of_seq_uint64 (S.tail s)) + +/// Pure indexing & update over sequences +/// ------------------------------------- + +#set-options "--max_fuel 1 --max_ifuel 0 --z3rlimit 50" + +let rec offset_uint32_be (b: bytes) (n: nat) (i: nat) = + if S.length b = 0 then + false_elim () + else + let hd, tl = Seq.split b 4 in + if i = 0 then + () + else + offset_uint32_be tl (n - 1) (i - 1) + +let rec offset_uint32_le (b: bytes) (n: nat) (i: nat) = + if S.length b = 0 then + false_elim () + else + let hd, tl = Seq.split b 4 in + if i = 0 then + () + else + offset_uint32_le tl (n - 1) (i - 1) + +let rec offset_uint64_be (b: bytes) (n: nat) (i: nat) = + if S.length b = 0 then + false_elim () + else + let hd, tl = Seq.split b 8 in + if i = 0 then + () + else + offset_uint64_be tl (n - 1) (i - 1) + +let rec offset_uint64_le (b: bytes) (n: nat) (i: nat) = + if S.length b = 0 then + false_elim () + else + let hd, tl = Seq.split b 8 in + if i = 0 then + () + else + offset_uint64_le tl (n - 1) (i - 1) + + +/// Appending and slicing sequences of integers +/// ------------------------------------------- + +#set-options "--max_fuel 1 --z3rlimit 20" + +(* TODO: move to FStar.Seq.Properties, with the pattern *) +let tail_cons (#a: Type) (hd: a) (tl: S.seq a): Lemma + (ensures (S.equal (S.tail (S.cons hd tl)) tl)) +// [ SMTPat (S.tail (S.cons hd tl)) ] += + () + +let be_of_seq_uint32_base s1 s2 = () + +let le_of_seq_uint32_base s1 s2 = () + +let be_of_seq_uint64_base s1 s2 = () + +let rec be_of_seq_uint32_append s1 s2 = + if S.length s1 = 0 then begin + assert (S.equal (be_of_seq_uint32 s1) S.empty); + assert (S.equal s1 S.empty); + S.append_empty_l s2; + S.append_empty_l (be_of_seq_uint32 s2); + assert (S.equal (S.append s1 s2) s2); + () + end else begin + calc S.equal { + be_of_seq_uint32 (S.append s1 s2); + S.equal { () } + be_of_seq_uint32 (S.append (S.cons (S.head s1) (S.tail s1)) s2); + S.equal { S.append_cons (S.head s1) (S.tail s1) s2 } + be_of_seq_uint32 (S.cons (S.head s1) (S.append (S.tail s1) s2)); + S.equal { () } + be_of_seq_uint32 (S.cons (S.head s1) (S.append (S.tail s1) s2)); + S.equal { S.head_cons (S.head s1) (S.append (S.tail s1) s2); + tail_cons (S.head s1) (S.append (S.tail s1) s2) } + S.append (be_of_uint32 (S.head s1)) + (be_of_seq_uint32 (S.append (S.tail s1) s2)); + S.equal { be_of_seq_uint32_append (S.tail s1) s2 } + 
S.append (be_of_uint32 (S.head s1)) + (S.append (be_of_seq_uint32 (S.tail s1)) (be_of_seq_uint32 s2)); + } + end + +let rec le_of_seq_uint32_append s1 s2 = + Classical.forall_intro_2 (tail_cons #U32.t); // TODO: this is a local pattern, remove once tail_cons lands in FStar.Seq.Properties + if S.length s1 = 0 then begin + assert (S.equal (le_of_seq_uint32 s1) S.empty); + assert (S.equal (S.append s1 s2) s2); + () + end else begin + assert (S.equal (S.append s1 s2) (S.cons (S.head s1) (S.append (S.tail s1) s2))); + assert (S.equal (le_of_seq_uint32 (S.append s1 s2)) + (S.append (le_of_uint32 (S.head s1)) (le_of_seq_uint32 (S.append (S.tail s1) s2)))); + le_of_seq_uint32_append (S.tail s1) s2 + end + +let rec be_of_seq_uint64_append s1 s2 = + Classical.forall_intro_2 (tail_cons #U64.t); // TODO: this is a local pattern, remove once tail_cons lands in FStar.Seq.Properties + if S.length s1 = 0 then begin + assert (S.equal (be_of_seq_uint64 s1) S.empty); + assert (S.equal (S.append s1 s2) s2); + () + end else begin + assert (S.equal (S.append s1 s2) (S.cons (S.head s1) (S.append (S.tail s1) s2))); + assert (S.equal (be_of_seq_uint64 (S.append s1 s2)) + (S.append (be_of_uint64 (S.head s1)) (be_of_seq_uint64 (S.append (S.tail s1) s2)))); + be_of_seq_uint64_append (S.tail s1) s2 + end + +let rec seq_uint32_of_be_be_of_seq_uint32 n s = + if n = 0 + then () + else begin + assert (s `S.equal` S.cons (S.head s) (S.tail s)); + seq_uint32_of_be_be_of_seq_uint32 (n - 1) (S.tail s); + let s' = be_of_seq_uint32 s in + S.lemma_split s' 4; + S.lemma_append_inj (S.slice s' 0 4) (S.slice s' 4 (S.length s')) (be_of_uint32 (S.head s)) (be_of_seq_uint32 (S.tail s)) + end + +let rec be_of_seq_uint32_seq_uint32_of_be n s = + if n = 0 + then () + else begin + S.lemma_split s 4; + be_of_seq_uint32_seq_uint32_of_be (n - 1) (S.slice s 4 (S.length s)); + let s' = seq_uint32_of_be n s in + let hd, tl = S.split s 4 in + assert (S.head s' == uint32_of_be hd); + tail_cons (uint32_of_be hd) (seq_uint32_of_be (n - 1) tl); + assert (S.tail s' == seq_uint32_of_be (n - 1) tl); + let s'' = be_of_seq_uint32 s' in + S.lemma_split s'' 4; + S.lemma_append_inj (S.slice s'' 0 4) (S.slice s'' 4 (S.length s'')) (be_of_uint32 (S.head s')) (be_of_seq_uint32 (S.tail s')); + n_to_be_be_to_n 4 hd + end + +let slice_seq_uint32_of_be n s lo hi = () + +let be_of_seq_uint32_slice s lo hi = + slice_seq_uint32_of_be (S.length s) (be_of_seq_uint32 s) lo hi diff --git a/stage0/ulib/FStar.Endianness.fsti b/stage0/ulib/FStar.Endianness.fsti new file mode 100644 index 00000000000..3262f4afe03 --- /dev/null +++ b/stage0/ulib/FStar.Endianness.fsti @@ -0,0 +1,364 @@ +module FStar.Endianness + +/// A library of lemmas for reasoning about sequences of machine integers and +/// their (little|big)-endian representation as a sequence of bytes. +/// +/// The functions in this module aim to be as generic as possible, in order to +/// facilitate compatibility with: +/// - Vale's model of machine integers (nat64 et al.), which does not rely on +/// FStar's machine integers +/// - HACL*'s Lib.IntTypes module, which exposes a universal indexed integer +/// type but uses F* machine integers under the hood. +/// +/// To achieve maximum compatibility, we try to state most lemmas using nat +/// rather than UIntX. +/// +/// To limit context pollution, the definitions of the recursive functions are +/// abstract; please add lemmas as you see fit. In extreme cases, ``friend``'ing +/// might be de rigueur. +/// +/// .. 
note:: +/// +/// This module supersedes the poorly-named ``FStar.Krml.Endianness``. + +module U8 = FStar.UInt8 +module U32 = FStar.UInt32 +module U64 = FStar.UInt64 +module Math = FStar.Math.Lemmas +module S = FStar.Seq + +[@@ noextract_to "krml"] +type bytes = S.seq U8.t + +open FStar.Mul + + +/// Definition of little and big-endianness +/// --------------------------------------- +/// +/// This is our spec, to be audited. From bytes to nat. + +/// lt_to_n interprets a byte sequence as a little-endian natural number +val le_to_n : b:bytes -> Tot nat + +/// be_to_n interprets a byte sequence as a big-endian natural number +val be_to_n : b:bytes -> Tot nat + +/// Induction for le_to_n and be_to_n + +val reveal_le_to_n (b:bytes) + : Lemma + (le_to_n b == + (match Seq.length b with + | 0 -> 0 + | _ -> U8.v (S.head b) + pow2 8 * le_to_n (S.tail b))) + +val reveal_be_to_n (b:bytes) + : Lemma + (be_to_n b == + (match Seq.length b with + | 0 -> 0 + | _ -> U8.v (S.last b) + pow2 8 * be_to_n (S.slice b 0 (S.length b - 1)))) + +val lemma_le_to_n_is_bounded: b:bytes -> Lemma + (requires True) + (ensures (le_to_n b < pow2 (8 * Seq.length b))) + (decreases (Seq.length b)) + +val lemma_be_to_n_is_bounded: b:bytes -> Lemma + (requires True) + (ensures (be_to_n b < pow2 (8 * Seq.length b))) + (decreases (Seq.length b)) + + +/// Inverse operations +/// ------------------ +/// +/// From nat to bytes, and their functional correctness. + +/// n_to_le encodes a number as a little-endian byte sequence of a fixed, +/// sufficiently large length. +val n_to_le : len:nat -> n:nat{n < pow2 (8 * len)} -> + Tot (b:bytes{S.length b == len /\ n == le_to_n b}) + (decreases len) + +/// n_to_be encodes a numbers as a big-endian byte sequence of a fixed, +/// sufficiently large length +val n_to_be: + len:nat -> n:nat{n < pow2 (8 * len)} -> + Tot (b:bytes{S.length b == len /\ n == be_to_n b}) + (decreases len) + +/// Injectivity +/// ----------- + +val n_to_le_inj (len: nat) (n1 n2: (n:nat{n < pow2 (8 * len)})): + Lemma (requires (n_to_le len n1 == n_to_le len n2)) + (ensures (n1 == n2)) + +val n_to_be_inj (len: nat) (n1 n2: (n:nat{n < pow2 (8 * len)})) : + Lemma (requires (n_to_be len n1 == n_to_be len n2)) + (ensures (n1 == n2)) + +val be_to_n_inj + (b1 b2: Seq.seq U8.t) +: Lemma + (requires (Seq.length b1 == Seq.length b2 /\ be_to_n b1 == be_to_n b2)) + (ensures (Seq.equal b1 b2)) + (decreases (Seq.length b1)) + +val le_to_n_inj + (b1 b2: Seq.seq U8.t) +: Lemma + (requires (Seq.length b1 == Seq.length b2 /\ le_to_n b1 == le_to_n b2)) + (ensures (Seq.equal b1 b2)) + (decreases (Seq.length b1)) + +/// Roundtripping +/// ------------- + +val n_to_be_be_to_n (len: nat) (s: Seq.seq U8.t) : Lemma + (requires (Seq.length s == len)) + (ensures ( + be_to_n s < pow2 (8 * len) /\ + n_to_be len (be_to_n s) == s + )) + [SMTPat (n_to_be len (be_to_n s))] + +val n_to_le_le_to_n (len: nat) (s: Seq.seq U8.t) : Lemma + (requires (Seq.length s == len)) + (ensures ( + le_to_n s < pow2 (8 * len) /\ + n_to_le len (le_to_n s) == s + )) + [SMTPat (n_to_le len (le_to_n s))] + + +/// Specializations for F* machine integers +/// --------------------------------------- +/// +/// These are useful because they take care of calling the right ``*_is_bounded`` lemmas. 
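+
+(* An illustrative sketch (ours, not part of the original interface) of the
+   boilerplate that the specializations below spare clients: manually invoking
+   the boundedness lemma before stating a roundtrip. *)
+let example_be_roundtrip (s: bytes { S.length s = 4 })
+  : Lemma (be_to_n s < pow2 (8 * 4) /\ n_to_be 4 (be_to_n s) == s)
+  = lemma_be_to_n_is_bounded s;
+    n_to_be_be_to_n 4 s
+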
+ +let uint32_of_le (b: bytes { S.length b = 4 }) = + let n = le_to_n b in + lemma_le_to_n_is_bounded b; + UInt32.uint_to_t n + +let le_of_uint32 (x: UInt32.t): b:bytes{ S.length b = 4 } = + n_to_le 4 (UInt32.v x) + +let uint32_of_be (b: bytes { S.length b = 4 }) = + let n = be_to_n b in + lemma_be_to_n_is_bounded b; + UInt32.uint_to_t n + +let be_of_uint32 (x: UInt32.t): b:bytes{ S.length b = 4 } = + n_to_be 4 (UInt32.v x) + +let uint64_of_le (b: bytes { S.length b = 8 }) = + let n = le_to_n b in + lemma_le_to_n_is_bounded b; + UInt64.uint_to_t n + +let le_of_uint64 (x: UInt64.t): b:bytes{ S.length b = 8 } = + n_to_le 8 (UInt64.v x) + +let uint64_of_be (b: bytes { S.length b = 8 }) = + let n = be_to_n b in + lemma_be_to_n_is_bounded b; + UInt64.uint_to_t n + +let be_of_uint64 (x: UInt64.t): b:bytes{ S.length b = 8 } = + n_to_be 8 (UInt64.v x) + + +/// Lifting {le,be}_to_n / n_to_{le,be} to sequences +/// ------------------------------------------------ +/// +/// TODO: 16-bit (but is it really needed?) +/// TODO: should these be specializations of generic functions that chop on +/// N-byte boundaries, and operate on bounded nats instead of uints? + +val seq_uint32_of_le (l: nat) (b: bytes{ S.length b = 4 * l }): + s:S.seq UInt32.t { S.length s = l } + +val le_of_seq_uint32 (s: S.seq UInt32.t): + Tot (b:bytes { S.length b = 4 * S.length s }) + (decreases (S.length s)) + +val seq_uint32_of_be (l: nat) (b: bytes{ S.length b = 4 * l }): + s:S.seq UInt32.t { S.length s = l } + +val be_of_seq_uint32 (s: S.seq UInt32.t): + Tot (b:bytes { S.length b = 4 * S.length s }) + (decreases (S.length s)) + +val seq_uint64_of_le (l: nat) (b: bytes{ S.length b = 8 * l }): + s:S.seq UInt64.t { S.length s = l } + +val le_of_seq_uint64 (s: S.seq UInt64.t): + Tot (b:bytes { S.length b = 8 * S.length s }) + (decreases (S.length s)) + +val seq_uint64_of_be (l: nat) (b: bytes{ S.length b = 8 * l }): + s:S.seq UInt64.t { S.length s = l } + +val be_of_seq_uint64 (s: S.seq UInt64.t): + Tot (b:bytes { S.length b = 8 * S.length s }) + (decreases (S.length s)) + + +/// Complete specification of the combinators above, relating them to {le,be}_to_ / n_to_{le,be} +/// -------------------------------------------------------------------------------------------- + +val offset_uint32_be (b: bytes) (n: nat) (i: nat): + Lemma + (requires ( + S.length b = 4 * n /\ + i < n)) + (ensures ( + S.index (seq_uint32_of_be n b) i == uint32_of_be (S.slice b (4 * i) (4 * i + 4)))) + (decreases ( + S.length b)) + [ SMTPat (S.index (seq_uint32_of_be n b) i) ] + +val offset_uint32_le (b: bytes) (n: nat) (i: nat): + Lemma + (requires ( + S.length b = 4 * n /\ + i < n)) + (ensures ( + S.index (seq_uint32_of_le n b) i == uint32_of_le (S.slice b (4 * i) (4 * i + 4)))) + (decreases ( + S.length b)) + [ SMTPat (S.index (seq_uint32_of_le n b) i) ] + +val offset_uint64_be (b: bytes) (n: nat) (i: nat): + Lemma + (requires ( + S.length b = 8 * n /\ + i < n)) + (ensures ( + S.index (seq_uint64_of_be n b) i == uint64_of_be (S.slice b (8 * i) (8 * i + 8)))) + (decreases ( + S.length b)) + [ SMTPat (S.index (seq_uint64_of_be n b) i) ] + +val offset_uint64_le (b: bytes) (n: nat) (i: nat): + Lemma + (requires ( + S.length b = 8 * n /\ + i < n)) + (ensures ( + S.index (seq_uint64_of_le n b) i == uint64_of_le (S.slice b (8 * i) (8 * i + 8)))) + (decreases ( + S.length b)) + [ SMTPat (S.index (seq_uint64_of_le n b) i) ] + + +/// Reasoning about appending such sequences +/// ---------------------------------------- +/// +/// TODO: this is fairly incomplete +/// 
TODO: the *_base cases seem ad-hoc and derivable trivially from offset above; why have them? + +val be_of_seq_uint32_base (s1: S.seq U32.t) (s2: S.seq U8.t): Lemma + (requires ( + S.length s1 = 1 /\ + S.length s2 = 4 /\ + be_to_n s2 = U32.v (S.index s1 0))) + (ensures (S.equal s2 (be_of_seq_uint32 s1))) + [ SMTPat (be_to_n s2); SMTPat (U32.v (S.index s1 0)) ] + +val le_of_seq_uint32_base (s1: S.seq U32.t) (s2: S.seq U8.t): Lemma + (requires ( + S.length s1 = 1 /\ + S.length s2 = 4 /\ + le_to_n s2 = U32.v (S.index s1 0))) + (ensures (S.equal s2 (le_of_seq_uint32 s1))) + [ SMTPat (le_to_n s2); SMTPat (U32.v (S.index s1 0)) ] + +val be_of_seq_uint64_base (s1: S.seq U64.t) (s2: S.seq U8.t): Lemma + (requires ( + S.length s1 = 1 /\ + S.length s2 = 8 /\ + be_to_n s2 = U64.v (S.index s1 0))) + (ensures (S.equal s2 (be_of_seq_uint64 s1))) + [ SMTPat (be_to_n s2); SMTPat (U64.v (S.index s1 0)) ] + +val be_of_seq_uint32_append (s1 s2: S.seq U32.t): Lemma + (ensures ( + S.equal (be_of_seq_uint32 (S.append s1 s2)) + (S.append (be_of_seq_uint32 s1) (be_of_seq_uint32 s2)))) + (decreases ( + S.length s1)) + [ SMTPat (S.append (be_of_seq_uint32 s1) (be_of_seq_uint32 s2)) ] + +val le_of_seq_uint32_append (s1 s2: S.seq U32.t): Lemma + (ensures ( + S.equal (le_of_seq_uint32 (S.append s1 s2)) + (S.append (le_of_seq_uint32 s1) (le_of_seq_uint32 s2)))) + (decreases ( + S.length s1)) + [ SMTPat (S.append (le_of_seq_uint32 s1) (le_of_seq_uint32 s2)) ] + +val be_of_seq_uint64_append (s1 s2: S.seq U64.t): Lemma + (ensures ( + S.equal (be_of_seq_uint64 (S.append s1 s2)) + (S.append (be_of_seq_uint64 s1) (be_of_seq_uint64 s2)))) + (decreases ( + S.length s1)) + [ SMTPat (S.append (be_of_seq_uint64 s1) (be_of_seq_uint64 s2)) ] + +/// Roundtripping +/// ------------- +/// +/// TODO: also incomplete + +val seq_uint32_of_be_be_of_seq_uint32 (n: nat) (s: S.seq U32.t) : Lemma + (requires (n == S.length s)) + (ensures (seq_uint32_of_be n (be_of_seq_uint32 s) `S.equal` s)) + (decreases n) + [SMTPat (seq_uint32_of_be n (be_of_seq_uint32 s))] + +val be_of_seq_uint32_seq_uint32_of_be (n: nat) (s: S.seq U8.t) : Lemma + (requires (4 * n == S.length s)) + (ensures (be_of_seq_uint32 (seq_uint32_of_be n s) `S.equal` s)) + (decreases n) + [SMTPat (be_of_seq_uint32 (seq_uint32_of_be n s))] + +/// Reasoning about slicing such sequences +/// -------------------------------------- +/// +/// (Needs SMTPats above for roundtripping in their proof, hence why they're at the end.) + +val slice_seq_uint32_of_be (n: nat) (s: S.seq U8.t) (lo: nat) (hi: nat) : Lemma + (requires (4 * n == S.length s /\ lo <= hi /\ hi <= n)) + (ensures (S.slice (seq_uint32_of_be n s) lo hi) `S.equal` seq_uint32_of_be (hi - lo) (S.slice s (4 * lo) (4 * hi))) + +val be_of_seq_uint32_slice (s: S.seq U32.t) (lo: nat) (hi: nat) : Lemma + (requires (lo <= hi /\ hi <= S.length s)) + (ensures (be_of_seq_uint32 (S.slice s lo hi) `S.equal` S.slice (be_of_seq_uint32 s) (4 * lo) (4 * hi))) + + +/// Some reasoning about zero bytes + +let rec le_to_n_zeros (s:bytes) + : Lemma + (requires + forall (i:nat). i < Seq.length s ==> Seq.index s i == 0uy) + (ensures le_to_n s == 0) + (decreases (Seq.length s)) + = reveal_le_to_n s; + if Seq.length s = 0 then () + else le_to_n_zeros (Seq.tail s) + +let rec be_to_n_zeros (s:bytes) + : Lemma + (requires + forall (i:nat). 
i < Seq.length s ==> Seq.index s i == 0uy) + (ensures be_to_n s == 0) + (decreases (Seq.length s)) + = reveal_be_to_n s; + if Seq.length s = 0 then () + else be_to_n_zeros (Seq.slice s 0 (Seq.length s - 1)) diff --git a/stage0/ulib/FStar.Exn.fst b/stage0/ulib/FStar.Exn.fst new file mode 100644 index 00000000000..82ef58e98c4 --- /dev/null +++ b/stage0/ulib/FStar.Exn.fst @@ -0,0 +1,23 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Exn + +(** Providing the signature of [raise], + that is implemented natively in FStar_Exn.ml as primitive raise *) +assume +val raise (e: exn) : Exn 'a (requires True) (ensures (fun r -> r == E e)) + diff --git a/stage0/ulib/FStar.ExtractAs.fst b/stage0/ulib/FStar.ExtractAs.fst new file mode 100644 index 00000000000..11a785dafac --- /dev/null +++ b/stage0/ulib/FStar.ExtractAs.fst @@ -0,0 +1,37 @@ +(* + Copyright 2024 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.ExtractAs +open FStar.Stubs.Reflection.Types + +(** Replaces the annotated definition + by the specified implementation during extraction. + There are no checks whether the implementation + has the same semantics, or even the same type. + + For example, if you have: + + [@@extract_as (`(fun (x: nat) -> "not a number"))] + let add_one (x: nat) : nat = x + 42 + + Then `add_one` will extract to `let add_one x = "not a number"`, + and most likely cause the extracted program to crash. + + Note that the argument needs to be a literal quotation. + The implementation can be recursive, + but then you need to construct the attribute via a tactic. + *) +let extract_as (impl: term) = () diff --git a/stage0/ulib/FStar.Fin.fst b/stage0/ulib/FStar.Fin.fst new file mode 100644 index 00000000000..306c781e04a --- /dev/null +++ b/stage0/ulib/FStar.Fin.fst @@ -0,0 +1,162 @@ +(* + Copyright 2008-2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +module FStar.Fin + +/// This module is supposed to contain various lemmas about +/// finiteness. For now, it mainly provides a basic pigeonhole +/// principle +/// +/// TODO: We might generalize this to also support general utilities +/// for reasoning about cardinality, relation with injections and +/// surjections, etc. +/// +/// UPD. November 8, 2022 -- added support for custom equivalence relation-aware +/// pigeon principle lemma. +/// +/// For better readability of lemmas, Argument types are kept explicit, +/// effectively duplicating the interface file declarations. + +module S = FStar.Seq + +(** Find an index of an element in [s] starting from [i] that validates [p] *) +let rec find (#a: Type) (s: S.seq a) (p: (a -> bool)) (i: under (S.length s)) = + if p (S.index s i) + then Some i + else if i + 1 < S.length s + then find #a s p (i + 1) + else None + +(** Given a sequence [s] all of whose elements are at most [n], if the + length of [s] is greater than [n], then there are two distinct + indexes in [s] that contain the same element *) +let rec pigeonhole (#n: pos) (s: S.seq (under n)) = + if n = 1 then (0, 1) + else let k0 = S.index s 0 in + match find s (fun k -> k = k0) 1 with + | Some i -> 0, i + | None -> let (i1,i2) = + pigeonhole (S.init #(under (n-1)) n + (fun i -> let k = S.index s (i+1) in + if k x `r` z + +let is_reflexive_intro #a r = reveal_opaque (`%is_reflexive) (is_reflexive #a) + +let is_symmetric_intro #a r = reveal_opaque (`%is_reflexive) (is_reflexive #a) + +let is_transitive_intro #a r = reveal_opaque (`%is_reflexive) (is_reflexive #a) + +let refl_lemma #a _ _ = reveal_opaque (`%is_reflexive) (is_reflexive #a) + +let symm_lemma #a _ _ _ = reveal_opaque (`%is_symmetric) (is_symmetric #a) + +let trans_lemma #a _ _ _ _ = reveal_opaque (`%is_transitive) (is_transitive #a); + reveal_opaque (`%is_symmetric) (is_symmetric #a) + + +let contains_eq_means_nonempty #a (eq:equivalence_relation a) (s: S.seq a) (x:a) + : Lemma (requires contains_eq eq s x) + (ensures S.length s > 0) + [SMTPat(contains_eq eq s x)] + = reveal_opaque (`%contains_eq) (contains_eq eq) + +let tail_contains_eq #a (eq: equivalence_relation a) (s:S.seq a) + (x:a { contains_eq eq s x /\ ~(eq x (S.head s)) }) + : Lemma (contains_eq eq (S.tail s) x) + = let t = S.tail s in + reveal_opaque (`%contains_eq) (contains_eq eq); + eliminate exists (i: under (S.length s)). eq x (S.index s i) + returns exists (k: under (S.length t)). eq x (S.index t k) + with _. assert (S.index s i == S.index t (i-1)) + +(** retrieves the index of an element given prior knowledge of its presense + unlike find function above, that returns option, this one guarantees + success, thus returning bare index instead. *) +let rec find_eq #a (eq:equivalence_relation a) (s: S.seq a) + (x:a { contains_eq eq s x }) + : Tot (i: nat { (i < S.length s) /\ + (x `eq` S.index s i) /\ + (forall (j: under i). 
not (x `eq` S.index s j)) }) + (decreases S.length s) + = reveal_opaque (`%contains_eq) (contains_eq eq); + if S.length s = 1 then 0 + else if x `eq` S.index s 0 then 0 + else begin + tail_contains_eq eq s x; + let ieq = find_eq eq (S.tail s) x in + let aux (i: under (1 + ieq)) + : Lemma (not (x `eq` S.index s i)) + = if i > 0 + then assert (S.index (S.tail s) (i-1) == S.index s i) + in Classical.forall_intro aux; + 1 + ieq + end + +(** pigeonhole principle for setoids: + If we have a nonempty sequence (all), and a sequence (s), + and we know in advance that each item of (s) equals some + item in (all), equals meaning (eq), not (==), + then we automatically know that there are at least + 2 equivalent elements in (s). + + This procedure returns the first such pair. *) + +// for better readability, I kept the signature explicit +let rec pigeonhole_eq (#a:Type) (eq: equivalence_relation a) + (holes: S.seq a{S.length holes > 0}) + (pigeons: S.seq (items_of eq holes)) = + if S.length holes = 1 + then begin + reveal_opaque (`%contains_eq) (contains_eq eq); + trans_lemma eq (S.index pigeons 0) (S.index holes 0) (S.index pigeons 1); + (0,1) + end + else let first_pigeon = S.index pigeons 0 in + match find pigeons (fun k -> eq k first_pigeon) 1 with + | Some i -> (symm_lemma eq (S.index pigeons 0) (S.index pigeons i); (0,i)) + | None -> + let index_of_first_pigeon = find_eq eq holes first_pigeon in //we carefully carve first_pigeon from (holes) + let holes_except_first_pigeon = S.append (S.slice holes 0 (index_of_first_pigeon)) + (S.slice holes (index_of_first_pigeon+1) (S.length holes)) in + let all_but_first_pigeon_remain_in_reduced (x: items_of eq holes { not (eq x first_pigeon) }) + : Lemma (contains_eq eq holes_except_first_pigeon x) + = let index_of_x_in_holes = find_eq eq holes x in + reveal_opaque (`%contains_eq) (contains_eq eq); + if index_of_x_in_holes < index_of_first_pigeon + then assert (S.index holes index_of_x_in_holes == S.index holes_except_first_pigeon index_of_x_in_holes) + else begin + // this serves to prove index_of_x_in_holes > index_of_first_pigeon (no equality!) + Classical.move_requires (trans_lemma eq x (S.index holes index_of_x_in_holes)) first_pigeon; + // append/slice smtpat hint + assert (S.index holes index_of_x_in_holes == S.index holes_except_first_pigeon (index_of_x_in_holes-1)) + end + in Classical.forall_intro all_but_first_pigeon_remain_in_reduced; + let i1, i2 = pigeonhole_eq (eq) (holes_except_first_pigeon) + (S.init #(items_of eq holes_except_first_pigeon) + (S.length pigeons - 1) + (fun i -> S.index pigeons (i+1))) + in (i1+1, i2+1) + diff --git a/stage0/ulib/FStar.Fin.fsti b/stage0/ulib/FStar.Fin.fsti new file mode 100644 index 00000000000..f00030d4f63 --- /dev/null +++ b/stage0/ulib/FStar.Fin.fsti @@ -0,0 +1,158 @@ +(* + Copyright 2008-2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Fin + +/// This module is supposed to contain various lemmas about +/// finiteness. 
For now, it mainly provides a basic pigeonhole +/// principle +/// +/// TODO: We might generalize this to also support general utilities +/// for reasoning about cardinality, relation with injections and +/// surjections, etc. +/// +/// UPD. November 8, 2022 -- added support for custom equivalence relation-aware +/// pigeon principle lemma. +/// UPD. November 23, 2022 -- added interface file + +module L = FStar.List.Tot +module S = FStar.Seq + +(** The type of natural numbers bounded by [n] *) +inline_for_extraction +let fin (n: nat) = k: int {0 <= k /\ k < n} + +(** Newer synonym. We perhaps should choose one over another globally. + [under] is also defined in IntegerIntervals.fst, along with other + often used finite intervals. *) +inline_for_extraction +let under (p:nat) = x:nat {x bool)) (i: under (S.length s)) + : Pure (option (in_ s)) + (requires True) + (ensures (function + | None -> (forall (k: nat{i <= k /\ k < S.length s}). p (S.index s k) == false) + | Some j -> i <= j /\ p (S.index s j))) + (decreases (S.length s - i)) + +(** Given a sequence [s] all of whose elements are at most [n], if the + length of [s] is greater than [n], then there are two distinct + indexes in [s] that contain the same element *) +val pigeonhole (#n: pos) (s: S.seq (under n)) + : Pure (in_ s & in_ s) + (requires S.length s = n + 1) + (ensures (fun (i1, i2) -> i1 < i2 /\ S.index s i1 = S.index s i2)) + (decreases n) + +(** Here we prepare to prove pigeonhole principle for a finite sequence + with a custom equivalence relation (as opposed to eqtype). + + Think setoids. *) + +(** Following code is extracted from CuteCAS, which will eventually make + its way into F* -- when I wrap things up with most important notions + of abstract algebra. + + As I port more code from my CAS project to F*, such things will be + moved to separate modules. -- Alex Rozanov *) + +inline_for_extraction +type binary_relation (a: Type) = a -> a -> bool + +(** For performance reasons, forall definitions are best kept hidden from SMT. + Use reveal_opaque when you really need it. Or use refl/trans/symm lemmas + below to keep the context clean. *) + +val is_reflexive (#a:Type) (r: binary_relation a) : Type0 +val is_symmetric (#a:Type) (r: binary_relation a) : Type0 +val is_transitive (#a:Type) (r: binary_relation a) : Type0 + +val is_reflexive_intro (#a:Type) (r: binary_relation a) + : Lemma (requires forall (x:a). r x x) (ensures is_reflexive r) + +val is_symmetric_intro (#a:Type) (r: binary_relation a) + : Lemma (requires forall (x:a). r x x) (ensures is_reflexive r) + +val is_transitive_intro (#a:Type) (r: binary_relation a) + : Lemma (requires forall (x:a). r x x) (ensures is_reflexive r) + +(** Textbook stuff on equivalence relations *) +type equivalence_relation (a: Type) + = r:binary_relation a { is_reflexive r /\ is_symmetric r /\ is_transitive r } + +val refl_lemma (#a:Type) (eq: equivalence_relation a) (x:a) + : Lemma (eq x x) + +val symm_lemma (#a:Type) (eq:equivalence_relation a) (x y:a) + : Lemma (eq x y == eq y x) + +val trans_lemma (#a:Type) (eq: equivalence_relation a) (x y z:a) + : Lemma (requires (eq x y \/ eq y x) /\ (eq y z \/ eq z y)) + (ensures (x `eq` z) && (z `eq` x)) + + +(** (contains) predicate, but with custom comparison operation (a->a->bool) *) +[@@"opaque_to_smt"] +let contains_eq #a (eq: equivalence_relation a) (s: S.seq a) (x:a) + = exists (i:under (S.length s)). 
eq x (S.index s i) + +val contains_eq_means_nonempty (#a:Type) (eq:equivalence_relation a) (s: S.seq a) (x:a) + : Lemma (requires contains_eq eq s x) + (ensures S.length s > 0) + [SMTPat(contains_eq eq s x)] + +(** a type of all elements present in a given sequence *) +let items_of #a (eq: equivalence_relation a) (s: S.seq a) + = x:a { contains_eq eq s x } + +(** retrieves the index of an element given prior knowledge of its presense + unlike find function above, that returns option, this one guarantees + success, thus returning bare index instead. *) +val find_eq (#a:Type) (eq:equivalence_relation a) (s: S.seq a) (x:a { contains_eq eq s x }) + : (i: nat { (i < S.length s) + /\ (x `eq` S.index s i) + /\ (forall (j: under i). not (x `eq` S.index s j)) }) + +(** pigeonhole principle for setoids: + If we have a nonempty sequence (all), and a sequence (s), + and we know in advance that each item of (s) equals some + item in (all), equals meaning (eq), not (==), + then we automatically know that there are at least + 2 equivalent elements in (s). + + This procedure returns the first such pair. *) + +val pigeonhole_eq (#a:Type) (eq: equivalence_relation a) + (holes: S.seq a{S.length holes > 0}) + (pigeons: S.seq (items_of eq holes)) + : Pure (under (S.length pigeons) & under (S.length pigeons)) + (requires S.length pigeons > S.length holes) + (ensures (fun (i1, i2) -> i1 < i2 /\ (S.index pigeons i1 `eq` S.index pigeons i2))) + (decreases S.length holes) diff --git a/stage0/ulib/FStar.FiniteMap.Ambient.fst b/stage0/ulib/FStar.FiniteMap.Ambient.fst new file mode 100644 index 00000000000..4b046cd9018 --- /dev/null +++ b/stage0/ulib/FStar.FiniteMap.Ambient.fst @@ -0,0 +1,44 @@ +(* + Copyright 2008-2021 John Li, Jay Lorch, Rustan Leino, Alex Summers, + Dan Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module brings properties about finite maps ambiently into the +context. The properties are modeled after those in the Dafny sequence +axioms, with patterns for quantifiers chosen as in those axioms. 
+ +@summary Puts properties of finite maps into the ambient context +*) +module FStar.FiniteMap.Ambient + +open FStar.FiniteMap.Base + +let all_finite_map_facts_ambient : squash (all_finite_map_facts u#b) = + all_finite_map_facts_lemma u#b () + diff --git a/stage0/ulib/FStar.FiniteMap.Base.fst b/stage0/ulib/FStar.FiniteMap.Base.fst new file mode 100644 index 00000000000..e9b9608b26d --- /dev/null +++ b/stage0/ulib/FStar.FiniteMap.Base.fst @@ -0,0 +1,321 @@ +(* + Copyright 2008-2021 John Li, Jay Lorch, Rustan Leino, Alex Summers, + Dan Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module declares a type and functions used for modeling +finite maps as they're modeled in Dafny. + +@summary Type and functions for modeling finite maps +*) +module FStar.FiniteMap.Base + +open FStar.FunctionalExtensionality +module FLT = FStar.List.Tot +module FSet = FStar.FiniteSet.Base +open FStar.FiniteSet.Ambient +module T = FStar.Tactics.V2 + +// Finite maps +type map (a: eqtype) (b: Type u#b) = (keys: FSet.set a & setfun_t a b keys) + +let domain (#a: eqtype) (#b: Type u#b) (m: map a b) : FSet.set a = + let (| keys, _ |) = m in + keys + + +let elements (#a: eqtype) (#b: Type u#b) (m: map a b) : (setfun_t a b (domain m)) = + let (| _, f |) = m in + f + +let rec key_list_to_item_list + (#a: eqtype) + (#b: Type u#b) + (m: map a b) + (keys: list a{FSet.list_nonrepeating keys /\ (forall key. FLT.mem key keys ==> FSet.mem key (domain m))}) +: GTot (items: list (a & b){item_list_doesnt_repeat_keys items /\ (forall key. FLT.mem key keys <==> key_in_item_list key items)}) + (decreases keys) = + match keys with + | [] -> [] + | key :: remaining_keys -> (key, Some?.v ((elements m) key)) :: key_list_to_item_list m remaining_keys + +let map_as_list (#a: eqtype) (#b: Type u#b) (m: map a b) +: GTot (items: list (a & b){item_list_doesnt_repeat_keys items /\ (forall key. key_in_item_list key items <==> mem key m)}) = + key_list_to_item_list m (FSet.set_as_list (domain m)) + +/// We represent the Dafny function `Map#Card` with `cardinality`: +/// +/// function Map#Card(Map U V) : int; + +let cardinality (#a: eqtype) (#b: Type u#b) (m: map a b) : GTot nat = + FSet.cardinality (domain m) + +/// We represent the Dafny function `Map#Values` with `values`: +/// +/// function Map#Values(Map U V) : Set V; + +let values (#a: eqtype) (#b: Type u#b) (m: map a b) : GTot (b -> prop) = + fun value -> exists key. 
((elements m) key == Some value) + +/// We represent the Dafny function `Map#Items` with `items`: +/// +/// function Map#Items(Map U V) : Set Box; + +let items (#a: eqtype) (#b: Type u#b) (m: map a b) : GTot ((a & b) -> prop) = + fun item -> ((elements m) (fst item) == Some (snd item)) + +/// We represent the Dafny function `Map#Empty` with `emptymap`: +/// +/// function Map#Empty(): Map U V; + +let emptymap (#a: eqtype) (#b: Type u#b) : (map a b) = + (| FSet.emptyset, on_domain a (fun key -> None) |) + +/// We represent the Dafny function `Map#Glue` with `glue`. +/// +/// function Map#Glue([U]bool, [U]V, Ty): Map U V; + +let glue (#a: eqtype) (#b: Type u#b) (keys: FSet.set a) (f: setfun_t a b keys) : map a b = + (| keys, f |) + +/// We represent the Dafny function `Map#Build` with `build`: +/// +/// function Map#Build(Map U V, U, V): Map U V; + +let insert (#a: eqtype) (#b: Type u#b) (k: a) (v: b) (m: map a b) : map a b = + let keys' = FSet.insert k (domain m) in + let f' = on_domain a (fun key -> if key = k then Some v else (elements m) key) in + (| keys', f' |) + +/// We represent the Dafny function `Map#Merge` with `merge`: +/// +/// function Map#Merge(Map U V, Map U V): Map U V; + +let merge (#a: eqtype) (#b: Type u#b) (m1: map a b) (m2: map a b) : map a b = + let keys' = FSet.union (domain m1) (domain m2) in + let f' = on_domain a (fun key -> if FSet.mem key (domain m2) then (elements m2) key else (elements m1) key) in + (| keys', f' |) + +/// We represent the Dafny function `Map#Subtract` with `subtract`: +/// +/// function Map#Subtract(Map U V, Set U): Map U V; + +let subtract (#a: eqtype) (#b: Type u#b) (m: map a b) (s: FSet.set a) : map a b = + let keys' = FSet.difference (domain m) s in + let f' = on_domain a (fun key -> if FSet.mem key keys' then (elements m) key else None) in + (| keys', f' |) + +/// We represent the Dafny function `Map#Equal` with `equal`: +/// +/// function Map#Equal(Map U V, Map U V): bool; + +let equal (#a: eqtype) (#b: Type u#b) (m1: map a b) (m2: map a b) : prop = + feq (elements m1) (elements m2) /\ True //a bit ugly, a prop coercion + +/// We represent the Dafny function `Map#Disjoint` with `disjoint`: +/// +/// function Map#Disjoint(Map U V, Map U V): bool; + +let disjoint (#a: eqtype) (#b: Type u#b) (m1: map a b) (m2: map a b) : prop = + FSet.disjoint (domain m1) (domain m2) /\ True //prop coercion + +/// We represent the Dafny choice operator by `choose`: +/// +/// var x: T :| x in s; + +let choose (#a: eqtype) (#b: Type u#b) (m: map a b{exists key. mem key m}) : GTot (key: a{mem key m}) = + FSet.choose (domain m) + +/// We now prove each of the facts that comprise `all_finite_map_facts`. +/// For fact `xxx_fact`, we prove it with `xxx_lemma`. + +let cardinality_zero_iff_empty_lemma () +: Lemma (cardinality_zero_iff_empty_fact u#b) = + introduce forall (a: eqtype) (b:Type u#b) (m: map a b). cardinality m = 0 <==> m == emptymap + with ( + introduce cardinality m = 0 ==> m == emptymap + with _. assert (feq (elements m) (elements emptymap)) + ) + + +let empty_or_domain_occupied_lemma () + : Lemma (empty_or_domain_occupied_fact u#b) + = introduce forall (a: eqtype) (b:Type u#b) (m: map a b). m == emptymap \/ (exists k. mem k m) + with ( + if FSet.cardinality (domain m) = 0 then + introduce m == emptymap \/ (exists k. mem k m) + with Left ( + assert (cardinality m = 0); + cardinality_zero_iff_empty_lemma () + ) + else + introduce m == emptymap \/ (exists k. 
mem k m) + with Right () + ) + + +let empty_or_values_occupied_lemma () +: Lemma (empty_or_values_occupied_fact u#b) = + introduce forall (a: eqtype) (b:Type u#b) (m: map a b). m == emptymap \/ (exists v. (values m) v) + with + if FSet.cardinality (domain m) = 0 then + introduce m == emptymap \/ (exists v. (values m) v) + with Left ( + assert (cardinality m = 0); + cardinality_zero_iff_empty_lemma u#b () + ) + else + introduce m == emptymap \/ (exists v. (values m) v) + with Right ( + let k = choose m in + let v = Some?.v ((elements m) k) in + assert ((values m) v) + ) + +let empty_or_items_occupied_lemma () +: Lemma (empty_or_items_occupied_fact u#b) = + introduce forall (a: eqtype) (b: Type u#b) (m: map a b). m == emptymap \/ (exists item. (items m) item) + with + if FSet.cardinality (domain m) = 0 then + introduce m == emptymap \/ (exists v. (values m) v) + with Left ( + assert (cardinality m = 0); + cardinality_zero_iff_empty_lemma u#b () + ) + else + introduce m == emptymap \/ (exists item. (items m) item) + with Right ( + let k = choose m in + let v = Some?.v ((elements m) k) in + assert ((items m) (k, v)) + ) + +let map_cardinality_matches_domain_lemma () +: Lemma (map_cardinality_matches_domain_fact u#b) = + () + +let values_contains_lemma () +: Lemma (values_contains_fact u#b) = + () + +let items_contains_lemma () +: Lemma (items_contains_fact u#b) = + () + +let empty_domain_empty_lemma () +: Lemma (empty_domain_empty_fact u#b) = + () + +let glue_domain_lemma () +: Lemma (glue_domain_fact u#b) = + () + +let glue_elements_lemma () +: Lemma (glue_elements_fact u#b) = + () + +let insert_elements_lemma () +: Lemma (insert_elements_fact u#b) = + () + +let insert_member_cardinality_lemma () +: Lemma (insert_member_cardinality_fact u#b) = + () + +let insert_nonmember_cardinality_lemma () +: Lemma (insert_nonmember_cardinality_fact u#b) = + () + +let merge_domain_is_union_lemma () +: Lemma (merge_domain_is_union_fact u#b) = + () + +let merge_element_lemma () +: Lemma (merge_element_fact u#b) = + () + +let subtract_domain_lemma () +: Lemma (subtract_domain_fact u#b) = + () + +let subtract_element_lemma () +: Lemma (subtract_element_fact u#b) = + () + + +let map_equal_lemma () +: Lemma (map_equal_fact u#b) //Surprising; needed to split this goal into two += assert (map_equal_fact u#b) + by (T.norm [delta_only [`%map_equal_fact]]; + let _ = T.forall_intro () in + let _ = T.forall_intro () in + let _ = T.forall_intro () in + let _ = T.forall_intro () in + T.split (); + T.smt(); + T.smt()) + + +let map_extensionality_lemma () +: Lemma (map_extensionality_fact u#b) = + introduce forall (a: eqtype) (b:Type u#b) (m1: map a b) (m2: map a b). equal m1 m2 ==> m1 == m2 + with ( + introduce equal m1 m2 ==> m1 == m2 + with _. 
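+    (* A map is a dependent pair of a key set and an element function; the two
+       assertions below expose extensional equality of each component
+       (FSet.equal on the domains, feq on the element functions), from which
+       m1 == m2 follows. *)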
( + assert (FSet.equal (domain m1) (domain m2)); + assert (feq (elements m1) (elements m2)) + ) + ) + +let disjoint_lemma () +: Lemma (disjoint_fact u#b) = + () + +let all_finite_map_facts_lemma (_:unit) + : Lemma (all_finite_map_facts u#b) + = cardinality_zero_iff_empty_lemma u#b (); + empty_or_domain_occupied_lemma u#b (); + empty_or_values_occupied_lemma u#b (); + empty_or_items_occupied_lemma u#b (); + map_cardinality_matches_domain_lemma u#b (); + values_contains_lemma u#b (); + items_contains_lemma u#b (); + empty_domain_empty_lemma u#b (); + glue_domain_lemma u#b (); + glue_elements_lemma u#b (); + insert_elements_lemma u#b (); + insert_member_cardinality_lemma u#b (); + insert_nonmember_cardinality_lemma u#b (); + merge_domain_is_union_lemma u#b (); + merge_element_lemma u#b (); + subtract_domain_lemma u#b (); + subtract_element_lemma u#b (); + map_equal_lemma u#b (); + map_extensionality_lemma u#b (); + disjoint_lemma u#b () diff --git a/stage0/ulib/FStar.FiniteMap.Base.fsti b/stage0/ulib/FStar.FiniteMap.Base.fsti new file mode 100644 index 00000000000..0c4d69c4a1b --- /dev/null +++ b/stage0/ulib/FStar.FiniteMap.Base.fsti @@ -0,0 +1,463 @@ +(* + Copyright 2008-2021 John Li, Jay Lorch, Rustan Leino, Alex Summers, + Dan Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module declares a type and functions used for modeling +finite maps as they're modeled in Dafny. + +@summary Type and functions for modeling finite maps +*) +module FStar.FiniteMap.Base + +open FStar.FunctionalExtensionality +module FLT = FStar.List.Tot +module FSet = FStar.FiniteSet.Base + +type setfun_t (a: eqtype) + (b: Type u#b) + (s: FSet.set a) = + f: (a ^-> option b){forall (key: a). FSet.mem key s == Some? (f key)} + +val map (a: eqtype) ([@@@ strictly_positive] b: Type u#b) + : Type u#b + +(** + We translate each Dafny sequence function prefixed with `Map#` + into an F* function. 
+**) + +/// We represent the Dafny function `Map#Domain` with `domain`: +/// +/// function Map#Domain(Map U V) : Set U; + +val domain (#a: eqtype) (#b: Type u#b) (m: map a b) + : FSet.set a + +/// We represent the Dafny function `Map#Elements` with `elements`: +/// +/// function Map#Elements(Map U V) : [U]V; + +val elements (#a: eqtype) (#b: Type u#b) (m: map a b) + : setfun_t a b (domain m) + +/// We represent the Dafny operator `in` on maps with `mem`: + +let mem (#a: eqtype) (#b: Type u#b) (key: a) (m: map a b) = + FSet.mem key (domain m) + +/// We can convert a map to a list of pairs with `map_as_list`: + +let rec key_in_item_list (#a: eqtype) (#b: Type u#b) (key: a) (items: list (a & b)) : bool = + match items with + | [] -> false + | (k, v) :: tl -> key = k || key_in_item_list key tl + +let rec item_list_doesnt_repeat_keys (#a: eqtype) (#b: Type u#b) (items: list (a & b)) : bool = + match items with + | [] -> true + | (k, v) :: tl -> not (key_in_item_list k tl) && item_list_doesnt_repeat_keys tl + +val map_as_list (#a: eqtype) (#b: Type u#b) (m: map a b) + : GTot (items: list (a & b){item_list_doesnt_repeat_keys items /\ (forall key. key_in_item_list key items <==> mem key m)}) + +/// We represent the Dafny operator [] on maps with `lookup`: + +let lookup (#a: eqtype) (#b: Type u#b) (key: a) (m: map a b{mem key m}) + : b = + Some?.v ((elements m) key) + +/// We represent the Dafny function `Map#Card` with `cardinality`: +/// +/// function Map#Card(Map U V) : int; + +val cardinality (#a: eqtype) (#b: Type u#b) (m: map a b) + : GTot nat + +/// We represent the Dafny function `Map#Values` with `values`: +/// +/// function Map#Values(Map U V) : Set V; + +val values (#a: eqtype) (#b: Type u#b) (m: map a b) + : GTot (b -> prop) + +/// We represent the Dafny function `Map#Items` with `items`: +/// +/// function Map#Items(Map U V) : Set Box; + +val items (#a: eqtype) (#b: Type u#b) (m: map a b) + : GTot ((a & b) -> prop) + +/// We represent the Dafny function `Map#Empty` with `emptymap`: +/// +/// function Map#Empty(): Map U V; + +val emptymap (#a: eqtype) (#b: Type u#b) + : map a b + +/// We represent the Dafny function `Map#Glue` with `glue`. +/// +/// function Map#Glue([U]bool, [U]V, Ty): Map U V; + +val glue (#a: eqtype) (#b: Type u#b) (keys: FSet.set a) (f: setfun_t a b keys) + : map a b + +/// We represent the Dafny function `Map#Build` with `insert`: +/// +/// function Map#Build(Map U V, U, V): Map U V; + +val insert (#a: eqtype) (#b: Type u#b) (k: a) (v: b) (m: map a b) + : map a b + +/// We represent the Dafny function `Map#Merge` with `merge`: +/// +/// function Map#Merge(Map U V, Map U V): Map U V; + +val merge (#a: eqtype) (#b: Type u#b) (m1: map a b) (m2: map a b) + : map a b + +/// We represent the Dafny function `Map#Subtract` with `subtract`: +/// +/// function Map#Subtract(Map U V, Set U): Map U V; + +val subtract (#a: eqtype) (#b: Type u#b) (m: map a b) (s: FSet.set a) + : map a b + +/// We represent the Dafny function `Map#Equal` with `equal`: +/// +/// function Map#Equal(Map U V, Map U V): bool; + +val equal (#a: eqtype) (#b: Type u#b) (m1: map a b) (m2: map a b) + : prop + +/// We represent the Dafny function `Map#Disjoint` with `disjoint`: +/// +/// function Map#Disjoint(Map U V, Map U V): bool; + +val disjoint (#a: eqtype) (#b: Type u#b) (m1: map a b) (m2: map a b) + : prop + +/// We represent the Dafny choice operator by `choose`: +/// +/// var x: T :| x in s; + +val choose (#a: eqtype) (#b: Type u#b) (m: map a b{exists key. 
mem key m}) + : GTot (key: a{mem key m}) + +/// We add the utility functions `remove` and `notin`: + +let remove (#a: eqtype) (#b: Type u#b) (key: a) (m: map a b) + : map a b = + subtract m (FSet.singleton key) + +let notin (#a: eqtype) (#b: Type u#b) (key: a) (m: map a b) + : bool = + not (mem key m) + +(** + We translate each finite map axiom from the Dafny prelude into an F* + predicate ending in `_fact`. +**) + +/// We don't need the following axiom since we return a nat from cardinality: +/// +/// axiom (forall m: Map U V :: { Map#Card(m) } 0 <= Map#Card(m)); + +/// We represent the following Dafny axiom with `cardinality_zero_iff_empty_fact`: +/// +/// axiom (forall m: Map U V :: +/// { Map#Card(m) } +/// Map#Card(m) == 0 <==> m == Map#Empty()); + +let cardinality_zero_iff_empty_fact = + forall (a: eqtype) (b:Type u#b) (m: map a b).{:pattern cardinality m} + cardinality m = 0 <==> m == emptymap + +/// We represent the following Dafny axiom with `empty_or_domain_occupied_fact`: +/// +/// axiom (forall m: Map U V :: +/// { Map#Domain(m) } +/// m == Map#Empty() || (exists k: U :: Map#Domain(m)[k])); + +let empty_or_domain_occupied_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b).{:pattern domain m} + m == emptymap \/ (exists k.{:pattern mem k m} mem k m) + +/// We represent the following Dafny axiom with `empty_or_values_occupied_fact`: +/// +/// axiom (forall m: Map U V :: +/// { Map#Values(m) } +/// m == Map#Empty() || (exists v: V :: Map#Values(m)[v])); + +let empty_or_values_occupied_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b).{:pattern values m} + m == emptymap \/ (exists v. {:pattern values m v } (values m) v) + +/// We represent the following Dafny axiom with `empty_or_items_occupied_fact`: +/// +/// axiom (forall m: Map U V :: +/// { Map#Items(m) } +/// m == Map#Empty() || (exists k, v: Box :: Map#Items(m)[$Box(#_System._tuple#2._#Make2(k, v))])); + +let empty_or_items_occupied_fact = + forall (a: eqtype) (b:Type u#b) (m: map a b).{:pattern items m} + m == emptymap \/ (exists item. {:pattern items m item } (items m) item) + +/// We represent the following Dafny axiom with `map_cardinality_matches_domain_fact`: +/// +/// axiom (forall m: Map U V :: +/// { Set#Card(Map#Domain(m)) } +/// Set#Card(Map#Domain(m)) == Map#Card(m)); + +let map_cardinality_matches_domain_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b).{:pattern FSet.cardinality (domain m)} + FSet.cardinality (domain m) = cardinality m + +/// We don't use the following Dafny axioms, which would require +/// treating the values and items as finite sets, which we can't do +/// because we want to allow non-eqtypes as values. 
+/// +/// axiom (forall m: Map U V :: +/// { Set#Card(Map#Values(m)) } +/// Set#Card(Map#Values(m)) <= Map#Card(m)); +/// axiom (forall m: Map U V :: +/// { Set#Card(Map#Items(m)) } +/// Set#Card(Map#Items(m)) == Map#Card(m)); + +/// We represent the following Dafny axiom with `values_contains_fact`: +/// +/// axiom (forall m: Map U V, v: V :: { Map#Values(m)[v] } +/// Map#Values(m)[v] == +/// (exists u: U :: { Map#Domain(m)[u] } { Map#Elements(m)[u] } +/// Map#Domain(m)[u] && +/// v == Map#Elements(m)[u])); + +let values_contains_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b) (v: b).{:pattern (values m) v} + (values m) v <==> + (exists (u: a).{:pattern FSet.mem u (domain m) \/ ((elements m) u)} + FSet.mem u (domain m) /\ (elements m) u == Some v) + +/// We represent the following Dafny axiom with `items_contains_fact`: +/// +/// axiom (forall m: Map Box Box, item: Box :: { Map#Items(m)[item] } +/// Map#Items(m)[item] <==> +/// Map#Domain(m)[_System.Tuple2._0($Unbox(item))] && +/// Map#Elements(m)[_System.Tuple2._0($Unbox(item))] == _System.Tuple2._1($Unbox(item))); + +let items_contains_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b) (item: a & b).{:pattern (items m) item} + (items m) item <==> + FSet.mem (fst item) (domain m) + /\ (elements m) (fst item) == Some (snd item) + +/// We represent the following Dafny axiom with `empty_domain_empty_fact`: +/// +/// axiom (forall u: U :: +/// { Map#Domain(Map#Empty(): Map U V)[u] } +/// !Map#Domain(Map#Empty(): Map U V)[u]); + +let empty_domain_empty_fact = + forall (a: eqtype) (b: Type u#b) (u: a).{:pattern FSet.mem u (domain (emptymap #a #b))} + not (FSet.mem u (domain (emptymap #a #b))) + +/// We represent the following Dafny axiom with `glue_domain_fact`: +/// +/// axiom (forall a: [U]bool, b: [U]V, t: Ty :: +/// { Map#Domain(Map#Glue(a, b, t)) } +/// Map#Domain(Map#Glue(a, b, t)) == a); + +let glue_domain_fact = + forall (a: eqtype) (b: Type u#b) (keys: FSet.set a) (f: setfun_t a b keys).{:pattern domain (glue keys f)} + domain (glue keys f) == keys + +/// We represent the following Dafny axiom with `glue_elements_fact`. +/// But we have to change it because our version of `Map#Elements` +/// returns a map to an optional value. 
+/// +/// axiom (forall a: [U]bool, b: [U]V, t: Ty :: +/// { Map#Elements(Map#Glue(a, b, t)) } +/// Map#Elements(Map#Glue(a, b, t)) == b); + +let glue_elements_fact = + forall (a: eqtype) (b: Type u#b) (keys: FSet.set a) (f: setfun_t a b keys).{:pattern elements (glue keys f)} + domain (glue keys f) == keys + /\ elements (glue keys f) == f + +/// We don't need the following Dafny axiom since the type of `glue` implies it: +/// +/// axiom (forall a: [Box]bool, b: [Box]Box, t0, t1: Ty :: +/// { Map#Glue(a, b, TMap(t0, t1)) } +/// // In the following line, no trigger needed, since the quantifier only gets used in negative contexts +/// (forall bx: Box :: a[bx] ==> $IsBox(bx, t0) && $IsBox(b[bx], t1)) +/// ==> +/// $Is(Map#Glue(a, b, TMap(t0, t1)), TMap(t0, t1))); + +/// We represent the following Dafny axiom with `insert_elements_fact`: +/// +/// axiom (forall m: Map U V, u: U, u': U, v: V :: +/// { Map#Domain(Map#Build(m, u, v))[u'] } { Map#Elements(Map#Build(m, u, v))[u'] } +/// (u' == u ==> Map#Domain(Map#Build(m, u, v))[u'] && +/// Map#Elements(Map#Build(m, u, v))[u'] == v) && +/// (u' != u ==> Map#Domain(Map#Build(m, u, v))[u'] == Map#Domain(m)[u'] && +/// Map#Elements(Map#Build(m, u, v))[u'] == Map#Elements(m)[u'])); + +let insert_elements_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b) (key: a) (key': a) (value: b). + {:pattern FSet.mem key' (domain (insert key value m)) \/ ((elements (insert key value m)) key')} + (key' = key ==> FSet.mem key' (domain (insert key value m)) + /\ (elements (insert key value m)) key' == Some value) + /\ (key' <> key ==> FSet.mem key' (domain (insert key value m)) = FSet.mem key' (domain m) + /\ (elements (insert key value m)) key' == (elements m) key') + +/// We represent the following Dafny axiom with `insert_member_cardinality_fact`: +/// +/// axiom (forall m: Map U V, u: U, v: V :: { Map#Card(Map#Build(m, u, v)) } +/// Map#Domain(m)[u] ==> Map#Card(Map#Build(m, u, v)) == Map#Card(m)); + +let insert_member_cardinality_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b) (key: a) (value: b).{:pattern cardinality (insert key value m)} + FSet.mem key (domain m) ==> cardinality (insert key value m) = cardinality m + +/// We represent the following Dafny axiom with `insert_nonmember_cardinality_fact`: +/// +/// axiom (forall m: Map U V, u: U, v: V :: { Map#Card(Map#Build(m, u, v)) } +/// !Map#Domain(m)[u] ==> Map#Card(Map#Build(m, u, v)) == Map#Card(m) + 1); + +let insert_nonmember_cardinality_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b) (key: a) (value: b).{:pattern cardinality (insert key value m)} + not (FSet.mem key (domain m)) ==> cardinality (insert key value m) = cardinality m + 1 + +/// We represent the following Dafny axiom with `merge_domain_is_union_fact`: +/// +/// axiom (forall m: Map U V, n: Map U V :: +/// { Map#Domain(Map#Merge(m, n)) } +/// Map#Domain(Map#Merge(m, n)) == Set#Union(Map#Domain(m), Map#Domain(n))); + +let merge_domain_is_union_fact = + forall (a: eqtype) (b: Type u#b) (m1: map a b) (m2: map a b).{:pattern domain (merge m1 m2)} + domain (merge m1 m2) == FSet.union (domain m1) (domain m2) + +/// We represent the following Dafny axiom with `merge_element_fact`: +/// +/// axiom (forall m: Map U V, n: Map U V, u: U :: +/// { Map#Elements(Map#Merge(m, n))[u] } +/// Map#Domain(Map#Merge(m, n))[u] ==> +/// (!Map#Domain(n)[u] ==> Map#Elements(Map#Merge(m, n))[u] == Map#Elements(m)[u]) && +/// (Map#Domain(n)[u] ==> Map#Elements(Map#Merge(m, n))[u] == Map#Elements(n)[u])); + +let merge_element_fact = + forall (a: 
eqtype) (b: Type u#b) (m1: map a b) (m2: map a b) (key: a).{:pattern (elements (merge m1 m2)) key} + FSet.mem key (domain (merge m1 m2)) ==> + (not (FSet.mem key (domain m2)) ==> FSet.mem key (domain m1) /\ (elements (merge m1 m2)) key == (elements m1) key) + /\ (FSet.mem key (domain m2) ==> (elements (merge m1 m2)) key == (elements m2) key) + +/// We represent the following Dafny axiom with `subtract_domain_fact`: +/// +/// axiom (forall m: Map U V, s: Set U :: +/// { Map#Domain(Map#Subtract(m, s)) } +/// Map#Domain(Map#Subtract(m, s)) == Set#Difference(Map#Domain(m), s)); + +let subtract_domain_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b) (s: FSet.set a).{:pattern domain (subtract m s)} + domain (subtract m s) == FSet.difference (domain m) s + +/// We represent the following Dafny axiom with `subtract_element_fact`: +/// +/// axiom (forall m: Map U V, s: Set U, u: U :: +/// { Map#Elements(Map#Subtract(m, s))[u] } +/// Map#Domain(Map#Subtract(m, s))[u] ==> +/// Map#Elements(Map#Subtract(m, s))[u] == Map#Elements(m)[u]); + +let subtract_element_fact = + forall (a: eqtype) (b: Type u#b) (m: map a b) (s: FSet.set a) (key: a).{:pattern (elements (subtract m s)) key} + FSet.mem key (domain (subtract m s)) ==> FSet.mem key (domain m) /\ (elements (subtract m s)) key == (elements m) key + +/// We represent the following Dafny axiom with `map_equal_fact`: +/// +/// axiom (forall m: Map U V, m': Map U V:: +/// { Map#Equal(m, m') } +/// Map#Equal(m, m') <==> (forall u : U :: Map#Domain(m)[u] == Map#Domain(m')[u]) && +/// (forall u : U :: Map#Domain(m)[u] ==> Map#Elements(m)[u] == Map#Elements(m')[u])); + +let map_equal_fact = + forall (a: eqtype) (b: Type u#b) (m1: map a b) (m2: map a b).{:pattern equal m1 m2} + equal m1 m2 <==> (forall key. FSet.mem key (domain m1) = FSet.mem key (domain m2)) + /\ (forall key. FSet.mem key (domain m1) ==> (elements m1) key == (elements m2) key) + +/// We represent the following Dafny axiom with `map_extensionality_fact`: +/// +/// axiom (forall m: Map U V, m': Map U V:: +/// { Map#Equal(m, m') } +/// Map#Equal(m, m') ==> m == m'); + +let map_extensionality_fact = + forall (a: eqtype) (b: Type u#b) (m1: map a b) (m2: map a b).{:pattern equal m1 m2} + equal m1 m2 ==> m1 == m2 + +/// We represent the following Dafny axiom with `disjoint_fact`: +/// +/// axiom (forall m: Map U V, m': Map U V :: +/// { Map#Disjoint(m, m') } +/// Map#Disjoint(m, m') <==> (forall o: U :: {Map#Domain(m)[o]} {Map#Domain(m')[o]} !Map#Domain(m)[o] || !Map#Domain(m')[o])); + +let disjoint_fact = + forall (a: eqtype) (b: Type u#b) (m1: map a b) (m2: map a b).{:pattern disjoint m1 m2} + disjoint m1 m2 <==> (forall key.{:pattern FSet.mem key (domain m1) \/ FSet.mem key (domain m2)} + not (FSet.mem key (domain m1)) || not (FSet.mem key (domain m2))) + +(** + The predicate `all_finite_map_facts` collects all the Dafny finite-map axioms. + One can bring all these facts into scope with `all_finite_map_facts_lemma ()`. 
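+
+  As an illustrative sketch (the name below is ours, not part of this
+  interface), a client module would typically write:
+
+    let client_empty_iff_card_zero (m: map int bool)
+      : Lemma (cardinality m = 0 <==> m == emptymap)
+      = all_finite_map_facts_lemma ()
+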
+**) + +let all_finite_map_facts = + cardinality_zero_iff_empty_fact u#b + /\ empty_or_domain_occupied_fact u#b + /\ empty_or_values_occupied_fact u#b + /\ empty_or_items_occupied_fact u#b + /\ map_cardinality_matches_domain_fact u#b + /\ values_contains_fact u#b + /\ items_contains_fact u#b + /\ empty_domain_empty_fact u#b + /\ glue_domain_fact u#b + /\ glue_elements_fact u#b + /\ insert_elements_fact u#b + /\ insert_member_cardinality_fact u#b + /\ insert_nonmember_cardinality_fact u#b + /\ merge_domain_is_union_fact u#b + /\ merge_element_fact u#b + /\ subtract_domain_fact u#b + /\ subtract_element_fact u#b + /\ map_equal_fact u#b + /\ map_extensionality_fact u#b + /\ disjoint_fact u#b + +val all_finite_map_facts_lemma (_:unit) + : Lemma (all_finite_map_facts u#b) diff --git a/stage0/ulib/FStar.FiniteSet.Ambient.fst b/stage0/ulib/FStar.FiniteSet.Ambient.fst new file mode 100644 index 00000000000..633e9af7432 --- /dev/null +++ b/stage0/ulib/FStar.FiniteSet.Ambient.fst @@ -0,0 +1,44 @@ +(* + Copyright 2008-2021 John Li, Jay Lorch, Rustan Leino, Alex Summers, + Dan Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module brings properties about finite sets ambiently into the +context. The properties are modeled after those in the Dafny sequence +axioms, with patterns for quantifiers chosen as in those axioms. + +@summary Puts properties of finite sets into the ambient context +*) +module FStar.FiniteSet.Ambient + +open FStar.FiniteSet.Base + +let all_finite_set_facts_ambient : (squash all_finite_set_facts) = + all_finite_set_facts_lemma () + diff --git a/stage0/ulib/FStar.FiniteSet.Base.fst b/stage0/ulib/FStar.FiniteSet.Base.fst new file mode 100644 index 00000000000..d0f869204a6 --- /dev/null +++ b/stage0/ulib/FStar.FiniteSet.Base.fst @@ -0,0 +1,481 @@ +(* + Copyright 2008-2021 John Li, Jay Lorch, Rustan Leino, Alex Summers, + Dan Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module declares a type and functions used for modeling +finite sets as they're modeled in Dafny. + +@summary Type and functions for modeling finite sets +*) +module FStar.FiniteSet.Base + +module FLT = FStar.List.Tot +open FStar.FunctionalExtensionality + +let has_elements (#a: eqtype) (f: a ^-> bool) (xs: list a): prop = + forall x. f x == x `FLT.mem` xs + +// Finite sets +type set (a: eqtype) = f:(a ^-> bool){exists xs. f `has_elements` xs} + +/// We represent the Dafny function [] on sets with `mem`: + +let mem (#a: eqtype) (x: a) (s: set a) : bool = + s x + +/// We represent the Dafny function `Set#Card` with `cardinality`: +/// +/// function Set#Card(Set T): int; + +let rec remove_repeats (#a: eqtype) (xs: list a) +: (ys: list a{list_nonrepeating ys /\ (forall y. FLT.mem y ys <==> FLT.mem y xs)}) = + match xs with + | [] -> [] + | hd :: tl -> let tl' = remove_repeats tl in if FLT.mem hd tl then tl' else hd :: tl' + +let set_as_list (#a: eqtype) (s: set a): GTot (xs: list a{list_nonrepeating xs /\ (forall x. FLT.mem x xs = mem x s)}) = + remove_repeats (FStar.IndefiniteDescription.indefinite_description_ghost (list a) (fun xs -> forall x. FLT.mem x xs = mem x s)) + +[@"opaque_to_smt"] +let cardinality (#a: eqtype) (s: set a) : GTot nat = + FLT.length (set_as_list s) + +let intro_set (#a: eqtype) (f: a ^-> bool) (xs: Ghost.erased (list a)) +: Pure (set a) + (requires f `has_elements` xs) + (ensures fun _ -> True) += Classical.exists_intro (fun xs -> f `has_elements` xs) xs; + f + +/// We represent the Dafny function `Set#Empty` with `empty`: + +let emptyset (#a: eqtype): set a = intro_set (on_dom a (fun _ -> false)) [] + +/// We represent the Dafny function `Set#UnionOne` with `insert`: +/// +/// function Set#UnionOne(Set T, T): Set T; + +let insert (#a: eqtype) (x: a) (s: set a): set a = + intro_set (on_dom _ (fun x' -> x = x' || s x')) (x :: set_as_list s) + +/// We represent the Dafny function `Set#Singleton` with `singleton`: +/// +/// function Set#Singleton(T): Set T; + +let singleton (#a: eqtype) (x: a) : set a = + insert x emptyset + +/// We represent the Dafny function `Set#Union` with `union`: +/// +/// function Set#Union(Set T, Set T): Set T; + +let rec union_lists (#a: eqtype) (xs: list a) (ys: list a) : (zs: list a{forall z. FLT.mem z zs <==> FLT.mem z xs \/ FLT.mem z ys}) = + match xs with + | [] -> ys + | hd :: tl -> hd :: union_lists tl ys + +let union (#a: eqtype) (s1: set a) (s2: set a) : (set a) = + intro_set (on_dom a (fun x -> s1 x || s2 x)) (union_lists (set_as_list s1) (set_as_list s2)) + +/// We represent the Dafny function `Set#Intersection` with `intersection`: +/// +/// function Set#Intersection(Set T, Set T): Set T; + +let rec intersect_lists (#a: eqtype) (xs: list a) (ys: list a) +: (zs: list a{forall z. 
FLT.mem z zs <==> FLT.mem z xs /\ FLT.mem z ys}) = + match xs with + | [] -> [] + | hd :: tl -> let zs' = intersect_lists tl ys in if FLT.mem hd ys then hd :: zs' else zs' + +let intersection (#a: eqtype) (s1: set a) (s2: set a) : set a = + intro_set (on_dom a (fun x -> s1 x && s2 x)) (intersect_lists (set_as_list s1) (set_as_list s2)) + +/// We represent the Dafny function `Set#Difference` with `difference`: +/// +/// function Set#Difference(Set T, Set T): Set T; + +let rec difference_lists (#a: eqtype) (xs: list a) (ys: list a) +: (zs: list a{forall z. FLT.mem z zs <==> FLT.mem z xs /\ ~(FLT.mem z ys)}) = + match xs with + | [] -> [] + | hd :: tl -> let zs' = difference_lists tl ys in if FLT.mem hd ys then zs' else hd :: zs' + +let difference (#a: eqtype) (s1: set a) (s2: set a) : set a = + intro_set (on_dom a (fun x -> s1 x && not (s2 x))) (difference_lists (set_as_list s1) (set_as_list s2)) + +/// We represent the Dafny function `Set#Subset` with `subset`: +/// +/// function Set#Subset(Set T, Set T): bool; + +let subset (#a: eqtype) (s1: set a) (s2: set a) : Type0 = + forall x. (s1 x = true) ==> (s2 x = true) + +/// We represent the Dafny function `Set#Equal` with `equal`: +/// +/// function Set#Equal(Set T, Set T): bool; + +let equal (#a: eqtype) (s1: set a) (s2: set a) : Type0 = + feq s1 s2 + +/// We represent the Dafny function `Set#Disjoint` with `disjoint`: +/// +/// function Set#Disjoint(Set T, Set T): bool; + +let disjoint (#a: eqtype) (s1: set a) (s2: set a) : Type0 = + forall x. not (s1 x && s2 x) + +/// We represent the Dafny choice operator by `choose`: +/// +/// var x: T :| x in s; + +let choose (#a: eqtype) (s: set a{exists x. mem x s}) : GTot (x: a{mem x s}) = + Cons?.hd (set_as_list s) + +/// We now prove each of the facts that comprise `all_finite_set_facts`. +/// For fact `xxx_fact`, we prove it with `xxx_lemma`. Sometimes, that +/// requires a helper lemma, which we call `xxx_helper`. + +let empty_set_contains_no_elements_lemma () +: Lemma (empty_set_contains_no_elements_fact) = + () + +let length_zero_lemma () +: Lemma (length_zero_fact) = + introduce forall (a: eqtype) (s: set a). (cardinality s = 0 <==> s == emptyset) /\ (cardinality s <> 0 <==> (exists x. mem x s)) + with ( + reveal_opaque (`%cardinality) (cardinality #a); + introduce cardinality s = 0 ==> s == emptyset + with _. assert (feq s emptyset); + introduce s == emptyset ==> cardinality s = 0 + with _. assert (set_as_list s == []); + introduce cardinality s <> 0 ==> _ + with _. introduce exists x. mem x s + with (Cons?.hd (set_as_list s)) + and ()) + +let singleton_contains_argument_lemma () +: Lemma (singleton_contains_argument_fact) = + () + +let singleton_contains_lemma () +: Lemma (singleton_contains_fact) = + () + +let rec singleton_cardinality_helper (#a: eqtype) (r: a) (xs: list a) +: Lemma (requires FLT.mem r xs /\ (forall x. FLT.mem x xs <==> x = r)) + (ensures remove_repeats xs == [r]) = + match xs with + | [x] -> () + | hd :: tl -> + assert (Cons?.hd tl = r); + singleton_cardinality_helper r tl + +let singleton_cardinality_lemma () +: Lemma (singleton_cardinality_fact) = + introduce forall (a: eqtype) (r: a). 
cardinality (singleton r) = 1 + with ( + reveal_opaque (`%cardinality) (cardinality #a); + singleton_cardinality_helper r (set_as_list (singleton r)) + ) + +let insert_lemma () +: Lemma (insert_fact) = + () + +let insert_contains_argument_lemma () +: Lemma (insert_contains_argument_fact) = + () + +let insert_contains_lemma () +: Lemma (insert_contains_fact) = + () + +let rec remove_from_nonrepeating_list (#a: eqtype) (x: a) (xs: list a{FLT.mem x xs /\ list_nonrepeating xs}) +: (xs': list a{ list_nonrepeating xs' + /\ FLT.length xs' = FLT.length xs - 1 + /\ (forall y. FLT.mem y xs' <==> FLT.mem y xs /\ y <> x)}) = + match xs with + | hd :: tl -> if x = hd then tl else hd :: (remove_from_nonrepeating_list x tl) + +let rec nonrepeating_lists_with_same_elements_have_same_length (#a: eqtype) (s1: list a) (s2: list a) +: Lemma (requires list_nonrepeating s1 /\ list_nonrepeating s2 /\ (forall x. FLT.mem x s1 <==> FLT.mem x s2)) + (ensures FLT.length s1 = FLT.length s2) = + match s1 with + | [] -> () + | hd :: tl -> nonrepeating_lists_with_same_elements_have_same_length tl (remove_from_nonrepeating_list hd s2) + +let insert_member_cardinality_lemma () +: Lemma (insert_member_cardinality_fact) = + introduce forall (a: eqtype) (s: set a) (x: a). mem x s ==> cardinality (insert x s) = cardinality s + with + introduce mem x s ==> cardinality (insert x s) = cardinality s + with _. ( + reveal_opaque (`%cardinality) (cardinality #a); + nonrepeating_lists_with_same_elements_have_same_length (set_as_list s) (set_as_list (insert x s)) + ) + +let insert_nonmember_cardinality_lemma () +: Lemma (insert_nonmember_cardinality_fact) = + introduce forall (a: eqtype) (s: set a) (x: a). not (mem x s) ==> cardinality (insert x s) = cardinality s + 1 + with + introduce not (mem x s) ==> cardinality (insert x s) = cardinality s + 1 + with _. ( + reveal_opaque (`%cardinality) (cardinality #a); + nonrepeating_lists_with_same_elements_have_same_length (x :: (set_as_list s)) (set_as_list (insert x s)) + ) + +let union_contains_lemma () +: Lemma (union_contains_fact) = + () + +let union_contains_element_from_first_argument_lemma () +: Lemma (union_contains_element_from_first_argument_fact) = + () + +let union_contains_element_from_second_argument_lemma () +: Lemma (union_contains_element_from_second_argument_fact) = + () + +let union_of_disjoint_lemma () +: Lemma (union_of_disjoint_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). disjoint s1 s2 ==> difference (union s1 s2) s1 == s2 /\ difference (union s1 s2) s2 == s1 + with + introduce disjoint s1 s2 ==> difference (union s1 s2) s1 == s2 /\ difference (union s1 s2) s2 == s1 + with _. ( + assert (feq (difference (union s1 s2) s1) s2); + assert (feq (difference (union s1 s2) s2) s1) + ) + +let intersection_contains_lemma () +: Lemma (intersection_contains_fact) = + () + +let union_idempotent_right_lemma () +: Lemma (union_idempotent_right_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). union (union s1 s2) s2 == union s1 s2 + with assert (feq (union (union s1 s2) s2) (union s1 s2)) + +let union_idempotent_left_lemma () +: Lemma (union_idempotent_left_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). union s1 (union s1 s2) == union s1 s2 + with assert (feq (union s1 (union s1 s2)) (union s1 s2)) + +let intersection_idempotent_right_lemma () +: Lemma (intersection_idempotent_right_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). 
intersection (intersection s1 s2) s2 == intersection s1 s2 + with assert (feq (intersection (intersection s1 s2) s2) (intersection s1 s2)) + +let intersection_idempotent_left_lemma () +: Lemma (intersection_idempotent_left_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). intersection s1 (intersection s1 s2) == intersection s1 s2 + with assert (feq (intersection s1 (intersection s1 s2)) (intersection s1 s2)) + +let rec union_of_disjoint_nonrepeating_lists_length_lemma (#a: eqtype) (xs1: list a) (xs2: list a) (xs3: list a) +: Lemma (requires list_nonrepeating xs1 + /\ list_nonrepeating xs2 + /\ list_nonrepeating xs3 + /\ (forall x. ~(FLT.mem x xs1 /\ FLT.mem x xs2)) + /\ (forall x. FLT.mem x xs3 <==> FLT.mem x xs1 \/ FLT.mem x xs2)) + (ensures FLT.length xs3 = FLT.length xs1 + FLT.length xs2) = + match xs1 with + | [] -> nonrepeating_lists_with_same_elements_have_same_length xs2 xs3 + | hd :: tl -> union_of_disjoint_nonrepeating_lists_length_lemma tl xs2 (remove_from_nonrepeating_list hd xs3) + +let union_of_disjoint_sets_cardinality_lemma (#a: eqtype) (s1: set a) (s2: set a) +: Lemma (requires disjoint s1 s2) + (ensures cardinality (union s1 s2) = cardinality s1 + cardinality s2) = + reveal_opaque (`%cardinality) (cardinality #a); + union_of_disjoint_nonrepeating_lists_length_lemma (set_as_list s1) (set_as_list s2) (set_as_list (union s1 s2)) + +let union_of_three_disjoint_sets_cardinality_lemma (#a: eqtype) (s1: set a) (s2: set a) (s3: set a) +: Lemma (requires disjoint s1 s2 /\ disjoint s2 s3 /\ disjoint s1 s3) + (ensures cardinality (union (union s1 s2) s3) = cardinality s1 + cardinality s2 + cardinality s3) = + union_of_disjoint_sets_cardinality_lemma s1 s2; + union_of_disjoint_sets_cardinality_lemma (union s1 s2) s3 + +#restart-solver +#push-options "--z3rlimit_factor 8 --split_queries no" +let cardinality_matches_difference_plus_intersection_lemma (#a: eqtype) (s1: set a) (s2: set a) +: Lemma (ensures cardinality s1 = cardinality (difference s1 s2) + cardinality (intersection s1 s2)) = + union_of_disjoint_sets_cardinality_lemma (difference s1 s2) (intersection s1 s2); + assert (feq s1 (union (difference s1 s2) (intersection s1 s2))) +#pop-options +#restart-solver +let union_is_differences_and_intersection (#a: eqtype) (s1: set a) (s2: set a) +: Lemma (union s1 s2 == union (union (difference s1 s2) (intersection s1 s2)) (difference s2 s1)) = + assert (feq (union s1 s2) (union (union (difference s1 s2) (intersection s1 s2)) (difference s2 s1))) + +#restart-solver +#push-options "--z3rlimit_factor 8 --split_queries no" +let intersection_cardinality_helper (a: eqtype) (s1: set a) (s2: set a) +: Lemma (cardinality (union s1 s2) + cardinality (intersection s1 s2) = cardinality s1 + cardinality s2) = + cardinality_matches_difference_plus_intersection_lemma s1 s2; + cardinality_matches_difference_plus_intersection_lemma s2 s1; + union_is_differences_and_intersection s1 s2; + union_of_three_disjoint_sets_cardinality_lemma (difference s1 s2) (intersection s1 s2) (difference s2 s1); + assert (feq (intersection s1 s2) (intersection s2 s1)) +#pop-options + +let intersection_cardinality_lemma () +: Lemma (intersection_cardinality_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). 
+ cardinality (union s1 s2) + cardinality (intersection s1 s2) = cardinality s1 + cardinality s2 + with + intersection_cardinality_helper a s1 s2 + +let difference_contains_lemma () +: Lemma (difference_contains_fact) = + () + +let difference_doesnt_include_lemma () +: Lemma (difference_doesnt_include_fact) = + () + +#restart-solver +#push-options "--z3rlimit_factor 8 --split_queries no" +let difference_cardinality_helper (a: eqtype) (s1: set a) (s2: set a) +: Lemma ( cardinality (difference s1 s2) + cardinality (difference s2 s1) + cardinality (intersection s1 s2) = cardinality (union s1 s2) + /\ cardinality (difference s1 s2) = cardinality s1 - cardinality (intersection s1 s2)) = + union_is_differences_and_intersection s1 s2; + union_of_three_disjoint_sets_cardinality_lemma (difference s1 s2) (intersection s1 s2) (difference s2 s1); + cardinality_matches_difference_plus_intersection_lemma s1 s2 +#pop-options + +let difference_cardinality_lemma () +: Lemma (difference_cardinality_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). + cardinality (difference s1 s2) + cardinality (difference s2 s1) + + cardinality (intersection s1 s2) = cardinality (union s1 s2) + /\ cardinality (difference s1 s2) = cardinality s1 - cardinality (intersection s1 s2) + with + difference_cardinality_helper a s1 s2 + +let subset_helper (a: eqtype) (s1: set a) (s2: set a) +: Lemma (subset s1 s2 <==> (forall o.{:pattern mem o s1 \/ mem o s2} mem o s1 ==> mem o s2)) = + introduce (forall o.{:pattern mem o s1 \/ mem o s2} mem o s1 ==> mem o s2) ==> subset s1 s2 + with _. + introduce forall x. s1 x = true ==> s2 x = true + with assert (mem x s1 = s1 x) + +let subset_lemma () +: Lemma (subset_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). subset s1 s2 <==> (forall o.{:pattern mem o s1 \/ mem o s2} mem o s1 ==> mem o s2) + with subset_helper a s1 s2 + +let equal_lemma () +: Lemma (equal_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). + equal s1 s2 <==> (forall o.{:pattern mem o s1 \/ mem o s2} mem o s1 <==> mem o s2) + with ( + introduce (forall o.{:pattern mem o s1 \/ mem o s2} mem o s1 <==> mem o s2) ==> equal s1 s2 + with _. + introduce forall x. s1 x = true <==> s2 x = true + with assert (mem x s1 = s1 x /\ mem x s2 = s2 x) + ) + +let equal_extensionality_lemma () +: Lemma (equal_extensionality_fact) = + () + +let disjoint_lemma () +: Lemma (disjoint_fact) = + introduce forall (a: eqtype) (s1: set a) (s2: set a). + disjoint s1 s2 <==> (forall o.{:pattern mem o s1 \/ mem o s2} not (mem o s1) \/ not (mem o s2)) + with ( + introduce (forall o.{:pattern mem o s1 \/ mem o s2} not (mem o s1) \/ not (mem o s2)) ==> disjoint s1 s2 + with _. ( + introduce forall x. not (s1 x && s2 x) + with assert (not (mem x s1) \/ not (mem x s2)) + ) + ) + +#restart-solver +#push-options "--z3rlimit_factor 8 --split_queries no" +let insert_remove_helper (a: eqtype) (x: a) (s: set a) +: Lemma (requires mem x s) + (ensures insert x (remove x s) == s) = + assert (feq s (insert x (remove x s))) +#pop-options +#restart-solver + +let insert_remove_lemma () +: Lemma (insert_remove_fact) = + introduce forall (a: eqtype) (x: a) (s: set a). mem x s = true ==> insert x (remove x s) == s + with + introduce mem x s = true ==> insert x (remove x s) == s + with _. 
insert_remove_helper a x s + +let remove_insert_helper (a: eqtype) (x: a) (s: set a) +: Lemma (requires mem x s = false) + (ensures remove x (insert x s) == s) = + assert (feq s (remove x (insert x s))) + +let remove_insert_lemma () +: Lemma (remove_insert_fact) = + introduce forall (a: eqtype) (x: a) (s: set a). mem x s = false ==> remove x (insert x s) == s + with introduce mem x s = false ==> remove x (insert x s) == s + with _. remove_insert_helper a x s + +let set_as_list_cardinality_lemma () +: Lemma (set_as_list_cardinality_fact) = + introduce forall (a: eqtype) (s: set a). FLT.length (set_as_list s) = cardinality s + with reveal_opaque (`%cardinality) (cardinality #a) + +let all_finite_set_facts_lemma () : Lemma (all_finite_set_facts) = + empty_set_contains_no_elements_lemma (); + length_zero_lemma (); + singleton_contains_argument_lemma (); + singleton_contains_lemma (); + singleton_cardinality_lemma (); + insert_lemma (); + insert_contains_argument_lemma (); + insert_contains_lemma (); + insert_member_cardinality_lemma (); + insert_nonmember_cardinality_lemma (); + union_contains_lemma (); + union_contains_element_from_first_argument_lemma (); + union_contains_element_from_second_argument_lemma (); + union_of_disjoint_lemma (); + intersection_contains_lemma (); + union_idempotent_right_lemma (); + union_idempotent_left_lemma (); + intersection_idempotent_right_lemma (); + intersection_idempotent_left_lemma (); + intersection_cardinality_lemma (); + difference_contains_lemma (); + difference_doesnt_include_lemma (); + difference_cardinality_lemma (); + subset_lemma (); + equal_lemma (); + equal_extensionality_lemma (); + disjoint_lemma (); + insert_remove_lemma (); + remove_insert_lemma (); + set_as_list_cardinality_lemma () diff --git a/stage0/ulib/FStar.FiniteSet.Base.fsti b/stage0/ulib/FStar.FiniteSet.Base.fsti new file mode 100644 index 00000000000..aef4169aa05 --- /dev/null +++ b/stage0/ulib/FStar.FiniteSet.Base.fsti @@ -0,0 +1,456 @@ +(* + Copyright 2008-2021 John Li, Jay Lorch, Rustan Leino, Alex Summers, + Dan Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module declares a type and functions used for modeling +finite sets as they're modeled in Dafny. + +@summary Type and functions for modeling finite sets +*) +module FStar.FiniteSet.Base + +open FStar.FunctionalExtensionality +module FLT = FStar.List.Tot + +val set (a: eqtype) + : Type0 + +(** + We translate each Dafny sequence function prefixed with `Set#` + into an F* function. 
+**) + +/// We represent the Dafny operator [] on sets with `mem`: + +val mem (#a: eqtype) (x: a) (s: set a) + : bool + +/// We can convert a set to a list with `set_as_list`: + +let rec list_nonrepeating (#a: eqtype) (xs: list a) : bool = + match xs with + | [] -> true + | hd :: tl -> not (FLT.mem hd tl) && list_nonrepeating tl + +val set_as_list (#a: eqtype) (s: set a) + : GTot (xs: list a{list_nonrepeating xs /\ (forall x. FLT.mem x xs = mem x s)}) + +/// We represent the Dafny function `Set#Card` with `cardinality`: +/// +/// function Set#Card(Set T): int; + +val cardinality (#a: eqtype) (s: set a) + : GTot nat + +/// We represent the Dafny function `Set#Empty` with `empty`: +/// +/// function Set#Empty(): Set T; + +val emptyset (#a: eqtype) + : set a + +/// We represent the Dafny function `Set#UnionOne` with `insert`: +/// +/// function Set#UnionOne(Set T, T): Set T; + +val insert (#a: eqtype) (x: a) (s: set a) + : set a + +/// We represent the Dafny function `Set#Singleton` with `singleton`: +/// +/// function Set#Singleton(T): Set T; + +val singleton (#a: eqtype) (x: a) + : set a + +/// We represent the Dafny function `Set#Union` with `union`: +/// +/// function Set#Union(Set T, Set T): Set T; + +val union (#a: eqtype) (s1: set a) (s2: set a) + : (set a) + +/// We represent the Dafny function `Set#Intersection` with `intersection`: +/// +/// function Set#Intersection(Set T, Set T): Set T; + +val intersection (#a: eqtype) (s1: set a) (s2: set a) + : set a + +/// We represent the Dafny function `Set#Difference` with `difference`: +/// +/// function Set#Difference(Set T, Set T): Set T; + +val difference (#a: eqtype) (s1: set a) (s2: set a) + : set a + +/// We represent the Dafny function `Set#Subset` with `subset`: +/// +/// function Set#Subset(Set T, Set T): bool; + +val subset (#a: eqtype) (s1: set a) (s2: set a) + : Type0 + +/// We represent the Dafny function `Set#Equal` with `equal`: +/// +/// function Set#Equal(Set T, Set T): bool; + +val equal (#a: eqtype) (s1: set a) (s2: set a) + : Type0 + +/// We represent the Dafny function `Set#Disjoint` with `disjoint`: +/// +/// function Set#Disjoint(Set T, Set T): bool; + +val disjoint (#a: eqtype) (s1: set a) (s2: set a) + : Type0 + +/// We represent the Dafny choice operator by `choose`: +/// +/// var x: T :| x in s; + +val choose (#a: eqtype) (s: set a{exists x. mem x s}) + : GTot (x: a{mem x s}) + +/// We add the utility functions `remove` and `notin`: + +let remove (#a: eqtype) (x: a) (s: set a) + : set a = + difference s (singleton x) + +let notin (#a: eqtype) (x: a) (s: set a) + : bool = + not (mem x s) + +(** + We translate each finite set axiom from the Dafny prelude into an F* + predicate ending in `_fact`. +**) + +/// We don't need the following axiom since we return a nat from cardinality: +/// +/// axiom (forall s: Set T :: { Set#Card(s) } 0 <= Set#Card(s)); + +/// We represent the following Dafny axiom with `empty_set_contains_no_elements_fact`: +/// +/// axiom (forall o: T :: { Set#Empty()[o] } !Set#Empty()[o]); + +let empty_set_contains_no_elements_fact = + forall (a: eqtype) (o: a).{:pattern mem o (emptyset)} not (mem o (emptyset #a)) + +/// We represent the following Dafny axiom with `length_zero_fact`: +/// +/// axiom (forall s: Set T :: { Set#Card(s) } +/// (Set#Card(s) == 0 <==> s == Set#Empty()) && +/// (Set#Card(s) != 0 ==> (exists x: T :: s[x]))); + +let length_zero_fact = + forall (a: eqtype) (s: set a).{:pattern cardinality s} + (cardinality s = 0 <==> s == emptyset) + /\ (cardinality s <> 0 <==> (exists x. 
mem x s)) + +/// We represent the following Dafny axiom with `singleton_contains_argument_fact`: +/// +/// axiom (forall r: T :: { Set#Singleton(r) } Set#Singleton(r)[r]); + +let singleton_contains_argument_fact = + forall (a: eqtype) (r: a).{:pattern singleton r} mem r (singleton r) + +/// We represent the following Dafny axiom with `singleton_contains_fact`: +/// +/// axiom (forall r: T, o: T :: { Set#Singleton(r)[o] } Set#Singleton(r)[o] <==> r == o); + +let singleton_contains_fact = + forall (a: eqtype) (r: a) (o: a).{:pattern mem o (singleton r)} mem o (singleton r) <==> r == o + +/// We represent the following Dafny axiom with `singleton_cardinality_fact`: +/// +/// axiom (forall r: T :: { Set#Card(Set#Singleton(r)) } Set#Card(Set#Singleton(r)) == 1); + +let singleton_cardinality_fact = + forall (a: eqtype) (r: a).{:pattern cardinality (singleton r)} cardinality (singleton r) = 1 + +/// We represent the following Dafny axiom with `insert_fact`: +/// +/// axiom (forall a: Set T, x: T, o: T :: { Set#UnionOne(a,x)[o] } +/// Set#UnionOne(a,x)[o] <==> o == x || a[o]); + +let insert_fact = + forall (a: eqtype) (s: set a) (x: a) (o: a).{:pattern mem o (insert x s)} + mem o (insert x s) <==> o == x \/ mem o s + +/// We represent the following Dafny axiom with `insert_contains_argument_fact`: +/// +/// axiom (forall a: Set T, x: T :: { Set#UnionOne(a, x) } +/// Set#UnionOne(a, x)[x]); + +let insert_contains_argument_fact = + forall (a: eqtype) (s: set a) (x: a).{:pattern insert x s} + mem x (insert x s) + +/// We represent the following Dafny axiom with `insert_contains_fact`: +/// +/// axiom (forall a: Set T, x: T, y: T :: { Set#UnionOne(a, x), a[y] } +/// a[y] ==> Set#UnionOne(a, x)[y]); + +let insert_contains_fact = + forall (a: eqtype) (s: set a) (x: a) (y: a).{:pattern insert x s; mem y s} + mem y s ==> mem y (insert x s) + +/// We represent the following Dafny axiom with `insert_member_cardinality_fact`: +/// +/// axiom (forall a: Set T, x: T :: { Set#Card(Set#UnionOne(a, x)) } +/// a[x] ==> Set#Card(Set#UnionOne(a, x)) == Set#Card(a)); + +let insert_member_cardinality_fact = + forall (a: eqtype) (s: set a) (x: a).{:pattern cardinality (insert x s)} + mem x s ==> cardinality (insert x s) = cardinality s + +/// We represent the following Dafny axiom with `insert_nonmember_cardinality_fact`: +/// +/// axiom (forall a: Set T, x: T :: { Set#Card(Set#UnionOne(a, x)) } +/// !a[x] ==> Set#Card(Set#UnionOne(a, x)) == Set#Card(a) + 1); + +let insert_nonmember_cardinality_fact = + forall (a: eqtype) (s: set a) (x: a).{:pattern cardinality (insert x s)} + not (mem x s) ==> cardinality (insert x s) = cardinality s + 1 + +/// We represent the following Dafny axiom with `union_contains_fact`: +/// +/// axiom (forall a: Set T, b: Set T, o: T :: { Set#Union(a,b)[o] } +/// Set#Union(a,b)[o] <==> a[o] || b[o]); + +let union_contains_fact = + forall (a: eqtype) (s1: set a) (s2: set a) (o: a).{:pattern mem o (union s1 s2)} + mem o (union s1 s2) <==> mem o s1 \/ mem o s2 + +/// We represent the following Dafny axiom with `union_contains_element_from_first_argument_fact`: +/// +/// axiom (forall a, b: Set T, y: T :: { Set#Union(a, b), a[y] } +/// a[y] ==> Set#Union(a, b)[y]); + +let union_contains_element_from_first_argument_fact = + forall (a: eqtype) (s1: set a) (s2: set a) (y: a).{:pattern union s1 s2; mem y s1} + mem y s1 ==> mem y (union s1 s2) + +/// We represent the following Dafny axiom with `union_contains_element_from_second_argument_fact`: +/// +/// axiom (forall a, b: Set T, y: T :: { 
Set#Union(a, b), a[y] } +/// b[y] ==> Set#Union(a, b)[y]); + +let union_contains_element_from_second_argument_fact = + forall (a: eqtype) (s1: set a) (s2: set a) (y: a).{:pattern union s1 s2; mem y s2} + mem y s2 ==> mem y (union s1 s2) + +/// We represent the following Dafny axiom with `union_of_disjoint_fact`: +/// +/// axiom (forall a, b: Set T :: { Set#Union(a, b) } +/// Set#Disjoint(a, b) ==> +/// Set#Difference(Set#Union(a, b), a) == b && +/// Set#Difference(Set#Union(a, b), b) == a); + +let union_of_disjoint_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern union s1 s2} + disjoint s1 s2 ==> difference (union s1 s2) s1 == s2 /\ difference (union s1 s2) s2 == s1 + +/// We represent the following Dafny axiom with `intersection_contains_fact`: +/// +/// axiom (forall a: Set T, b: Set T, o: T :: { Set#Intersection(a,b)[o] } +/// Set#Intersection(a,b)[o] <==> a[o] && b[o]); + +let intersection_contains_fact = + forall (a: eqtype) (s1: set a) (s2: set a) (o: a).{:pattern mem o (intersection s1 s2)} + mem o (intersection s1 s2) <==> mem o s1 /\ mem o s2 + +/// We represent the following Dafny axiom with `union_idempotent_right_fact`: +/// +/// axiom (forall a, b: Set T :: { Set#Union(Set#Union(a, b), b) } +/// Set#Union(Set#Union(a, b), b) == Set#Union(a, b)); + +let union_idempotent_right_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern union (union s1 s2) s2} + union (union s1 s2) s2 == union s1 s2 + +/// We represent the following Dafny axiom with `union_idempotent_left_fact`: +/// +/// axiom (forall a, b: Set T :: { Set#Union(a, Set#Union(a, b)) } +/// Set#Union(a, Set#Union(a, b)) == Set#Union(a, b)); + +let union_idempotent_left_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern union s1 (union s1 s2)} + union s1 (union s1 s2) == union s1 s2 + +/// We represent the following Dafny axiom with `intersection_idempotent_right_fact`: +/// +/// axiom (forall a, b: Set T :: { Set#Intersection(Set#Intersection(a, b), b) } +/// Set#Intersection(Set#Intersection(a, b), b) == Set#Intersection(a, b)); + +let intersection_idempotent_right_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern intersection (intersection s1 s2) s2} + intersection (intersection s1 s2) s2 == intersection s1 s2 + +/// We represent the following Dafny axiom with `intersection_idempotent_left_fact`: +/// +/// axiom (forall a, b: Set T :: { Set#Intersection(a, Set#Intersection(a, b)) } +/// Set#Intersection(a, Set#Intersection(a, b)) == Set#Intersection(a, b)); + +let intersection_idempotent_left_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern intersection s1 (intersection s1 s2)} + intersection s1 (intersection s1 s2) == intersection s1 s2 + +/// We represent the following Dafny axiom with `intersection_cardinality_fact`: +/// +/// axiom (forall a, b: Set T :: { Set#Card(Set#Union(a, b)) }{ Set#Card(Set#Intersection(a, b)) } +/// Set#Card(Set#Union(a, b)) + Set#Card(Set#Intersection(a, b)) == Set#Card(a) + Set#Card(b)); + +let intersection_cardinality_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern cardinality (intersection s1 s2)} + cardinality (union s1 s2) + cardinality (intersection s1 s2) = cardinality s1 + cardinality s2 + +/// We represent the following Dafny axiom with `difference_contains_fact`: +/// +/// axiom (forall a: Set T, b: Set T, o: T :: { Set#Difference(a,b)[o] } +/// Set#Difference(a,b)[o] <==> a[o] && !b[o]); + +let difference_contains_fact = + forall (a: eqtype) (s1: set a) (s2: set a) (o: a).{:pattern mem o (difference s1 s2)} + 
mem o (difference s1 s2) <==> mem o s1 /\ not (mem o s2) + +/// We represent the following Dafny axiom with `difference_doesnt_include_fact`: +/// +/// axiom (forall a, b: Set T, y: T :: { Set#Difference(a, b), b[y] } +/// b[y] ==> !Set#Difference(a, b)[y] ); + +let difference_doesnt_include_fact = + forall (a: eqtype) (s1: set a) (s2: set a) (y: a).{:pattern difference s1 s2; mem y s2} + mem y s2 ==> not (mem y (difference s1 s2)) + +/// We represent the following Dafny axiom with `difference_cardinality_fact`: +/// +/// axiom (forall a, b: Set T :: +/// { Set#Card(Set#Difference(a, b)) } +/// Set#Card(Set#Difference(a, b)) + Set#Card(Set#Difference(b, a)) +/// + Set#Card(Set#Intersection(a, b)) +/// == Set#Card(Set#Union(a, b)) && +/// Set#Card(Set#Difference(a, b)) == Set#Card(a) - Set#Card(Set#Intersection(a, b))); + +let difference_cardinality_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern cardinality (difference s1 s2)} + cardinality (difference s1 s2) + cardinality (difference s2 s1) + cardinality (intersection s1 s2) = cardinality (union s1 s2) + /\ cardinality (difference s1 s2) = cardinality s1 - cardinality (intersection s1 s2) + +/// We represent the following Dafny axiom with `subset_fact`: +/// +/// axiom(forall a: Set T, b: Set T :: { Set#Subset(a,b) } +/// Set#Subset(a,b) <==> (forall o: T :: {a[o]} {b[o]} a[o] ==> b[o])); + +let subset_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern subset s1 s2} + subset s1 s2 <==> (forall o.{:pattern mem o s1 \/ mem o s2} mem o s1 ==> mem o s2) + +/// We represent the following Dafny axiom with `equal_fact`: +/// +/// axiom(forall a: Set T, b: Set T :: { Set#Equal(a,b) } +/// Set#Equal(a,b) <==> (forall o: T :: {a[o]} {b[o]} a[o] <==> b[o])); + +let equal_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern equal s1 s2} + equal s1 s2 <==> (forall o.{:pattern mem o s1 \/ mem o s2} mem o s1 <==> mem o s2) + +/// We represent the following Dafny axiom with `equal_extensionality_fact`: +/// +/// axiom(forall a: Set T, b: Set T :: { Set#Equal(a,b) } // extensionality axiom for sets +/// Set#Equal(a,b) ==> a == b); + +let equal_extensionality_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern equal s1 s2} + equal s1 s2 ==> s1 == s2 + +/// We represent the following Dafny axiom with `disjoint_fact`: +/// +/// axiom (forall a: Set T, b: Set T :: { Set#Disjoint(a,b) } +/// Set#Disjoint(a,b) <==> (forall o: T :: {a[o]} {b[o]} !a[o] || !b[o])); + +let disjoint_fact = + forall (a: eqtype) (s1: set a) (s2: set a).{:pattern disjoint s1 s2} + disjoint s1 s2 <==> (forall o.{:pattern mem o s1 \/ mem o s2} not (mem o s1) \/ not (mem o s2)) + +/// We add a few more facts for the utility function `remove` and for `set_as_list`: + +let insert_remove_fact = + forall (a: eqtype) (x: a) (s: set a).{:pattern insert x (remove x s)} + mem x s = true ==> insert x (remove x s) == s + +let remove_insert_fact = + forall (a: eqtype) (x: a) (s: set a).{:pattern remove x (insert x s)} + mem x s = false ==> remove x (insert x s) == s + +let set_as_list_cardinality_fact = + forall (a: eqtype) (s: set a).{:pattern FLT.length (set_as_list s)} + FLT.length (set_as_list s) = cardinality s + +(** + The predicate `all_finite_set_facts` collects all the Dafny finite-set axioms. + One can bring all these facts into scope with `all_finite_set_facts_lemma ()`. 
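+
+  For example, the companion module `FStar.FiniteSet.Ambient` does so once
+  and for all with a top-level squash:
+
+    let all_finite_set_facts_ambient : (squash all_finite_set_facts) =
+      all_finite_set_facts_lemma ()
+
+  Alternatively, a client proof can call the lemma locally to bring the
+  facts into the SMT context only where they are needed.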
+**) + +let all_finite_set_facts = + empty_set_contains_no_elements_fact + /\ length_zero_fact + /\ singleton_contains_argument_fact + /\ singleton_contains_fact + /\ singleton_cardinality_fact + /\ insert_fact + /\ insert_contains_argument_fact + /\ insert_contains_fact + /\ insert_member_cardinality_fact + /\ insert_nonmember_cardinality_fact + /\ union_contains_fact + /\ union_contains_element_from_first_argument_fact + /\ union_contains_element_from_second_argument_fact + /\ union_of_disjoint_fact + /\ intersection_contains_fact + /\ union_idempotent_right_fact + /\ union_idempotent_left_fact + /\ intersection_idempotent_right_fact + /\ intersection_idempotent_left_fact + /\ intersection_cardinality_fact + /\ difference_contains_fact + /\ difference_doesnt_include_fact + /\ difference_cardinality_fact + /\ subset_fact + /\ equal_fact + /\ equal_extensionality_fact + /\ disjoint_fact + /\ insert_remove_fact + /\ remove_insert_fact + /\ set_as_list_cardinality_fact + +val all_finite_set_facts_lemma : unit -> Lemma (all_finite_set_facts) diff --git a/stage0/ulib/FStar.Float.fsti b/stage0/ulib/FStar.Float.fsti new file mode 100644 index 00000000000..42d9425a8d4 --- /dev/null +++ b/stage0/ulib/FStar.Float.fsti @@ -0,0 +1,25 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Float + +/// Support for floating point numbers in F* is nearly non-existent. +/// This module is a placeholder +assume new +type float : Type0 + +type double = float + diff --git a/stage0/ulib/FStar.FunctionalExtensionality.fst b/stage0/ulib/FStar.FunctionalExtensionality.fst new file mode 100644 index 00000000000..48103f9d5ac --- /dev/null +++ b/stage0/ulib/FStar.FunctionalExtensionality.fst @@ -0,0 +1,103 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.FunctionalExtensionality + +inline_for_extraction +let on_domain (a:Type) (#b:a -> Type) ([@@@strictly_positive] f:arrow a b) + = fun (x:a) -> f x + +let feq_on_domain (#a:Type) (#b:a -> Type) (f:arrow a b) + = () + +let idempotence_on_domain #a #b f + = assert_norm (on_domain a f == (on_domain a (on_domain a f))) + +let quantifier_as_lemma (#a:Type) (#b: a -> Type) + (f:squash (forall (x:a). 
b x)) + (x:a) + : Lemma (b x) + = () + +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Tactics.Types +open FStar.Tactics.Effect +(* we're early enough in the module stack that we need to reimplement + a few of the tactic helpers *) +noextract +let try_with (f : unit -> Tac 'a) (h : exn -> Tac 'a) : Tac 'a = + match catch f with + | Inl e -> h e + | Inr x -> x + +noextract +let l_to_r (t:term) : Tac unit = + ctrl_rewrite BottomUp + (fun _ -> true, Continue) + (fun _ -> + try t_apply_lemma false true t + with _ -> t_trefl false) + +let extensionality_1 (a:Type) + (b: a -> Type) + (f g: arrow a b) + (sq_feq : squash (feq f g)) + : Lemma (ensures on_domain a f == on_domain a g) + = assert (on_domain a f == on_domain a g) + by (norm [delta_only [`%on_domain]]; + l_to_r (quote (quantifier_as_lemma sq_feq)); + t_trefl false) + +let extensionality a b f g + = let fwd a b (f g:arrow a b) + : Lemma (requires feq #a #b f g) + (ensures on_domain a f == on_domain a g) + [SMTPat (feq #a #b f g)] + = extensionality_1 a b f g () + in + () + + +(****** GTot version ******) + +let on_domain_g (a:Type) (#b:a -> Type) (f:arrow_g a b) + = fun (x:a) -> f x + +let feq_on_domain_g (#a:Type) (#b:a -> Type) (f:arrow_g a b) + = () + +let idempotence_on_domain_g #a #b f + = assert_norm (on_domain_g a f == (on_domain_g a (on_domain_g a f))) + +let extensionality_1_g (a:Type) + (b: a -> Type) + (f g: arrow_g a b) + (sq_feq : squash (feq_g f g)) + : Lemma (ensures on_domain_g a f == on_domain_g a g) + = assert (on_domain_g a f == on_domain_g a g) + by (norm [delta_only [`%on_domain_g]]; + l_to_r (quote (quantifier_as_lemma sq_feq)); + t_trefl false) + +let extensionality_g a b f g + = let fwd a b (f g:arrow_g a b) + : Lemma (requires feq_g #a #b f g) + (ensures on_domain_g a f == on_domain_g a g) + [SMTPat (feq_g #a #b f g)] + = extensionality_1_g a b f g () + in + () diff --git a/stage0/ulib/FStar.FunctionalExtensionality.fsti b/stage0/ulib/FStar.FunctionalExtensionality.fsti new file mode 100644 index 00000000000..495b78b22bb --- /dev/null +++ b/stage0/ulib/FStar.FunctionalExtensionality.fsti @@ -0,0 +1,191 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.FunctionalExtensionality + +/// Functional extensionality asserts the equality of pointwise-equal +/// functions. +/// +/// The formulation of this axiom is particularly subtle in F* because +/// of its interaction with subtyping. In fact, prior formulations of +/// this axiom were discovered to be unsound by Aseem Rastogi. +/// +/// The predicate [feq #a #b f g] asserts that [f, g: x:a -> (b x)] are +/// pointwise equal on the domain [a]. +/// +/// However, due to subtyping [f] and [g] may also be defined on some +/// domain larger than [a]. We need to be careful to ensure that merely +/// proving [f] and [g] equal on their sub-domain [a] does not lead us +/// to conclude that they are equal everywhere. 
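+///
+/// As a small illustration (a sketch, not part of this interface): the
+/// functions [f = fun (x:int) -> x] and [g = fun (x:int) -> if x >= 0 then x else 0]
+/// agree on the subdomain [nat], so [feq #nat f g] holds and the axiom
+/// below equates [on_domain nat f] with [on_domain nat g]; but [f] and [g]
+/// differ at [-1], so concluding [f == g] outright would be unsound.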
+/// +/// For more context on how functional extensionality works in F*, see +/// 1. tests/micro-benchmarks/Test.FunctionalExtensionality.fst +/// 2. ulib/FStar.Map.fst and ulib/FStar.Map.fsti +/// 3. Issue #1542 on github.com/FStarLang/FStar/issues/1542 + +(** The type of total, dependent functions *) +unfold +let arrow (a: Type) (b: (a -> Type)) = x: a -> Tot (b x) + +(** Using [arrow] instead *) +[@@ (deprecated "Use arrow instead")] +let efun (a: Type) (b: (a -> Type)) = arrow a b + +(** feq #a #b f g: pointwise equality of [f] and [g] on domain [a] *) +let feq (#a: Type) (#b: (a -> Type)) (f g: arrow a b) = forall x. {:pattern (f x)\/(g x)} f x == g x + +(** [on_domain a f]: + + This is a key function provided by the module. It has several + features. + + 1. Intuitively, [on_domain a f] can be seen as a function whose + maximal domain is [a]. + + 2. While, [on_domain a f] is proven to be *pointwise* equal to [f], + crucially it is not provably equal to [f], since [f] may + actually have a domain larger than [a]. + + 3. [on_domain] is idempotent + + 4. [on_domain a f x] has special treatment in F*'s normalizer. It + reduces to [f x], reflecting the pointwise equality of + [on_domain a f] and [f]. + + 5. [on_domain] is marked [inline_for_extraction], to eliminate the + overhead of an indirection in extracted code. (This feature + will be exercised as part of cross-module inlining across + interface boundaries) +*) +inline_for_extraction +val on_domain (a: Type) (#b: (a -> Type)) ([@@@strictly_positive] f: arrow a b) : Tot (arrow a b) + +(** feq_on_domain: + [on_domain a f] is pointwise equal to [f] + *) +val feq_on_domain (#a: Type) (#b: (a -> Type)) (f: arrow a b) + : Lemma (feq (on_domain a f) f) [SMTPat (on_domain a f)] + +(** on_domain is idempotent *) +val idempotence_on_domain (#a: Type) (#b: (a -> Type)) (f: arrow a b) + : Lemma (on_domain a (on_domain a f) == on_domain a f) [SMTPat (on_domain a (on_domain a f))] + +(** [is_restricted a f]: + + Though stated indirectly, [is_restricted a f] is valid when [f] + is a function whose maximal domain is equal to [a]. + + Equivalently, one may see its definition as + [exists g. f == on_domain a g] +*) +let is_restricted (a: Type) (#b: (a -> Type)) (f: arrow a b) = on_domain a f == f + +(** restricted_t a b: + Lifts the [is_restricted] predicate into a refinement type + + This is the type of functions whose maximal domain is [a] + and whose (dependent) co-domain is [b]. +*) +let restricted_t (a: Type) (b: (a -> Type)) = f: arrow a b {is_restricted a f} + +(** [a ^-> b]: + + Notation for non-dependent restricted functions from [a] to [b]. + The first symbol [^] makes it right associative, as expected for + arrows. 
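+
+    Since it is right associative, [a ^-> b ^-> c] means [a ^-> (b ^-> c)].
+    As a small illustrative example, [on int (fun x -> x + 1)] (using the
+    [on] combinator defined below) is an inhabitant of [int ^-> int].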
+ *) +unfold +let op_Hat_Subtraction_Greater (a b: Type) = restricted_t a (fun _ -> b) + +(** [on_dom a f]: + A convenience function to introduce a restricted, dependent function + *) +unfold +let on_dom (a: Type) (#b: (a -> Type)) (f: arrow a b) : restricted_t a b = on_domain a f + +(** [on a f]: + A convenience function to introduce a restricted, non-dependent function + *) +unfold +let on (a #b: Type) (f: (a -> Tot b)) : (a ^-> b) = on_dom a f + +(**** MAIN AXIOM *) + +(** [extensionality]: + + The main axiom of this module states that functions [f] and [g] + that are pointwise equal on domain [a] are provably equal when + restricted to [a] *) +val extensionality (a: Type) (b: (a -> Type)) (f g: arrow a b) + : Lemma (ensures (feq #a #b f g <==> on_domain a f == on_domain a g)) [SMTPat (feq #a #b f g)] + +(**** DUPLICATED FOR GHOST FUNCTIONS *) + +(** The type of ghost, total, dependent functions *) +unfold +let arrow_g (a: Type) (b: (a -> Type)) = x: a -> GTot (b x) + +(** Use [arrow_g] instead *) +[@@ (deprecated "Use arrow_g instead")] +let efun_g (a: Type) (b: (a -> Type)) = arrow_g a b + +(** [feq_g #a #b f g]: pointwise equality of [f] and [g] on domain [a] **) +let feq_g (#a: Type) (#b: (a -> Type)) (f g: arrow_g a b) = + forall x. {:pattern (f x)\/(g x)} f x == g x + +(** The counterpart of [on_domain] for ghost functions *) +val on_domain_g (a: Type) (#b: (a -> Type)) (f: arrow_g a b) : Tot (arrow_g a b) + +(** [on_domain_g a f] is pointwise equal to [f] *) +val feq_on_domain_g (#a: Type) (#b: (a -> Type)) (f: arrow_g a b) + : Lemma (feq_g (on_domain_g a f) f) [SMTPat (on_domain_g a f)] + +(** on_domain_g is idempotent *) +val idempotence_on_domain_g (#a: Type) (#b: (a -> Type)) (f: arrow_g a b) + : Lemma (on_domain_g a (on_domain_g a f) == on_domain_g a f) + [SMTPat (on_domain_g a (on_domain_g a f))] + +(** Counterpart of [is_restricted] for ghost functions *) +let is_restricted_g (a: Type) (#b: (a -> Type)) (f: arrow_g a b) = on_domain_g a f == f + +(** Counterpart of [restricted_t] for ghost functions *) +let restricted_g_t (a: Type) (b: (a -> Type)) = f: arrow_g a b {is_restricted_g a f} + +(** [a ^->> b]: + + Notation for ghost, non-dependent restricted functions from [a] + a to [b]. + *) +unfold +let op_Hat_Subtraction_Greater_Greater (a b: Type) = restricted_g_t a (fun _ -> b) + +(** [on_dom_g a f]: + A convenience function to introduce a restricted, ghost, dependent function + *) +unfold +let on_dom_g (a: Type) (#b: (a -> Type)) (f: arrow_g a b) : restricted_g_t a b = on_domain_g a f + +(** [on_g a f]: + A convenience function to introduce a restricted, ghost, non-dependent function + *) +unfold +let on_g (a #b: Type) (f: (a -> GTot b)) : (a ^->> b) = on_dom_g a f + +(** Main axiom for ghost functions **) +val extensionality_g (a: Type) (b: (a -> Type)) (f g: arrow_g a b) + : Lemma (ensures (feq_g #a #b f g <==> on_domain_g a f == on_domain_g a g)) + [SMTPat (feq_g #a #b f g)] + diff --git a/stage0/ulib/FStar.FunctionalQueue.fst b/stage0/ulib/FStar.FunctionalQueue.fst new file mode 100644 index 00000000000..f4af0d78298 --- /dev/null +++ b/stage0/ulib/FStar.FunctionalQueue.fst @@ -0,0 +1,179 @@ +(* + Copyright 2008-2024 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: Megan Frisella +*) +module FStar.FunctionalQueue + +module L = FStar.List.Tot +open FStar.List.Tot +open FStar.Seq + +(* Functional queues in the style of Okasaki. + +Enqueue and dequeue are amortized constant time operations. The queue is +represented by a pair of lists, the first one being the "front" of the +queue, where elements are popped, and the second being the "back", where +elements are pushed. The lists are in opposite order, so that popping +from the front and pushing to the back is O(1). When we need to dequeue +and the front is empty, we reverse the back of the list into the front +(see dequeue). + +The lemmas exposed in the interface guarantee to clients of this module +that we in fact model a queue, by relating the operations to a Sequence. *) + +type queue a = p:(list a & list a){L.isEmpty (fst p) ==> L.isEmpty (snd p)} + +let empty #a = [], [] + +val queue_to_list (#a:Type) (q:queue a) : list a +let queue_to_list #a q + = match (fst q) with + | [] -> [] + | _ -> (fst q) @ (L.rev (snd q)) + +val queue_of_list (#a:Type) (l:list a) : queue a +let queue_of_list #a l + = match l with + | [] -> empty + | _ -> l, [] + +let queue_to_seq #a q + = seq_of_list (queue_to_list q) + +let queue_of_seq #a s + = queue_of_list (seq_to_list s) + +let equal #a q1 q2 = queue_to_seq q1 == queue_to_seq q2 + +let lemma_eq_intro #_ q1 q2 = () + +let lemma_eq_elim #_ q1 q2 = () + +let lemma_list_queue_bij (#a:Type) (l:list a) + : Lemma (queue_to_list (queue_of_list l) == l) + = match l with + | [] -> () + | _ -> L.append_l_nil l + +let lemma_queue_list_bij (#a:Type) (q:queue a) + : Lemma (equal (queue_of_list (queue_to_list q)) q) + = match fst q with + | [] -> () + | l -> ( + L.append_l_nil (L.append l (L.rev (snd q))) + ) + +let lemma_seq_queue_bij (#a:Type) (s:seq a) + : Lemma (queue_to_seq (queue_of_seq s) == s) + = let l = (seq_to_list s) in + lemma_list_queue_bij l; + lemma_seq_list_bij s + +let lemma_queue_seq_bij (#a:Type) (q:queue a) + : Lemma (equal (queue_of_seq (queue_to_seq q)) q) + = let l = (queue_to_list q) in + lemma_queue_list_bij q; + lemma_list_seq_bij l + +let enqueue (#a:Type) (x:a) (q:queue a) + : queue a + = match fst q with + | [] -> [x], [] + | l -> l, x :: (snd q) + +let dequeue (#a:Type) (q:queue a{not_empty q}) + : a & queue a + = lemma_seq_of_list_induction (queue_to_list q); + let hd :: tl = fst q in + match tl with + | [] -> hd, (L.rev (snd q), []) + | _ -> hd, (tl, (snd q)) + +let peek (#a:Type) (q:queue a{not_empty q}) + : a + = lemma_seq_of_list_induction (queue_to_list q); + L.hd (fst q) + +let lemma_empty_ok (#a:Type) + : Lemma (queue_to_seq #a empty == Seq.empty) + = lemma_seq_list_bij #a Seq.empty + +let lemma_enqueue_ok_list (#a:Type) (x:a) (q:queue a) + : Lemma (queue_to_list (enqueue x q) == L.snoc ((queue_to_list q),x)) + = match fst q with + | [] -> () + | l -> ( + L.append_assoc l (L.rev (snd q)) [x]; + L.rev_append [x] (snd q) + ) + +let rec lemma_append_seq_of_list_dist (#a:Type) (l1 l2:list a) + : Lemma (ensures Seq.equal (seq_of_list (L.append l1 l2)) (Seq.append (seq_of_list l1) (seq_of_list l2))) + = match l1 with + | [] -> 
L.append_nil_l l2 + | hd :: tl -> + ( + lemma_seq_of_list_induction (hd :: (L.append tl l2)); + lemma_append_seq_of_list_dist tl l2; + Seq.append_cons hd (seq_of_list tl) (seq_of_list l2); + lemma_seq_of_list_induction (hd :: tl) + ) + +let lemma_snoc_list_seq (#a:Type) (x:a) (q:queue a) + : Lemma (seq_of_list (L.snoc ((queue_to_list q),x)) == Seq.snoc (queue_to_seq q) x) += + let l = queue_to_list q in + calc (==) { + seq_of_list (L.snoc (l, x)) <: seq a; + == { () } + seq_of_list (l @ [x]); + == { lemma_append_seq_of_list_dist l [x] } + seq_of_list l `Seq.append` seq_of_list [x]; + == { assert (Seq.equal (seq_of_list [x]) (Seq.create 1 x)) } + seq_of_list l `Seq.append` Seq.create 1 x; + == { admit() } + Seq.snoc (seq_of_list l) x; + } + +let lemma_enqueue_ok (#a:Type) (x:a) (q:queue a) + : Lemma (queue_to_seq (enqueue x q) == Seq.snoc (queue_to_seq q) x) + = lemma_enqueue_ok_list x q; + lemma_snoc_list_seq x q + +let lemma_dequeue_ok_list (#a:Type) (q:queue a{not_empty q}) + : Lemma (fst (dequeue q) :: queue_to_list (snd (dequeue q)) == queue_to_list q) + = lemma_seq_of_list_induction (queue_to_list q); + let hd :: tl = fst q in + match tl with + | [] -> L.append_l_nil (L.rev (snd q)) + | _ -> L.append_assoc [hd] tl (L.rev (snd q)) + +let lemma_cons_list_seq (#a:Type) (x:a) (q:queue a) + : Lemma (seq_of_list (x :: (queue_to_list q)) == Seq.cons x (queue_to_seq q)) += let l = (queue_to_list q) in + lemma_append_seq_of_list_dist [x] l; + lemma_seq_list_bij (Seq.create 1 x) + +let lemma_dequeue_ok (#a:Type) (q:queue a{not_empty q}) + : Lemma (let hd, tl = dequeue q in + hd == Seq.head (queue_to_seq q) /\ + equal tl (queue_of_seq (Seq.tail (queue_to_seq q)))) + = lemma_dequeue_ok_list q; + lemma_cons_list_seq (fst (dequeue q)) (snd (dequeue q)) + +let lemma_peek_ok (#a:Type) (q:queue a{not_empty q}) + : Lemma (peek q == Seq.head (queue_to_seq q)) + = lemma_dequeue_ok_list q diff --git a/stage0/ulib/FStar.FunctionalQueue.fsti b/stage0/ulib/FStar.FunctionalQueue.fsti new file mode 100644 index 00000000000..13878a4ea29 --- /dev/null +++ b/stage0/ulib/FStar.FunctionalQueue.fsti @@ -0,0 +1,76 @@ +(* + Copyright 2008-2024 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + Author: Megan Frisella +*) +module FStar.FunctionalQueue + +open FStar.Seq + +val queue (a:Type u#a) : Type u#a + +val empty (#a:Type) : queue a + +val queue_to_seq (#a:Type) (q:queue a) : seq a + +val queue_of_seq (#a:Type) (s:seq a) : queue a + +val equal (#a:Type) (q1 q2:queue a) : prop + +let not_empty (#a:Type) (q:queue a) : prop + = let s = queue_to_seq q in + ~(Seq.equal s Seq.empty) /\ length s > 0 + +val lemma_eq_intro: #a:Type -> q1:queue a -> q2:queue a -> Lemma + (requires Seq.equal (queue_to_seq q1) (queue_to_seq q2)) + (ensures (equal q1 q2)) + [SMTPat (equal q1 q2)] + +val lemma_eq_elim: #a:Type -> q1:queue a -> q2:queue a -> Lemma + (requires (equal q1 q2)) + (ensures queue_to_seq q1 == queue_to_seq q2) + [SMTPat (equal q1 q2)] + +val lemma_seq_queue_bij: #a:Type -> s:seq a -> Lemma + (queue_to_seq (queue_of_seq s) == s) + [SMTPat (queue_of_seq s)] + +val lemma_queue_seq_bij: #a:Type -> q:queue a -> Lemma + (equal (queue_of_seq (queue_to_seq q)) q) + [SMTPat (queue_to_seq q)] + +val enqueue (#a:Type) (x:a) (q:queue a) : queue a + +val dequeue (#a:Type) (q:queue a{not_empty q}) : a & queue a + +val peek (#a:Type) (q:queue a{not_empty q}) : a + +val lemma_empty_ok: #a:Type -> Lemma + (queue_to_seq #a empty == Seq.empty) + [SMTPat (empty #a)] + +val lemma_enqueue_ok: #a:Type -> x:a -> q:queue a -> Lemma + (queue_to_seq (enqueue x q) == Seq.snoc (queue_to_seq q) x) + [SMTPat (enqueue x q)] + +val lemma_dequeue_ok: #a:Type -> q:queue a{not_empty q} -> Lemma + (let hd, tl = dequeue q in + hd == Seq.head (queue_to_seq q) /\ + equal tl (queue_of_seq (Seq.tail (queue_to_seq q)))) + [SMTPat (dequeue q)] + +val lemma_peek_ok: #a:Type -> q:queue a{not_empty q} -> Lemma + (peek q == Seq.head (queue_to_seq q)) + [SMTPat (peek q)] diff --git a/stage0/ulib/FStar.Functions.fst b/stage0/ulib/FStar.Functions.fst new file mode 100644 index 00000000000..4c4ab58e3e5 --- /dev/null +++ b/stage0/ulib/FStar.Functions.fst @@ -0,0 +1,46 @@ +module FStar.Functions + +let inj_comp (#a #b #c : _) (f : a -> b) (g : b -> c) + : Lemma (requires is_inj f /\ is_inj g) + (ensures is_inj (fun x -> g (f x))) + = () + +let surj_comp (#a #b #c : _) (f : a -> b) (g : b -> c) + : Lemma (requires is_surj f /\ is_surj g) + (ensures is_surj (fun x -> g (f x))) + = () + +let bij_comp (#a #b #c : _) (f : a -> b) (g : b -> c) : + Lemma (requires is_bij f /\ is_bij g) + (ensures is_bij (fun x -> g (f x))) += () + +let lem_surj (#a #b : _) (f : a -> b) (y : b) + : Lemma (requires is_surj f) (ensures in_image f y) + = () + +let inverse_of_bij #a #b f = + (* Construct the inverse from indefinite description + choice. *) + let g0 (y:b) : GTot (x:a{f x == y}) = + FStar.IndefiniteDescription.indefinite_description_ghost a + (fun (x:a) -> f x == y) + in + (* Prove it's surjective *) + let aux (x:a) : Lemma (exists (y:b). 
g0 y == x) = + assert (g0 (f x) == x) + in + Classical.forall_intro aux; + Ghost.Pull.pull g0 + +let inverse_of_inj #a #b f def = + (* f is a bijection into its image, obtain its inverse *) + let f' : a -> image_of f = fun x -> f x in + let g_partial = inverse_of_bij #a #(image_of f) f' in + (* extend the inverse to the full domain b *) + let g : b -> GTot a = + fun (y:b) -> + if FStar.StrongExcludedMiddle.strong_excluded_middle (in_image f y) + then g_partial y + else def + in + Ghost.Pull.pull g diff --git a/stage0/ulib/FStar.Functions.fsti b/stage0/ulib/FStar.Functions.fsti new file mode 100644 index 00000000000..ff92a23fe47 --- /dev/null +++ b/stage0/ulib/FStar.Functions.fsti @@ -0,0 +1,53 @@ +module FStar.Functions + +(* This module contains basic definitions and lemmas +about functions and sets. *) + +let is_inj (#a #b : _) (f : a -> b) : prop = + forall (x1 x2 : a). f x1 == f x2 ==> x1 == x2 + +let is_surj (#a #b : _) (f : a -> b) : prop = + forall (y:b). exists (x:a). f x == y + +let is_bij (#a #b : _) (f : a -> b) : prop = + is_inj f /\ is_surj f + +let in_image (#a #b : _) (f : a -> b) (y : b) : prop = + exists (x:a). f x == y + +let image_of (#a #b : _) (f : a -> b) : Type = + y:b{in_image f y} + +(* g inverses f *) +let is_inverse_of (#a #b : _) (g : b -> a) (f : a -> b) = + forall (x:a). g (f x) == x + +let powerset (a:Type u#aa) : Type u#aa = a -> bool + +val inj_comp (#a #b #c : _) (f : a -> b) (g : b -> c) + : Lemma (requires is_inj f /\ is_inj g) + (ensures is_inj (fun x -> g (f x))) + +val surj_comp (#a #b #c : _) (f : a -> b) (g : b -> c) + : Lemma (requires is_surj f /\ is_surj g) + (ensures is_surj (fun x -> g (f x))) + +val bij_comp (#a #b #c : _) (f : a -> b) (g : b -> c) : + Lemma (requires is_bij f /\ is_bij g) + (ensures is_bij (fun x -> g (f x))) + +val lem_surj (#a #b : _) (f : a -> b) (y : b) + : Lemma (requires is_surj f) (ensures in_image f y) + +(* An bijection has a perfect inverse. *) +val inverse_of_bij (#a #b : _) (f : a -> b) + : Ghost (b -> a) + (requires is_bij f) + (ensures fun g -> is_bij g /\ g `is_inverse_of` f /\ f `is_inverse_of` g) + +(* An injective function has an inverse (as long as the domain is non-empty), +and this inverse is surjective. *) +val inverse_of_inj (#a #b : _) (f : a -> b{is_inj f}) (def : a) + : Ghost (b -> a) + (requires is_inj f) + (ensures fun g -> is_surj g /\ g `is_inverse_of` f) diff --git a/stage0/ulib/FStar.GSet.fst b/stage0/ulib/FStar.GSet.fst new file mode 100644 index 00000000000..018b7b9fa58 --- /dev/null +++ b/stage0/ulib/FStar.GSet.fst @@ -0,0 +1,53 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.GSet +(** Computational sets (on Types): membership is a boolean function *) +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" +open FStar.FunctionalExtensionality +module F = FStar.FunctionalExtensionality + +let set (a: Type) : Tot Type = F.restricted_g_t a (fun _ -> bool) + +type equal (#a:Type) (s1:set a) (s2:set a) = F.feq_g s1 s2 + +(* destructors *) +let mem #a x s = s x + +(* constructors *) +let empty #a = F.on_dom_g a (fun x -> false) +let singleton #a x = F.on_dom_g a #(fun _ -> bool) (fun y -> StrongExcludedMiddle.strong_excluded_middle (y == x)) +let union #a s1 s2 = F.on_dom_g a (fun x -> s1 x || s2 x) +let intersect #a s1 s2 = F.on_dom_g a (fun x -> s1 x && s2 x) +let complement #a s = F.on_dom_g a ( fun x -> not (s x)) +let comprehend #a f = F.on_dom_g a f +let of_set #a f = F.on_dom_g a (fun x -> Set.mem x f) + +(* Properties *) +let mem_empty #a x = () +let mem_singleton #a x y = () +let mem_union #a x s1 s2 = () +let mem_intersect #a x s1 s2 = () +let mem_complement #a x s = () +let mem_subset #a s1 s2 = () +let subset_mem #a s1 s2 = () +let comprehend_mem #a f x = () +let mem_of_set #a f x = () + +(* extensionality *) +let lemma_equal_intro #a s1 s2 = () +let lemma_equal_elim #a s1 s2 = () +let lemma_equal_refl #a s1 s2 = () diff --git a/stage0/ulib/FStar.GSet.fsti b/stage0/ulib/FStar.GSet.fsti new file mode 100644 index 00000000000..408496dc2c8 --- /dev/null +++ b/stage0/ulib/FStar.GSet.fsti @@ -0,0 +1,129 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.GSet +(** Computational sets (on Types): membership is a boolean function *) +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" + +(* + * AR: mark it must_erase_for_extraction temporarily until CMI comes in + *) +[@@must_erase_for_extraction] +val set (a: Type u#a) : Type u#a + +val equal (#a:Type) (s1:set a) (s2:set a) : Type0 + +(* destructors *) + +val mem : #a:Type -> a -> set a -> GTot bool + +(* constructors *) +val empty : #a:Type -> Tot (set a) +val singleton : #a:Type -> a -> Tot (set a) +val union : #a:Type -> set a -> set a -> Tot (set a) +val intersect : #a:Type -> set a -> set a -> Tot (set a) +val complement : #a:Type -> set a -> Tot (set a) +val comprehend (#a: Type) (f: (a -> GTot bool)) : set a +val of_set (#a: eqtype) (f: Set.set a) : set a + +(* a property about sets *) +let disjoint (#a:Type) (s1: set a) (s2: set a) = + equal (intersect s1 s2) empty + +(* ops *) +type subset (#a:Type) (s1:set a) (s2:set a) :Type0 = forall x. 
mem x s1 ==> mem x s2 + +(* Properties *) +val mem_empty: #a:Type -> x:a -> Lemma + (requires True) + (ensures (not (mem x empty))) + [SMTPat (mem x empty)] + +val mem_singleton: #a:Type -> x:a -> y:a -> Lemma + (requires True) + (ensures (mem y (singleton x) <==> (x==y))) + [SMTPat (mem y (singleton x))] + +val mem_union: #a:Type -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (union s1 s2) = (mem x s1 || mem x s2))) + [SMTPat (mem x (union s1 s2))] + +val mem_intersect: #a:Type -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (intersect s1 s2) = (mem x s1 && mem x s2))) + [SMTPat (mem x (intersect s1 s2))] + +val mem_complement: #a:Type -> x:a -> s:set a -> Lemma + (requires True) + (ensures (mem x (complement s) = not (mem x s))) + [SMTPat (mem x (complement s))] + +val mem_subset: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 ==> mem x s2)) + (ensures (subset s1 s2)) + [SMTPat (subset s1 s2)] + +val subset_mem: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (subset s1 s2)) + (ensures (forall x. mem x s1 ==> mem x s2)) + [SMTPat (subset s1 s2)] + +val comprehend_mem (#a: Type) (f: (a -> GTot bool)) (x: a) + : Lemma (ensures (mem x (comprehend f) == f x)) + [SMTPat (mem x (comprehend f))] + +val mem_of_set (#a: eqtype) (f: Set.set a) (x: a) + : Lemma (ensures (mem x (of_set f) <==> Set.mem x f)) + [SMTPat (mem x (of_set f))] + +(* extensionality *) + +val lemma_equal_intro: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 = mem x s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_elim: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (equal s1 s2)) + (ensures (s1 == s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_refl: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (s1 == s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +let disjoint_not_in_both (a:Type) (s1:set a) (s2:set a) : + Lemma + (requires (disjoint s1 s2)) + (ensures (forall (x:a).{:pattern (mem x s1) \/ (mem x s2)} mem x s1 ==> ~(mem x s2))) + [SMTPat (disjoint s1 s2)] += let f (x:a) : Lemma (~(mem x (intersect s1 s2))) = () in + FStar.Classical.forall_intro f + +(* Converting lists to sets *) +#reset-options //restore fuel usage here + +let rec as_set' (#a:Type) (l:list a) : set a = + match l with + | [] -> empty + | hd::tl -> union (singleton hd) (as_set' tl) + +let lemma_disjoint_subset (#a:Type) (s1:set a) (s2:set a) (s3:set a) + : Lemma (requires (disjoint s1 s2 /\ subset s3 s1)) + (ensures (disjoint s3 s2)) + = () diff --git a/stage0/ulib/FStar.Ghost.Pull.fsti b/stage0/ulib/FStar.Ghost.Pull.fsti new file mode 100644 index 00000000000..12c27879e95 --- /dev/null +++ b/stage0/ulib/FStar.Ghost.Pull.fsti @@ -0,0 +1,33 @@ +module FStar.Ghost.Pull + +(** + [pull] is an axiom. + + It type states that for any ghost function ``f``, we can exhibit a + total function ``g`` that is pointwise equal to ``f``. However, it + may not be possible, in general, to compute ``g`` in a way that + enables it to be compiled by F*. So, ``pull f`` itself has ghost + effect, indicating that applications of ``pull`` cannot be used in + compilable code. + + Alternatively, one can think of `pull` as saying that the GTot + effect is idempotent and non-dependent, meaning that if evaluating + `f` on an argument `v:a`, exhibits an effect `GTot` and returns a + result; then the effect does not depend on `v` and it can be + subsumed to exhibiting the effect first and then computing `f v` + purely. 
+ + In other words, it "pulls" the effect out of `f`. + + pull is useful to mimic a kind of Tot/GTot effect polymorphism. + + E.g., if you have `f: a -> GTot b` and a `l:list a` + you can do List.map (pull f) l : GTot (list b) + *) +val pull (#a:Type) (#b:a -> Type) (f: (x:a -> GTot (b x))) + : GTot (x:a -> b x) + +val pull_equiv (#a:Type) (#b:a -> Type) (f: (x:a -> GTot (b x))) (x:a) + : Lemma (ensures pull f x == f x) + [SMTPat (pull f x)] + diff --git a/stage0/ulib/FStar.Ghost.fst b/stage0/ulib/FStar.Ghost.fst new file mode 100644 index 00000000000..b14d8805f65 --- /dev/null +++ b/stage0/ulib/FStar.Ghost.fst @@ -0,0 +1,26 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Ghost + +[@@erasable] +noeq +type erased (a:Type) = + | E of a + +let reveal #a (E x) = x +let hide #a x = E x +let hide_reveal #a x = () +let reveal_hide #a x = () diff --git a/stage0/ulib/FStar.Ghost.fst.sketch b/stage0/ulib/FStar.Ghost.fst.sketch new file mode 100644 index 00000000000..1abb3716ece --- /dev/null +++ b/stage0/ulib/FStar.Ghost.fst.sketch @@ -0,0 +1,28 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(* See FStar.Ghost.fsti *) +module FStar.Ghost +let erased a = a +let reveal #a x = x +let hide #a x = x +let hide_reveal #a x = () +let reveal_hide #a x = () +let elift1 #a #b f ga = f ga +let elift2 #a #b #c f ga gc = f ga gc +let elift3 #a #b #c #d f ga gc gd = f ga gc gd +let elift1_p #a #b #p f ga = f ga +let elift2_p #a #c #p #b f ga gc = f ga gc diff --git a/stage0/ulib/FStar.Ghost.fsti b/stage0/ulib/FStar.Ghost.fsti new file mode 100644 index 00000000000..243b270b448 --- /dev/null +++ b/stage0/ulib/FStar.Ghost.fsti @@ -0,0 +1,169 @@ +(* + Copyright 2008-2014 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Ghost + +/// This module provides an erased type to abstract computationally +/// irrelevant values. 
+/// +/// It relies on the GHOST effect defined in Prims. +/// +/// [erased a] is decorated with the erasable attribute. As such, +/// +/// 1. The type is considered non-informative. +/// +/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)] +/// +/// 2. The compiler extracts [erased a] to [unit] +/// +/// The type is [erased a] is in a bijection with [a], as +/// witnessed by the [hide] and [reveal] function. +/// +/// Importantly, computationally relevant code cannot use [reveal] +/// (it's marked [GTot]) +/// +/// Just like Coq's prop, it is okay to use erased types +/// freely as long as we produce an erased type. +/// +/// [reveal] and [hide] are coercions: the typechecker will +/// automatically insert them when required. That is, if the type of +/// an expression is [erased X], and the expected type is NOT an +/// [erased Y], it will insert [reveal], and vice versa for [hide]. + +(** [erased t] is the computationally irrelevant counterpart of [t] *) +[@@ erasable] +new +val erased ([@@@strictly_positive] a: Type u#a) : Type u#a + +(** [erased t] is in a bijection with [t], as witnessed by [reveal] + and [hide] *) +val reveal: #a: Type u#a -> erased a -> GTot a + +val hide: #a: Type u#a -> a -> Tot (erased a) + +val hide_reveal (#a: Type) (x: erased a) + : Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)] + +val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)] + + +/// The rest of this module includes several well-defined defined +/// notions. They are not trusted. + +(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot] + computations to [GTot] computations, though, occasionally, it may + be useful to apply this coercion explicitly. *) +let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x + +(** [erased]: Injecting a value into [erased]; just an alias of [hide] *) +let return (#a: Type) (x: a) : erased a = hide x + +(** Sequential composition of erased *) +let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) = + let y = reveal x in + f y + +unfold +let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f + +(** Unary map *) +irreducible +let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a) + : Tot (y: erased b {reveal y == f (reveal x)}) = + let@ xx = x in return (f xx) + +(** Binary map *) +irreducible +let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b) + : Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) = + let@ xx = x in + let@ yy = y in + return (f xx yy) + +(** Ternary map *) +irreducible +let elift3 + (#a #b #c #d: Type) + (f: (a -> b -> c -> GTot d)) + (ga: erased a) + (gb: erased b) + (gc: erased c) + : Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) = + let@ a = ga in + let@ b = gb in + let@ c = gc in + return (f a b c) + +(** Pushing a refinement type under the [erased] constructor *) +let push_refinement #a (#p: (a -> Type0)) (r: erased a {p (reveal r)}) + : erased (x: a{p x /\ x == reveal r}) = + let x:(x: a{p x}) = reveal r in + return x + +(** Mapping a function with a refined domain over a refined erased value *) +irreducible +let elift1_p + (#a #b: Type) + (#p: (a -> Type)) + ($f: (x: a{p x} -> GTot b)) + (r: erased a {p (reveal r)}) + : Tot (z: erased b {reveal z == f (reveal r)}) = + let x:(x: a{p x}) = reveal r in + return (f x) + +(** Mapping a binary function with a refined domain over a pair of + refined erased values *) +irreducible +let elift2_p + (#a #b #c: Type) 
+ (#p: (a -> b -> Type)) + ($f: (xa: a -> xb: b{p xa xb} -> GTot c)) + (ra: erased a) + (rb: erased b {p (reveal ra) (reveal rb)}) + : Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)}) = + let x = reveal ra in + let y:(y: b{p x y}) = reveal rb in + return (f x y) + +(** Mapping a function with a refined domain and co-domain over a + refined erased value producing a refined erased value *) +irreducible +let elift1_pq + (#a #b: Type) + (#p: (a -> Type)) + (#q: (x: a{p x} -> b -> Type)) + ($f: (x: a{p x} -> GTot (y: b{q x y}))) + (r: erased a {p (reveal r)}) + : Tot (z: erased b {reveal z == f (reveal r)}) = + let x:(x: a{p x}) = reveal r in + return (f x) + +(** Mapping a binary function with a refined domain and co-domain over + a pair of refined erased values producing a refined erased value + *) +irreducible +let elift2_pq + (#a #b #c: Type) + (#p: (a -> b -> Type)) + (#q: (x: a -> y: b{p x y} -> c -> Type)) + ($f: (x: a -> y: b{p x y} -> GTot (z: c{q x y z}))) + (ra: erased a) + (rb: erased b {p (reveal ra) (reveal rb)}) + : Tot (z: erased c {reveal z == f (reveal ra) (reveal rb)}) = + let x = reveal ra in + let y:(y: b{p x y}) = reveal rb in + return (f x y) + diff --git a/stage0/ulib/FStar.GhostSet.fst b/stage0/ulib/FStar.GhostSet.fst new file mode 100644 index 00000000000..25659f1d0b0 --- /dev/null +++ b/stage0/ulib/FStar.GhostSet.fst @@ -0,0 +1,53 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.GhostSet +(** Ghost computational sets: membership is a ghost boolean function *) +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" +open FStar.FunctionalExtensionality +module F = FStar.FunctionalExtensionality + +let set (a: Type) : Tot Type = F.restricted_g_t a (fun _ -> bool) + +type equal (#a:Type) (s1:set a) (s2:set a) = F.feq_g s1 s2 + +(* destructors *) +let mem #a x s = s x + +(* constructors *) +let empty #a = F.on_dom_g a (fun x -> false) +let singleton #a f x = F.on_dom_g a #(fun _ -> bool) (fun y -> f x y) +let union #a s1 s2 = F.on_dom_g a (fun x -> s1 x || s2 x) +let intersect #a s1 s2 = F.on_dom_g a (fun x -> s1 x && s2 x) +let complement #a s = F.on_dom_g a ( fun x -> not (s x)) +let comprehend #a f = F.on_dom_g a f +let of_set #a f = F.on_dom_g a (fun x -> Set.mem x f) + +(* Properties *) +let mem_empty #a x = () +let mem_singleton #a x y = () +let mem_union #a x s1 s2 = () +let mem_intersect #a x s1 s2 = () +let mem_complement #a x s = () +let mem_subset #a s1 s2 = () +let subset_mem #a s1 s2 = () +let comprehend_mem #a f x = () +let mem_of_set #a f x = () + +(* extensionality *) +let lemma_equal_intro #a s1 s2 = () +let lemma_equal_elim #a s1 s2 = () +let lemma_equal_refl #a s1 s2 = () diff --git a/stage0/ulib/FStar.GhostSet.fsti b/stage0/ulib/FStar.GhostSet.fsti new file mode 100644 index 00000000000..239423a4e6c --- /dev/null +++ b/stage0/ulib/FStar.GhostSet.fsti @@ -0,0 +1,128 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.GhostSet +(** Ghost computational sets: membership is a ghost boolean function *) +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" + +[@@must_erase_for_extraction; erasable] +val set (a: Type u#a) : Type u#a + +let decide_eq a = x:a -> y:a -> GTot (b:bool { b <==> (x==y) }) + +val equal (#a:Type) (s1:set a) (s2:set a) : Type0 + +(* destructors *) + +val mem : #a:Type -> a -> set a -> GTot bool + +(* constructors *) +val empty : #a:Type -> Tot (set a) +val singleton : #a:Type -> f:decide_eq a -> a -> Tot (set a) +val union : #a:Type -> set a -> set a -> Tot (set a) +val intersect : #a:Type -> set a -> set a -> Tot (set a) +val complement : #a:Type -> set a -> Tot (set a) +val comprehend (#a: Type) (f: (a -> GTot bool)) : set a +val of_set (#a: eqtype) (f: Set.set a) : set a + +(* a property about sets *) +let disjoint (#a:Type) (s1: set a) (s2: set a) = + equal (intersect s1 s2) empty + +(* ops *) +type subset (#a:Type) (s1:set a) (s2:set a) :Type0 = forall x. 
mem x s1 ==> mem x s2 + +(* Properties *) +val mem_empty: #a:Type -> x:a -> Lemma + (requires True) + (ensures (not (mem x empty))) + [SMTPat (mem x empty)] + +val mem_singleton: #a:Type -> #f:decide_eq a -> x:a -> y:a -> Lemma + (requires True) + (ensures (mem y (singleton f x) <==> (x==y))) + [SMTPat (mem y (singleton f x))] + +val mem_union: #a:Type -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (union s1 s2) = (mem x s1 || mem x s2))) + [SMTPat (mem x (union s1 s2))] + +val mem_intersect: #a:Type -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (intersect s1 s2) = (mem x s1 && mem x s2))) + [SMTPat (mem x (intersect s1 s2))] + +val mem_complement: #a:Type -> x:a -> s:set a -> Lemma + (requires True) + (ensures (mem x (complement s) = not (mem x s))) + [SMTPat (mem x (complement s))] + +val mem_subset: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 ==> mem x s2)) + (ensures (subset s1 s2)) + [SMTPat (subset s1 s2)] + +val subset_mem: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (subset s1 s2)) + (ensures (forall x. mem x s1 ==> mem x s2)) + [SMTPat (subset s1 s2)] + +val comprehend_mem (#a: Type) (f: (a -> GTot bool)) (x: a) + : Lemma (ensures (mem x (comprehend f) == f x)) + [SMTPat (mem x (comprehend f))] + +val mem_of_set (#a: eqtype) (f: Set.set a) (x: a) + : Lemma (ensures (mem x (of_set f) <==> Set.mem x f)) + [SMTPat (mem x (of_set f))] + +(* extensionality *) + +val lemma_equal_intro: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 = mem x s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_elim: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (equal s1 s2)) + (ensures (s1 == s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_refl: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (s1 == s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +let disjoint_not_in_both (a:Type) (s1:set a) (s2:set a) : + Lemma + (requires (disjoint s1 s2)) + (ensures (forall (x:a).{:pattern (mem x s1) \/ (mem x s2)} mem x s1 ==> ~(mem x s2))) + [SMTPat (disjoint s1 s2)] += let f (x:a) : Lemma (~(mem x (intersect s1 s2))) = () in + FStar.Classical.forall_intro f + +(* Converting lists to sets *) +#reset-options //restore fuel usage here + +let rec as_set' (#a:Type) (f:decide_eq a) (l:list a) : set a = + match l with + | [] -> empty + | hd::tl -> union (singleton f hd) (as_set' f tl) + +let lemma_disjoint_subset (#a:Type) (s1:set a) (s2:set a) (s3:set a) + : Lemma (requires (disjoint s1 s2 /\ subset s3 s1)) + (ensures (disjoint s3 s2)) + = () diff --git a/stage0/ulib/FStar.Heap.fst b/stage0/ulib/FStar.Heap.fst new file mode 100644 index 00000000000..280e2105ee8 --- /dev/null +++ b/stage0/ulib/FStar.Heap.fst @@ -0,0 +1,24 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Heap + +include FStar.Monotonic.Heap + +let trivial_rel (a:Type0) :Preorder.relation a = fun x y -> True + +let trivial_preorder (a:Type0) :Preorder.preorder a = trivial_rel a + +type ref (a:Type0) = mref a (trivial_preorder a) diff --git a/stage0/ulib/FStar.HyperStack.All.fst b/stage0/ulib/FStar.HyperStack.All.fst new file mode 100644 index 00000000000..c6dbcd68380 --- /dev/null +++ b/stage0/ulib/FStar.HyperStack.All.fst @@ -0,0 +1,39 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.HyperStack.All +include FStar.HyperStack.ST + +let all_pre = all_pre_h HyperStack.mem +let all_post' (a:Type) (pre:Type) = all_post_h' HyperStack.mem a pre +let all_post (a:Type) = all_post_h HyperStack.mem a +let all_wp (a:Type) = all_wp_h HyperStack.mem a +new_effect ALL = ALL_h HyperStack.mem + +unfold let lift_state_all (a:Type) (wp:st_wp a) (p:all_post a) = wp (fun a -> p (V a)) +sub_effect STATE ~> ALL = lift_state_all + +unfold let lift_exn_all (a:Type) (wp:ex_wp a) (p:all_post a) (h:HyperStack.mem) = wp (fun ra -> p ra h) +sub_effect EXN ~> ALL = lift_exn_all + +effect All (a:Type) (pre:all_pre) (post: (h0:HyperStack.mem -> Tot (all_post' a (pre h0)))) = + ALL a + (fun (p:all_post a) (h:HyperStack.mem) -> pre h /\ (forall ra h1. post h ra h1 ==> p ra h1)) (* WP *) +effect ML (a:Type) = + ALL a (fun (p:all_post a) (_:HyperStack.mem) -> forall (a:result a) (h:HyperStack.mem). p a h) + +assume val failwith: string -> All 'a (fun h -> True) (fun h a h' -> Err? a /\ h==h') +assume val exit: int -> ML 'a +assume val try_with: (unit -> ML 'a) -> (exn -> ML 'a) -> ML 'a diff --git a/stage0/ulib/FStar.HyperStack.ST.fst b/stage0/ulib/FStar.HyperStack.ST.fst new file mode 100644 index 00000000000..edfe9dc041b --- /dev/null +++ b/stage0/ulib/FStar.HyperStack.ST.fst @@ -0,0 +1,335 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.HyperStack.ST + +open FStar.HyperStack + +module W = FStar.Monotonic.Witnessed +module HS = FStar.HyperStack + +open FStar.Preorder + +(* Eternal regions remain contained *) +private let eternal_region_pred (m1 m2:mem) :Type0 + = forall (r:HS.rid).{:pattern (HS.is_heap_color (color r)); (m1 `contains_region` r)} + (HS.is_eternal_region_hs r /\ m1 `contains_region` r) ==> m2 `contains_region` r + +(* rid counter increases monotonically *) +private let rid_ctr_pred (m1 m2:mem) :Type0 = get_rid_ctr m1 <= get_rid_ctr m2 + +(* + * A region r, that is: + * (a) not contained in m1, and + * (b) has rid last component less than m1.rid_ctr + * + * remains not contained in m2 + *) +private let rid_last_component_pred (m1 m2:mem) :Type0 + = forall (r:HS.rid).{:pattern (m1 `contains_region` r)} + ((~ (m1 `contains_region` r)) /\ rid_last_component r < get_rid_ctr m1) ==> + (~ (m2 `contains_region` r)) + +(* Predicate for eternal refs *) +private let eternal_refs_pred (m1 m2:mem) :Type0 + = forall (a:Type) (rel:preorder a) (r:HS.mreference a rel). + {:pattern (m1 `HS.contains` r)} + (is_mm r) \/ + (((m1 `HS.contains` r) /\ + (HS.is_eternal_region_hs (frameOf r) \/ + m2 `contains_region` (HS.frameOf r))) ==> (m2 `HS.contains` r /\ rel (HS.sel m1 r) (HS.sel m2 r))) + +(* + * Predicate for next ref address in a region's heap + * For all regions, their next_addr increases monotonically (or the region ceases to exist) + *) +private let next_ref_addr_in_a_region_pred (m1 m2:mem) :Type0 + = forall (r:HS.rid).{:pattern (m1 `contains_region` r)} + (m1 `contains_region` r) ==> + (if m2 `contains_region` r then + let h1 = Map.sel (HS.get_hmap m1) r in + let h2 = Map.sel (HS.get_hmap m2) r in + Heap.next_addr h1 <= Heap.next_addr h2 + else True) + +(* Predicate that an unused ref whose addr is less than the next addr remains unused *) +private let unused_ref_next_addr_pred (m1 m2:mem) :Type0 + = forall (rid:HS.rid).{:pattern (m1 `contains_region` rid)} + (m1 `contains_region` rid) ==> + (let h1 = Map.sel (HS.get_hmap m1) rid in + (forall (a:Type0) (rel:preorder a) (r:HS.mreference a rel).{:pattern (r `HS.unused_in` m1)} + (HS.frameOf r == rid /\ r `HS.unused_in` m1 /\ HS.as_addr r < Heap.next_addr h1) ==> + (r `HS.unused_in` m2))) + +(* Predicate for mm refs *) +private let mm_refs_pred (m1 m2:mem) :Type0 + = forall (a:Type) (rel:preorder a) (r:HS.mreference a rel).{:pattern (m1 `HS.contains` r)} + (not (is_mm r)) \/ + (m1 `HS.contains` r ==> + (m2 `HS.contains` r /\ rel (HS.sel m1 r) (HS.sel m2 r) \/ + r `HS.unused_in` m2)) + +(* The preorder is the conjunction of above predicates *) +let mem_rel :preorder mem + = HS.lemma_rid_ctr_pred (); HS.lemma_next_addr_contained_refs_addr (); + fun (m1 m2:mem) -> + eternal_region_pred m1 m2 /\ rid_ctr_pred m1 m2 /\ rid_last_component_pred m1 m2 /\ eternal_refs_pred m1 m2 /\ + next_ref_addr_in_a_region_pred m1 m2 /\ unused_ref_next_addr_pred m1 m2 /\ mm_refs_pred m1 m2 + +(* Predicates that we will witness with regions and refs *) +let region_contains_pred r = + fun m -> (not (HS.is_eternal_region_hs r)) \/ m `contains_region` r + +let ref_contains_pred #_ #_ r = + fun m -> + let rid = HS.frameOf r in + rid_last_component rid < get_rid_ctr m /\ + (m `contains_region` rid ==> ( + (HS.as_addr r < Heap.next_addr (Map.sel (HS.get_hmap m) rid)) /\ + (HS.is_mm r ==> (m `HS.contains` r \/ r `HS.unused_in` m)) /\ + ((not (HS.is_mm r)) ==> m `HS.contains` r))) + +let stable p = forall (h1:mem) (h2:mem).{:pattern (mem_rel h1 h2)} (p h1 /\ mem_rel h1 h2) ==> p h2 + 
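+(* A predicate is stable exactly when every conjunct of mem_rel preserves it;
+   only such predicates can be meaningfully witnessed below. As an illustrative
+   sketch (not a definition used elsewhere in this module), containment of an
+   eternal region is stable, since eternal_region_pred is one of the conjuncts
+   of mem_rel:
+
+     let eternal_region_live (r:HS.rid{HS.is_eternal_region_hs r}) : mem_predicate =
+       fun m -> m `contains_region` r
+
+   region_contains_pred r above is stable for the same reason. *)
+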
+let witnessed p = W.witnessed mem_rel p + +(* TODO: we should derive these using DM4F *) +let gst_get _ = admit () +let gst_put _ = admit () + +let gst_witness _ = admit () +let gst_recall _ = admit () + +let lemma_functoriality p q = W.lemma_witnessed_weakening mem_rel p q + +let same_refs_in_all_regions m0 m1 = same_refs_common contained_region m0 m1 +let same_refs_in_stack_regions m0 m1 = same_refs_common contained_stack_region m0 m1 +let same_refs_in_non_tip_regions m0 m1 = same_refs_common contained_non_tip_region m0 m1 +let same_refs_in_non_tip_stack_regions m0 m1 = same_refs_common contained_non_tip_stack_region m0 m1 + +let lemma_same_refs_in_all_regions_intro _ _ = () +let lemma_same_refs_in_all_regions_elim _ _ _ = () +let lemma_same_refs_in_stack_regions_intro _ _ = () +let lemma_same_refs_in_stack_regions_elim _ _ _ = () +let lemma_same_refs_in_non_tip_regions_intro _ _ = () +let lemma_same_refs_in_non_tip_regions_elim _ _ _ = () +let lemma_same_refs_in_non_tip_stack_regions_intro _ _ = () +let lemma_same_refs_in_non_tip_stack_regions_elim _ _ _ = () +let lemma_equal_domains_trans _ _ _ = () + +let push_frame _ = + let m0 = gst_get () in + let m1 = HS.hs_push_frame m0 in + gst_put m1 + +let pop_frame _ = + let m1 = pop (gst_get ()) in + gst_put m1 + +private let salloc_common (#a:Type) (#rel:preorder a) (init:a) (mm:bool) + :StackInline (mreference a rel) + (requires (fun m -> is_stack_region (get_tip m))) + (ensures (fun m0 s m1 -> is_stack_region (HS.frameOf s) /\ salloc_post init m0 s m1 /\ is_mm s == mm)) + = let m0 = gst_get () in + let r, m1 = HS.alloc rel (get_tip m0) init mm m0 in + Heap.lemma_next_addr_alloc rel (Map.sel (get_hmap m0) (get_tip m0)) init mm; //AR: to prove that next_addr in tip's heap increases (it is part of mem_rel) + gst_put m1; + assert (Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1))); + HS.lemma_rid_ctr_pred (); //AR: to prove that rid_last_component of r.id is < rid_ctr + gst_witness (ref_contains_pred r); + gst_witness (region_contains_pred (HS.frameOf r)); + r + +let salloc #_ #_ init = salloc_common init false +let salloc_mm #_ #_ init = salloc_common init true + +let sfree #_ #_ r = + let m0 = gst_get () in + let m1 = HS.free r m0 in + assert (Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1))); + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + Heap.lemma_next_addr_free_mm (Map.sel (HS.get_hmap m0) (HS.get_tip m0)) (HS.as_ref r); //AR: to prove that next_addr in tip's heap remains same (to satisfy the predicate in mm rel) + gst_put m1 + +let new_region r0 = + if r0 <> HS.root then gst_recall (region_contains_pred r0); //recall containment of r0 + HS.lemma_rid_ctr_pred (); + let m0 = gst_get () in + let new_rid, m1 = HS.new_eternal_region m0 r0 None in + gst_put m1; + gst_witness (region_contains_pred new_rid); + new_rid + +let new_colored_region r0 c = + if r0 <> HS.root then gst_recall (region_contains_pred r0); //recall containment of r0 + HS.lemma_rid_ctr_pred (); + let m0 = gst_get () in + let new_rid, m1 = HS.new_eternal_region m0 r0 (Some c) in + gst_put m1; + gst_witness (region_contains_pred new_rid); + new_rid + +private let ralloc_common (#a:Type) (#rel:preorder a) (i:rid) (init:a) (mm:bool) + :ST (mreference a rel) + (requires (fun m -> is_heap_color (color i) /\ m `contains_region` i)) + (ensures (fun m0 r m1 -> ralloc_post i init m0 r m1 /\ is_mm r == mm)) + = let m0 = gst_get () in + let r, m1 = HS.alloc rel i init mm m0 in + Heap.lemma_next_addr_alloc 
rel (Map.sel (HS.get_hmap m0) i) init mm; //AR: to prove that next_addr in tip's heap remains same (to satisfy the predicate in mm rel) + gst_put m1; + assert (Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1))); + HS.lemma_rid_ctr_pred (); + gst_witness (ref_contains_pred r); + gst_witness (region_contains_pred i); + r + +let ralloc #_ #_ i init = + if i <> HS.root then gst_recall (region_contains_pred i); + ralloc_common i init false + +let ralloc_mm #_ #_ i init = + if i <> HS.root then gst_recall (region_contains_pred i); + ralloc_common i init true + +let rfree #_ #_ r = + let m0 = gst_get () in + gst_recall (region_contains_pred (HS.frameOf r)); + gst_recall (ref_contains_pred r); + HS.lemma_rid_ctr_pred (); + let m1 = HS.free r m0 in + assert (Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1))); + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + Heap.lemma_next_addr_free_mm (Map.sel (HS.get_hmap m0) (HS.frameOf r)) (HS.as_ref r); //AR: to prove that next_addr in tip's heap remains same (to satisfy the predicate in mm rel) + gst_put m1 + +let op_Colon_Equals #_ #_ r v = + let m0 = gst_get () in + gst_recall (region_contains_pred (HS.frameOf r)); + gst_recall (ref_contains_pred r); + let m1 = HS.upd_tot m0 r v in + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + Heap.lemma_upd_equals_upd_tot_for_contained_refs (get_hmap m0 `Map.sel` (HS.frameOf r)) (HS.as_ref r) v; + Heap.lemma_next_addr_upd (Map.sel (HS.get_hmap m0) (HS.frameOf r)) (HS.as_ref r) v; //next_addr in ref's rid heap remains same + gst_put m1 + +let op_Bang #_ #_ r = + let m0 = gst_get () in + gst_recall (region_contains_pred (HS.frameOf r)); + gst_recall (ref_contains_pred r); + Heap.lemma_sel_equals_sel_tot_for_contained_refs (get_hmap m0 `Map.sel` (HS.frameOf r)) (HS.as_ref r); + HS.sel_tot m0 r + +let get _ = gst_get () + +let recall #_ #_ r = + gst_recall (ref_contains_pred r); + gst_recall (region_contains_pred (HS.frameOf r)) + +let recall_region i = if i <> HS.root then gst_recall (region_contains_pred i) +let witness_region i = gst_witness (region_contains_pred i) + +let witness_hsref #_ #_ r = + HS.lemma_rid_ctr_pred (); + HS.lemma_next_addr_contained_refs_addr (); + gst_witness (ref_contains_pred r) + +let mr_witness #r #_ #_ m p = + recall m; + let p_pred (#i:erid) (#a:Type) (#b:preorder a) + (r:m_rref i a b) (p:mem_predicate) + :mem_predicate + = fun m -> m `contains` r /\ p m + in + gst_witness (p_pred m p); + lemma_functoriality (p_pred m p) p + +let weaken_witness p q = + let aux () :Lemma (requires ((forall h. p h ==> q h) /\ witnessed p)) (ensures (witnessed q)) + = lemma_functoriality p q + in + FStar.Classical.move_requires aux () + +let testify (p:mem_predicate) = gst_recall p + +let testify_forall #c #p $s = + W.lemma_witnessed_forall mem_rel p; + gst_recall (fun h -> forall (x:c). p x h) + +let testify_forall_region_contains_pred #c #p $s = + let p' (x:c) :mem_predicate = region_contains_pred (p x) in + let s:squash (forall (x:c). 
witnessed (p' x)) = () in + testify_forall s + +private let mem_rel_predicate (#a:Type0) (#rel:preorder a) (r:mreference a rel) (p:mem_predicate) + :mem_predicate + = let rid = HS.frameOf r in + fun m -> + (HS.rid_last_component rid < HS.get_rid_ctr m) /\ ( //will help us prove that a deallocated region remains deallocated + (m `HS.contains` r /\ p m) \/ //the ref is contained and satisfies p + (m `contains_region` rid /\ ~ (m `HS.contains_ref_in_its_region` r) /\ HS.as_addr r < Heap.next_addr (HS.get_hmap m `Map.sel` rid) /\ r `HS.unused_in` m) \/ //the ref is deallocated, but its region is contained and next_addr > addr_of ref + (not (m `contains_region` rid))) //the region itself is not there + +let token_p #_ #_ r p = witnessed (mem_rel_predicate r p) + +let witness_p #_ #_ r p = + gst_recall (ref_contains_pred r); + gst_recall (region_contains_pred (HS.frameOf r)); + HS.lemma_next_addr_contained_refs_addr (); + gst_witness (mem_rel_predicate r p) + +let recall_p #_ #_ r p = + gst_recall (ref_contains_pred r); + gst_recall (region_contains_pred (HS.frameOf r)); + gst_recall (mem_rel_predicate r p) + +let token_functoriality #_ #_ r p q = + lemma_functoriality (mem_rel_predicate r p) (mem_rel_predicate r q) + +let lemma_witnessed_constant p = W.lemma_witnessed_constant mem_rel p + +let lemma_witnessed_nested p = + assert_norm (witnessed (fun (m:mem) -> witnessed p) == + W.witnessed mem_rel (fun (m:mem) -> W.witnessed mem_rel p)); + assert_norm (witnessed p == W.witnessed mem_rel p); + W.lemma_witnessed_nested mem_rel p +let lemma_witnessed_and p q = W.lemma_witnessed_and mem_rel p q +let lemma_witnessed_or p q = W.lemma_witnessed_or mem_rel p q +let lemma_witnessed_impl p q = W.lemma_witnessed_impl mem_rel p q +let lemma_witnessed_forall #_ p = W.lemma_witnessed_forall mem_rel p +let lemma_witnessed_exists #_ p = W.lemma_witnessed_exists mem_rel p + + +let drgn = d_hrid +let rid_of_drgn d = d + +let new_drgn r0 = + if r0 <> HS.root then gst_recall (region_contains_pred r0); //recall containment of r0 + HS.lemma_rid_ctr_pred (); + let m0 = gst_get () in + let new_rid, m1 = HS.new_freeable_heap_region m0 r0 in + gst_put m1; + gst_witness (region_contains_pred new_rid); + new_rid + +let free_drgn d = + let m0 = gst_get () in + let m1 = HS.free_heap_region m0 d in + gst_put m1 + +let ralloc_drgn #_ #_ d init = ralloc_common (rid_of_drgn d) init false +let ralloc_drgn_mm #_ #_ d init = ralloc_common (rid_of_drgn d) init true diff --git a/stage0/ulib/FStar.HyperStack.ST.fsti b/stage0/ulib/FStar.HyperStack.ST.fsti new file mode 100644 index 00000000000..695db7263ce --- /dev/null +++ b/stage0/ulib/FStar.HyperStack.ST.fsti @@ -0,0 +1,606 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.HyperStack.ST + +open FStar.HyperStack + +module HS = FStar.HyperStack + +open FStar.Preorder + +(* Setting up the preorder for mem *) + +(* Starting the predicates that constitute the preorder *) + +[@@"opaque_to_smt"] +private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r + +(* The preorder is the conjunction of above predicates *) +val mem_rel :preorder mem + +type mem_predicate = mem -> Type0 + +(* Predicates that we will witness with regions and refs *) +val region_contains_pred (r:HS.rid) :mem_predicate + +val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate + +(***** Global ST (GST) effect with put, get, witness, and recall *****) + +new_effect GST = STATE_h mem + +let gst_pre = st_pre_h mem +let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre +let gst_post (a:Type) = st_post_h mem a +let gst_wp (a:Type) = st_wp_h mem a + +unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h) +sub_effect DIV ~> GST = lift_div_gst + +(* + * AR: A few notes about the interface: + * - The interface closely mimics the interface we formalized in our POPL'18 paper + * - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones) + * - `stable p` is a precondition for `gst_witness` + * - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way + * clients would have obtained `witnessed p`, and so, `p` should already be stable + * - `lemma_functoriality` does not require stability for either `p` or `q` + * Our metatheory ensures that this is sound (without requiring stability of `q`) + * This form is useful in defining the MRRef interface (see mr_witness) + *) + +val stable (p:mem_predicate) :Type0 + +val witnessed (p:mem_predicate) :Type0 + +(* TODO: we should derive these using DM4F *) +private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0) +private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1) + +private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0)) +private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0)) + +val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)}) + : Lemma (witnessed q) + +let st_pre = gst_pre +let st_post' = gst_post' +let st_post = gst_post +let st_wp = gst_wp + +new_effect STATE = GST + +unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp +sub_effect GST ~> STATE = lift_gst_state + +(* effect State (a:Type) (wp:st_wp a) = *) +(* STATE a wp *) + +(** + WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by + code that would later extract to OCaml or by library functions + *) +effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = + STATE a + (fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *) + +(****** defining predicates for equal refs in some regions ******) + +(* +// * AR: (may be this is an overkill) +// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.) +// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier +// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r +// * which meant it could fire for all the contained regions +// * +// * instead now we define abstract predicates, e.g. 
same_refs_in_all_regions, and provide intro and elim forms +// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind +// * of predicates, and hence should fire more contextually +// * should profile the queries to see if it actually helps +// *) + +(* +// * marking these opaque, since expect them to be unfolded away beforehand +// *) +[@@"opaque_to_smt"] +unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0 + = Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r) + +[@@"opaque_to_smt"] +unfold private let contained_region :mem -> mem -> rid -> Type0 + = fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r + +[@@"opaque_to_smt"] +unfold private let contained_stack_region :mem -> mem -> rid -> Type0 + = fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r + +[@@"opaque_to_smt"] +unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0 + = fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r + +[@@"opaque_to_smt"] +unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0 + = fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r + +[@@"opaque_to_smt"] +unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) = + forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1 + +(* predicates *) +val same_refs_in_all_regions (m0 m1:mem) :Type0 +val same_refs_in_stack_regions (m0 m1:mem) :Type0 +val same_refs_in_non_tip_regions (m0 m1:mem) :Type0 +val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0 + +(* intro and elim forms *) +val lemma_same_refs_in_all_regions_intro (m0 m1:mem) + :Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1)) + [SMTPat (same_refs_in_all_regions m0 m1)] +val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid) + :Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1)) + [SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)]; + [SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]] + +val lemma_same_refs_in_stack_regions_intro (m0 m1:mem) + :Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1)) + [SMTPat (same_refs_in_stack_regions m0 m1)] +val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid) + :Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1)) + [SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)]; + [SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]] + +val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem) + :Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1)) + [SMTPat (same_refs_in_non_tip_regions m0 m1)] + +val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid) + :Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1)) + [SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)]; + [SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]] + +val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem) + :Lemma (requires (same_refs_common contained_non_tip_stack_region m0 
m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1)) + [SMTPat (same_refs_in_non_tip_stack_regions m0 m1)] +val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid) + :Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r)) + (ensures (equal_heap_dom r m0 m1)) + [SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);]; + [SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]] + +(******) + +let equal_domains (m0 m1:mem) = + get_tip m0 == get_tip m1 /\ + Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\ + same_refs_in_all_regions m0 m1 + +val lemma_equal_domains_trans (m0 m1 m2:mem) + :Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2)) + (ensures (equal_domains m0 m2)) + [SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)] + +(** + * Effect of stacked based code: the 'equal_domains' clause enforces that + * - both mem have the same tip + * - both mem reference the same heaps (their map: rid -> heap have the same domain) + * - in each region id, the corresponding heaps contain the same references on both sides + *) +effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = + STATE a + (fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *) + +(** + * Effect of heap-based code. + * - assumes that the stack is empty (tip = root) + * - corresponds to the HyperHeap ST effect + * - can call to Stack and ST code freely + * - respects the stack invariant: the stack has to be empty when returning + *) +effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = + STATE a + (fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *) + +let equal_stack_domains (m0 m1:mem) = + get_tip m0 == get_tip m1 /\ + same_refs_in_stack_regions m0 m1 + +(** + * Effect of low-level code: + * - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning + * - not constraints on heap allocation + *) +effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = + STATE a + (fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *) + +effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True) + +let inline_stack_inv h h' : GTot Type0 = + (* The frame invariant is enforced *) + get_tip h == get_tip h' /\ + (* The heap structure is unchanged *) + Map.domain (get_hmap h) == Map.domain (get_hmap h') /\ + (* Any region that is not the tip has no seen any allocation *) + same_refs_in_non_tip_regions h h' + +(** + * Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame. + * In other terms, the backend has to unfold the body into the caller's body. + * This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect + * function body as well as in a Heap effect function body + *) +effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = + STATE a + (fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. 
(pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *) + +let inline_inv h h' : GTot Type0 = + (* The stack invariant is enforced *) + get_tip h == get_tip h' /\ + (* No frame may have received an allocation but the tip *) + same_refs_in_non_tip_stack_regions h h' + +(** + * Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame. + * In other terms, the backend has to unfold the body into the caller's body. + * This effect only maintains the stack invariant: the tip is left unchanged and no allocation + * may occurs in the stack lower than the tip. + * Region allocation is not constrained. + * Heap allocation is not constrained. + *) +effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = + STATE a + (fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *) + +(** + * TODO: + * REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES + *) +effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post + +sub_effect + DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h) + + +(* + * AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST) + *) + +type mreference (a:Type) (rel:preorder a) = + r:HS.mreference a rel{witnessed (ref_contains_pred r) /\ + witnessed (region_contains_pred (HS.frameOf r))} +type mstackref (a:Type) (rel:preorder a) = + r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\ + witnessed (region_contains_pred (HS.frameOf r))} +type mref (a:Type) (rel:preorder a) = + r:HS.mref a rel{witnessed (ref_contains_pred r) /\ + witnessed (region_contains_pred (HS.frameOf r))} +type mmmstackref (a:Type) (rel:preorder a) = + r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\ + witnessed (region_contains_pred (HS.frameOf r))} +type mmmref (a:Type) (rel:preorder a) = + r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\ + witnessed (region_contains_pred (HS.frameOf r))} +type s_mref (i:rid) (a:Type) (rel:preorder a) = + r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\ + witnessed (region_contains_pred (HS.frameOf r))} +type reference (a:Type) = mreference a (Heap.trivial_preorder a) +type stackref (a:Type) = mstackref a (Heap.trivial_preorder a) +type ref (a:Type) = mref a (Heap.trivial_preorder a) +type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a) +type mmref (a:Type) = mmmref a (Heap.trivial_preorder a) +type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a) + +let is_eternal_region (r:rid) :Type0 + = HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r)) + +(* + * AR: The change to using ST.rid may not be that bad itself, + * since subtyping should take care of most instances in the client usage. + * However, one case where it could be an issue is modifies clauses that use + * Set.set rid. 
+ *) + +(** Pushes a new empty frame on the stack **) +val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1)) + +(** Removes old frame from the stack **) +val pop_frame (_:unit) + :Unsafe unit (requires (fun m -> poppable m)) + (ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1)) + +#push-options "--z3rlimit 40" +let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem) + (s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem) + = is_stack_region (get_tip m0) /\ + Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\ + get_tip m0 == get_tip m1 /\ + frameOf s = get_tip m1 /\ + HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame + m1 == HyperStack.upd m0 s init //and it's been initialized +#pop-options + +(** + * Allocates on the top-most stack frame + *) +val salloc (#a:Type) (#rel:preorder a) (init:a) + :StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m))) + (ensures salloc_post init) + +// JP, AR: these are not supported in C, and `salloc` already benefits from +// automatic memory management. +[@@ (deprecated "Use salloc instead") ] +val salloc_mm (#a:Type) (#rel:preorder a) (init:a) + :StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m))) + (ensures salloc_post init) + +[@@ (deprecated "Use salloc instead") ] +val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel) + :StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r)) + (ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0)) + +unfold +let new_region_post_common (r0 r1:rid) (m0 m1:mem) = + r1 `HS.extends` r0 /\ + HS.fresh_region r1 m0 m1 /\ + get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\ + get_tip m1 == get_tip m0 /\ + HS.live_region m0 r0 + +val new_region (r0:rid) + :ST rid + (requires (fun m -> is_eternal_region r0)) + (ensures (fun m0 r1 m1 -> + new_region_post_common r0 r1 m0 m1 /\ + HS.color r1 = HS.color r0 /\ + is_eternal_region r1 /\ + (r1, m1) == HS.new_eternal_region m0 r0 None)) + +val new_colored_region (r0:rid) (c:int) + :ST rid + (requires (fun m -> HS.is_heap_color c /\ is_eternal_region r0)) + (ensures (fun m0 r1 m1 -> + new_region_post_common r0 r1 m0 m1 /\ + HS.color r1 = c /\ + is_eternal_region r1 /\ + (r1, m1) == HS.new_eternal_region m0 r0 (Some c))) + +let ralloc_post (#a:Type) (#rel:preorder a) (i:rid) (init:a) (m0:mem) + (x:mreference a rel) (m1:mem) = + let region_i = get_hmap m0 `Map.sel` i in + as_ref x `Heap.unused_in` region_i /\ + i `is_in` get_hmap m0 /\ + i = frameOf x /\ + m1 == upd m0 x init + +val ralloc (#a:Type) (#rel:preorder a) (i:rid) (init:a) + :ST (mref a rel) (requires (fun m -> is_eternal_region i)) + (ensures (ralloc_post i init)) + +val ralloc_mm (#a:Type) (#rel:preorder a) (i:rid) (init:a) + :ST (mmmref a rel) (requires (fun m -> is_eternal_region i)) + (ensures (ralloc_post i init)) + +(* + * AR: 12/26: For a ref to be readable/writable/free-able, + * the client can either prove contains + * or give us enough so that we can use monotonicity to derive contains + *) +let is_live_for_rw_in (#a:Type) (#rel:preorder a) (r:mreference a rel) (m:mem) :Type0 = + (m `contains` r) \/ + (let i = HS.frameOf r in + (is_eternal_region i \/ i `HS.is_above` get_tip m) /\ + (not (is_mm r) \/ m `HS.contains_ref_in_its_region` r)) + +val rfree (#a:Type) (#rel:preorder a) (r:mreference a rel{HS.is_mm r /\ HS.is_heap_color (HS.color (HS.frameOf r))}) + :ST unit (requires (fun m0 -> r 
`is_live_for_rw_in` m0)) + (ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0)) + +unfold let assign_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a) (m0:mem) (_:unit) (m1:mem) = + m0 `contains` r /\ m1 == HyperStack.upd m0 r v + +(** + * Assigns, provided that the reference exists. + * Guarantees the strongest low-level effect: Stack + *) +val op_Colon_Equals (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a) + :STL unit (requires (fun m -> r `is_live_for_rw_in` m /\ rel (HS.sel m r) v)) + (ensures (assign_post r v)) + +unfold let deref_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (m0:mem) (x:a) (m1:mem) = + m1 == m0 /\ m0 `contains` r /\ x == HyperStack.sel m0 r + +(** + * Dereferences, provided that the reference exists. + * Guarantees the strongest low-level effect: Stack + *) +val op_Bang (#a:Type) (#rel:preorder a) (r:mreference a rel) + :Stack a (requires (fun m -> r `is_live_for_rw_in` m)) + (ensures (deref_post r)) + +let modifies_none (h0:mem) (h1:mem) = modifies Set.empty h0 h1 + +// NS: This version is just fine; all the operation on mem are ghost +// and we can rig it so that mem just get erased at the end +(** + * Returns the current stack of heaps --- it should be erased + *) +val get (_:unit) + :Stack mem (requires (fun m -> True)) + (ensures (fun m0 x m1 -> m0 == x /\ m1 == m0)) + +(** + * We can only recall refs with mm bit unset, not stack refs + *) +val recall (#a:Type) (#rel:preorder a) (r:mreference a rel{not (HS.is_mm r)}) + :Stack unit (requires (fun m -> is_eternal_region (HS.frameOf r) \/ m `contains_region` (HS.frameOf r))) + (ensures (fun m0 _ m1 -> m0 == m1 /\ m1 `contains` r)) + +(** + * We can only recall eternal regions, not stack regions + *) +val recall_region (i:rid{is_eternal_region i}) + :Stack unit (requires (fun m -> True)) + (ensures (fun m0 _ m1 -> m0 == m1 /\ i `is_in` get_hmap m1)) + +val witness_region (i:rid) + :Stack unit (requires (fun m0 -> HS.is_eternal_region_hs i ==> i `is_in` get_hmap m0)) + (ensures (fun m0 _ m1 -> m0 == m1 /\ witnessed (region_contains_pred i))) + +val witness_hsref (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) + :ST unit (fun h0 -> h0 `HS.contains` r) + (fun h0 _ h1 -> h0 == h1 /\ witnessed (ref_contains_pred r)) + +(** MR witness etc. **) + +type erid = r:rid{is_eternal_region r} + +type m_rref (r:erid) (a:Type) (b:preorder a) = x:mref a b{HS.frameOf x = r} + +(* states that p is preserved by any valid updates on r; note that h0 and h1 may differ arbitrarily elsewhere, hence proving stability usually requires that p depends only on r's content. + *) +unfold type stable_on (#a:Type0) (#rel:preorder a) (p:mem_predicate) (r:mreference a rel) + = forall (h0 h1:mem).{:pattern (p h0); rel (HS.sel h0 r) (HS.sel h1 r)} + (p h0 /\ rel (HS.sel h0 r) (HS.sel h1 r)) ==> p h1 + +(* + * The stable_on_t and mr_witness API is here for legacy reasons, + * the preferred API is stable_on and witness_p + *) + +unfold type stable_on_t (#i:erid) (#a:Type) (#b:preorder a) + (r:m_rref i a b) (p:mem_predicate) + = stable_on p r + +val mr_witness (#r:erid) (#a:Type) (#b:preorder a) + (m:m_rref r a b) (p:mem_predicate) + :ST unit (requires (fun h0 -> p h0 /\ stable_on_t m p)) + (ensures (fun h0 _ h1 -> h0==h1 /\ witnessed p)) + +val weaken_witness (p q:mem_predicate) + :Lemma ((forall h. 
p h ==> q h) /\ witnessed p ==> witnessed q) + +val testify (p:mem_predicate) + :ST unit (requires (fun _ -> witnessed p)) + (ensures (fun h0 _ h1 -> h0==h1 /\ p h1)) + +val testify_forall (#c:Type) (#p:(c -> mem -> Type0)) + ($s:squash (forall (x:c). witnessed (p x))) + :ST unit (requires (fun h -> True)) + (ensures (fun h0 _ h1 -> h0==h1 /\ (forall (x:c). p x h1))) + +val testify_forall_region_contains_pred (#c:Type) (#p:(c -> GTot rid)) + ($s:squash (forall (x:c). witnessed (region_contains_pred (p x)))) + :ST unit (requires (fun _ -> True)) + (ensures (fun h0 _ h1 -> h0 == h1 /\ + (forall (x:c). HS.is_eternal_region_hs (p x) ==> h1 `contains_region` (p x)))) + + +(****** Begin: preferred API for witnessing and recalling predicates ******) + + +val token_p (#a:Type0) (#rel:preorder a) (r:mreference a rel) (p:mem_predicate) :Type0 + +val witness_p (#a:Type0) (#rel:preorder a) (r:mreference a rel) (p:mem_predicate) + :ST unit (fun h0 -> p h0 /\ p `stable_on` r) + (fun h0 _ h1 -> h0 == h1 /\ token_p r p) + +val recall_p (#a:Type0) (#rel:preorder a) (r:mreference a rel) (p:mem_predicate) + :ST unit (fun h0 -> ((is_eternal_region (HS.frameOf r) /\ not (HS.is_mm r)) \/ h0 `HS.contains` r) /\ token_p r p) + (fun h0 _ h1 -> h0 == h1 /\ h0 `HS.contains` r /\ p h0) + +val token_functoriality + (#a:Type0) (#rel:preorder a) (r:mreference a rel) + (p:mem_predicate{token_p r p}) (q:mem_predicate{forall (h:mem). p h ==> q h}) + : Lemma (token_p r q) + + +(****** End: preferred API for witnessing and recalling predicates ******) + + +type ex_rid = erid + + +(****** logical properties of witnessed ******) + +val lemma_witnessed_constant (p:Type0) + :Lemma (witnessed (fun (m:mem) -> p) <==> p) + +val lemma_witnessed_nested (p:mem_predicate) + : Lemma (witnessed (fun (m:mem) -> witnessed p) <==> witnessed p) + +val lemma_witnessed_and (p q:mem_predicate) + :Lemma (witnessed (fun s -> p s /\ q s) <==> (witnessed p /\ witnessed q)) + +val lemma_witnessed_or (p q:mem_predicate) + :Lemma ((witnessed p \/ witnessed q) ==> witnessed (fun s -> p s \/ q s)) + +val lemma_witnessed_impl (p q:mem_predicate) + :Lemma ((witnessed (fun s -> p s ==> q s) /\ witnessed p) ==> witnessed q) + +val lemma_witnessed_forall (#t:Type) (p:(t -> mem_predicate)) + :Lemma ((witnessed (fun s -> forall x. p x s)) <==> (forall x. witnessed (p x))) + +val lemma_witnessed_exists (#t:Type) (p:(t -> mem_predicate)) + :Lemma ((exists x. witnessed (p x)) ==> witnessed (fun s -> exists x. 
p x s))
+
+
+(*** Support for dynamic regions ***)
+
+
+let is_freeable_heap_region (r:rid) : Type0 =
+ HS.is_heap_color (color r) /\ HS.rid_freeable r /\ witnessed (region_contains_pred r)
+
+type d_hrid = r:rid{is_freeable_heap_region r}
+
+val drgn : Type0
+
+val rid_of_drgn (d:drgn) : d_hrid
+
+val new_drgn (r0:rid)
+: ST drgn
+ (requires fun m -> is_eternal_region r0)
+ (ensures fun m0 d m1 ->
+ let r1 = rid_of_drgn d in
+ new_region_post_common r0 r1 m0 m1 /\
+ HS.color r1 == HS.color r0 /\
+ (r1, m1) == HS.new_freeable_heap_region m0 r0)
+
+val free_drgn (d:drgn)
+: ST unit
+ (requires fun m -> contains_region m (rid_of_drgn d))
+ (ensures fun m0 _ m1 -> m1 == HS.free_heap_region m0 (rid_of_drgn d))
+
+val ralloc_drgn (#a:Type) (#rel:preorder a) (d:drgn) (init:a)
+: ST (mreference a rel)
+ (requires fun m -> m `contains_region` (rid_of_drgn d))
+ (ensures fun m0 r m1 ->
+ not (HS.is_mm r) /\
+ ralloc_post (rid_of_drgn d) init m0 r m1)
+
+val ralloc_drgn_mm (#a:Type) (#rel:preorder a) (d:drgn) (init:a)
+: ST (mreference a rel)
+ (requires fun m -> m `contains_region` (rid_of_drgn d))
+ (ensures fun m0 r m1 ->
+ HS.is_mm r /\
+ ralloc_post (rid_of_drgn d) init m0 r m1)
+
+
+(* This causes the verification condition for the continuation
+of the call to this function to be done in a separate Z3 query. *)
+inline_for_extraction
+let break_vc ()
+ : STATE unit (fun p h -> spinoff (squash (p () h)))
+ = ()
diff --git a/stage0/ulib/FStar.HyperStack.fst b/stage0/ulib/FStar.HyperStack.fst
new file mode 100644
index 00000000000..c8f49f6f255
--- /dev/null
+++ b/stage0/ulib/FStar.HyperStack.fst
@@ -0,0 +1,43 @@
+(*
+ Copyright 2008-2018 Microsoft Research
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*)
+module FStar.HyperStack
+
+include FStar.Monotonic.HyperStack
+
+type reference (a:Type) = mreference a (Heap.trivial_preorder a)
+
+let stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
+let ref (a:Type) = mref a (Heap.trivial_preorder a)
+
+let mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
+let mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
+type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
+
+(* Two references with different reads are disjoint. *)
+
+let reference_distinct_sel_disjoint
+ (#a:Type0) (h: mem) (r1: reference a) (r2: reference a)
+: Lemma
+ (requires (
+ h `contains` r1 /\
+ h `contains` r2 /\
+ frameOf r1 == frameOf r2 /\
+ as_addr r1 == as_addr r2
+ ))
+ (ensures (
+ sel h r1 == sel h r2
+ ))
+= mreference_distinct_sel_disjoint h r1 r2
diff --git a/stage0/ulib/FStar.IFC.fst b/stage0/ulib/FStar.IFC.fst
new file mode 100644
index 00000000000..fdb8645cb87
--- /dev/null
+++ b/stage0/ulib/FStar.IFC.fst
@@ -0,0 +1,26 @@
+(*
+ Copyright 2008-2018 Microsoft Research
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.IFC + +let protected #sl l b = b + +let reveal #sl #l #b x = x +let hide #sl #l #b x = x +let reveal_hide #l #t #b x = () +let hide_reveal #sl #l #b x = () + +let map #a #b #sl #l x f = f x +let join #sl #l1 #l2 #a x = x diff --git a/stage0/ulib/FStar.IFC.fsti b/stage0/ulib/FStar.IFC.fsti new file mode 100644 index 00000000000..b242180817e --- /dev/null +++ b/stage0/ulib/FStar.IFC.fsti @@ -0,0 +1,134 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.IFC + +/// FStar.IFC provides a simple, generic abstraction for monadic +/// information-flow control based on a user-defined (semi-)lattice of +/// information flow labels. +/// +/// The main idea is to provide an abstract type [protected a l], +/// encapsulating values of type [a] carrying information at +/// confidentiality level [l]. Operations that compute on the +/// underlying [a] are instrumented to reflect the sensitivity of +/// their arguments on their results. +/// +/// Several papers develop this idea, ranging from +/// +/// Fable: A language for enforcing user-defined security policies +/// http://www.cs.umd.edu/~nswamy/papers/fable-tr.pdf +/// +/// To more modern variants like +/// https://hackage.haskell.org/package/lio + +(**** Basic definitions for a join semilattice *) + +(** The [lub] is associative *) +let associative #a (f: (a -> a -> a)) = forall x y z. f (f x y) z == f x (f y z) + +(** The [lub] is commutative *) +let commutative #a (f: (a -> a -> a)) = forall x y. f x y == f y x + +(** The [lub] is idempotent *) +let idempotent #a (f: (a -> a -> a)) = forall x. f x x == x + +(** A semilattice has a top element and a + associative-commutative-idempotent least upper bound operator. + This is effectively the typeclass of a semilattice, however, we + program explicitly with semilattice, rather than use typeclass + instantiation. 
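For concreteness, here is a minimal sketch of the classic two-point
Low/High confidentiality lattice phrased as such a value (illustrative
only: [level], [lub_level] and [low_high] are hypothetical names, and
the associativity/commutativity/idempotence side conditions on
[lub_level] are small case splits that may need a hint such as
[allow_inversion level]):

  type level = | Low | High

  let lub_level (x y: level) : level =
    match x, y with
    | Low, Low -> Low
    | _, _ -> High

  let low_high : semilattice = SemiLattice #level High lub_level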
*) +noeq +type semilattice : Type u#(c + 1) = + | SemiLattice : + #carrier: Type u#c -> + top: carrier -> + lub: (f: (carrier -> carrier -> carrier){associative f /\ commutative f /\ idempotent f}) + -> semilattice + +(** For most of the rest of this development, we'll use an erased + counterpart of a semilattice *) +let sl:Type u#(c + 1) = FStar.Ghost.erased semilattice + +(** A lattice element is just an element of the carrier type *) +let lattice_element (sl: sl) = Ghost.erased (SemiLattice?.carrier (Ghost.reveal sl)) + +(** A convenience for joining elements in the lattice *) +unfold +let lub #sl (x: lattice_element sl) (y: lattice_element sl) : Tot (lattice_element sl) = + Ghost.hide (SemiLattice?.lub (Ghost.reveal sl) (Ghost.reveal x) (Ghost.reveal y)) + +(** The main type provided by this module is [protected l b] i.e,, a + [b]-typed value protected at IFC level [l]. + + [protected b l] is in a bijection with [b], as shown by [reveal] + and [hide] below *) +val protected (#sl: sl u#c) (l: lattice_element sl) (b: Type u#b) : Type u#b + +(** [reveal] projects a [b] from a [protected b l], but incurs a ghost effect *) +val reveal (#sl: _) (#l: lattice_element sl) (#b: _) (x: protected l b) : GTot b + +(** [hide] injects a [b] into a [protected b l]. + + Note, any [b] can be promoted to a [protected l b] i.e., + [protected l b] is only meant to enforce confidentiality *) +val hide (#sl: _) (#l: lattice_element sl) (#b: _) (x: b) : Tot (protected l b) + +(** The next pair of lemmas show that reveal/hide are inverses *) +val reveal_hide (#l #t #b: _) (x: b) : Lemma (reveal (hide #l #t x) == x) [SMTPat (hide #l #t x)] + +val hide_reveal (#sl: _) (#l: lattice_element sl) (#b: _) (x: protected l b) + : Lemma (hide (reveal x) == x) [SMTPat (reveal x)] + +/// [protected l b] is a form of parameterized monad +/// It provides: +/// -- [return] (via [hide]) +/// -- [map] (i.e., it's a functor) +/// -- [join] (so it's also a monad) +/// Which we package up as a [bind] + +unfold +let return #sl #a (l: lattice_element sl) (x: a) : protected l a = hide x + +(** This is just a map of [f] over [x] But, notice the order of + arguments is flipped We write [map x f] instead of [map f x] so + that [f]'s type can depend on [x] *) +val map (#a #b #sl: _) (#l: lattice_element sl) (x: protected l a) (f: (y: a{y == reveal x} -> b)) + : Tot (y: protected l b {reveal y == f (reveal x)}) + +(** This is almost a regular monadic [join] + Except notice that the label of the result is the [lub] + of the both the labels in the argument *) +val join (#sl: _) (#l1 #l2: lattice_element sl) (#a: _) (x: protected l1 (protected l2 a)) + : Tot (y: protected (l1 `lub` l2) a {reveal y == reveal (reveal x)}) + +(** This is almost like a regular bind, except like [map] the type of + the continuation's argument depends on the argument [x]; and, like + [join], the indexes on the result are at least as high as the + indexes of the argument + + As such, any computation that observes the protected value held in + [x] has a secrecy level at least as secret as [x] itself *) +unfold +let (let>>) + #sl + (#l1: lattice_element sl) + #a + (x: protected l1 a) + (#l2: lattice_element sl) + #b + (f: (y: a{y == reveal x} -> protected l2 b)) + : Tot (protected (l1 `lub` l2) b) = join (map x f) + diff --git a/stage0/ulib/FStar.IO.fsti b/stage0/ulib/FStar.IO.fsti new file mode 100644 index 00000000000..4a65a0b20dd --- /dev/null +++ b/stage0/ulib/FStar.IO.fsti @@ -0,0 +1,82 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the 
Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*)
+module FStar.IO
+
+open FStar.All
+
+exception EOF
+
+new
+val fd_read : Type0
+new
+val fd_write : Type0
+
+val stdin : fd_read
+val stdout : fd_write
+val stderr : fd_write
+
+val print_newline : unit -> ML unit
+val print_string : string -> ML unit
+
+(* assume val print_nat_hex : nat -> ML unit *)
+(* assume val print_nat_dec : nat -> ML unit *)
+
+(* Print as hexadecimal with a leading 0x *)
+val print_uint8 : FStar.UInt8.t -> ML unit
+val print_uint16 : FStar.UInt16.t -> ML unit
+val print_uint32 : FStar.UInt32.t -> ML unit
+val print_uint64 : FStar.UInt64.t -> ML unit
+
+(* Print as decimal *)
+val print_uint8_dec : FStar.UInt8.t -> ML unit
+val print_uint16_dec : FStar.UInt16.t -> ML unit
+val print_uint32_dec : FStar.UInt32.t -> ML unit
+val print_uint64_dec : FStar.UInt64.t -> ML unit
+
+(* Print as hex in fixed width, no leading 0x *)
+val print_uint8_hex_pad : FStar.UInt8.t -> ML unit
+val print_uint16_hex_pad : FStar.UInt16.t -> ML unit
+val print_uint32_hex_pad : FStar.UInt32.t -> ML unit
+val print_uint64_hex_pad : FStar.UInt64.t -> ML unit
+
+(* Print as decimal, zero padded to maximum possible length *)
+val print_uint8_dec_pad : FStar.UInt8.t -> ML unit
+val print_uint16_dec_pad : FStar.UInt16.t -> ML unit
+val print_uint32_dec_pad : FStar.UInt32.t -> ML unit
+val print_uint64_dec_pad : FStar.UInt64.t -> ML unit
+
+val print_any : 'a -> ML unit
+val input_line : unit -> ML string
+val input_int : unit -> ML int
+val input_float : unit -> ML FStar.Float.float
+val open_read_file : string -> ML fd_read
+val open_write_file : string -> ML fd_write
+val close_read_file : fd_read -> ML unit
+val close_write_file : fd_write -> ML unit
+val read_line : fd_read -> ML string
+val write_string : fd_write -> string -> ML unit
+
+(*
+ An UNSOUND escape hatch for printf-debugging;
+ Although it always returns false, we mark it
+ as returning a bool, so that extraction doesn't
+ erase this call.
+
+ Note: no guarantees are provided regarding the order
+ of evaluation of this function; since it is marked as pure,
+ the compiler may re-order or replicate it.
+*)
+val debug_print_string : string -> Tot bool
diff --git a/stage0/ulib/FStar.ImmutableArray.Base.fsti b/stage0/ulib/FStar.ImmutableArray.Base.fsti
new file mode 100644
index 00000000000..f1aa23f5557
--- /dev/null
+++ b/stage0/ulib/FStar.ImmutableArray.Base.fsti
@@ -0,0 +1,57 @@
+(*
+ Copyright 2022 Microsoft Research
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ Authors: N.
Swamy +*) + +(* This module provides a primitive type of immutable arrays, + implemented in OCaml by an array. + + The main intended usage of this module, as suggested by Jay Lorch, + is to provide a sequence-like type with constant-time random access + of elements, as opposed to FStar.Seq and related types, which + provide only linear time access. + + Both the F* normalizer and NBE engine are aware of this type and + reduce its three functions, `of_list`, `length`, and `index`, by + invoking the corresponding operations on the underlying OCaml array + that represents a `t`. + + See tests/micro-benchmarks/TestImmutableArray.fst for some samples + + And also FStar.ImmutableArray, which includes this interface and + augments it with various properties of the functions below. + +*) +module FStar.ImmutableArray.Base + +(* The main type of immutable arrays *) +new +val t ([@@@strictly_positive] a:Type u#a) : Type u#a + +(* An array supports equality when its elements also do. *) +val array_has_eq (a : Type) : Lemma + (requires hasEq a) + (ensures hasEq (t a)) + [SMTPat (hasEq (t a))] + +(* Creating an immutable array from a list *) +val of_list (#a:Type u#a) (l:list a) : Tot (t a) + +(* The length of an array (is the length of the list from which it was created) *) +val length (#a:Type) (s:t a) : Tot nat + +(* Indexing the array `s` at offset `i`, which must be within bounds *) +val index (#a:Type) (s:t a) (i:nat { i < length s }) : Tot a diff --git a/stage0/ulib/FStar.ImmutableArray.fsti b/stage0/ulib/FStar.ImmutableArray.fsti new file mode 100644 index 00000000000..402d9820290 --- /dev/null +++ b/stage0/ulib/FStar.ImmutableArray.fsti @@ -0,0 +1,81 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: N. Swamy +*) + +(* This module provides a primitive type of immutable arrays, + implemented in OCaml by an array. + + The main intended usage of this module, as suggested by Jay Lorch, + is to provide a sequence-like type with constant-time random access + of elements, as opposed to FStar.Seq and related types, which + provide only linear time access. + + Both the F* normalizer and NBE engine are aware of this type and + reduce its three functions, `of_list`, `length`, and `index`, by + invoking the corresponding operations on the underlying OCaml array + that represents a `t`. + + See tests/micro-benchmarks/TestImmutableArray.fst for some samples + + And also FStar.ImmutableArray, which includes this interface and + augments it with various properties of the functions below. 
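As a small, hypothetical usage sketch (client-side code, not part of
this interface; the asserts rely on the SMT patterns of
[to_list_of_list], [length_spec] and [index_spec] below, and may need
a little fuel to normalize the list literal):

  let example () : unit =
    let a = of_list [10; 20] in
    assert (length a = 2);
    assert (index a 1 == 20)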
+ +*) +module FStar.ImmutableArray +include FStar.ImmutableArray.Base + +(* Converting an immutable array back to a list *) +val to_list (#a:Type u#a) (s:t a) + : Tot (list a) + +(* to_list is the inverse of of_list *) +val to_list_of_list (#a:Type u#a) (l:list a) + : Lemma (to_list (of_list l) == l) + [SMTPat (of_list l)] + +(* of_list is the inverse of to_list *) +val of_list_to_list (#a:Type u#a) (s:t a) + : Lemma (of_list (to_list s) == s) + +(* The length of an immutable array is the length of its corresponding list *) +val length_spec (#a:Type u#a) (s:t a) + : Lemma (length s == FStar.List.Tot.length (to_list s)) + [SMTPat (length s)] + +(* The indexes of an immutable array are in correspondence with its underling list *) +val index_spec (#a:Type u#a) (s:t a) (i:nat{ i < length s }) + : Lemma (index s i == FStar.List.Tot.index (to_list s) i) + [SMTPat (index s i)] + +(* The list of elements precedes the array.*) +val to_list_precedes (#a:Type u#a) (s:t a) + : Lemma (to_list s << s) + +(* Idem. *) +let of_list_precedes (#a:Type u#a) (l:list a) + : Lemma (l << of_list l) + = to_list_precedes (of_list l) + +(* An explicit proof that elements of the array precede the array. *) +let elem_precedes (#a:Type u#a) (s:t a) (i : nat{i < length s}) + : Lemma (index s i << s) + = FStar.List.Tot.( + to_list_precedes s; + let l = to_list s in + assert (memP (index l i) l); + memP_precedes (index l i) l + ) diff --git a/stage0/ulib/FStar.IndefiniteDescription.fst b/stage0/ulib/FStar.IndefiniteDescription.fst new file mode 100644 index 00000000000..0bad5da531f --- /dev/null +++ b/stage0/ulib/FStar.IndefiniteDescription.fst @@ -0,0 +1,67 @@ +(* + Copyright 2008-2024 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.IndefiniteDescription + +/// Indefinite description is an axiom that allows picking a witness +/// for existentially quantified predicate. See the interface for more +/// context. + +(** A proof for squash p can be eliminated to get p in the Ghost effect *) +irreducible let elim_squash (#p:Type u#a) (s:squash p) : GTot p = admit () + +(** Given a classical proof of [exists x. p x], we can exhibit an erased + (computationally irrelevant) a witness [x:erased a] validating + [p x]. *) +irreducible +let indefinite_description_ghost (a: Type) (p: (a -> prop) { exists x. p x }) + : GTot (x: a { p x }) + = let h : squash (exists x. p x) = () in + let h : (exists x. p x) = elim_squash h in + let (| x, h |) : x:a & p x = elim_squash h in + x + +(** A version in ghost is easily derivable *) +let indefinite_description_tot (a:Type) (p:(a -> prop) { exists x. 
p x }) + : Tot (w:Ghost.erased a{ p w }) + = Ghost.hide (indefinite_description_ghost a p) + +(** Indefinite description entails the a strong form of the excluded + middle, i.e., one can case-analyze the truth of a proposition + (only in [Ghost]) *) +let strong_excluded_middle (p: Type0) : GTot (b: bool{b = true <==> p}) = + let h : squash (p \/ ~p) = () in + let h : (p \/ ~p) = elim_squash h in + let h : sum p (~p) = elim_squash h in + match h with + | Left h -> true + | Right h -> false + +(** We also can combine this with a the classical tautology converting + with a [forall] and an [exists] to extract a witness of validity of [p] from + a classical proof that [p] is not universally invalid. + + Note, F*+SMT can easily prove, since it is just classical logic: + [(~(forall n. ~(p n))) ==> (exists n. p n) ] *) +let stronger_markovs_principle (p: (nat -> GTot bool)) + : Ghost nat (requires (~(forall (n: nat). ~(p n)))) (ensures (fun n -> p n)) = + indefinite_description_ghost _ (fun n -> p n==true) + +(** A variant of the previous lemma, but for a [prop] rather than a + boolean predicate *) +let stronger_markovs_principle_prop (p: (nat -> GTot prop)) + : Ghost nat (requires (~(forall (n: nat). ~(p n)))) (ensures (fun n -> p n)) = + indefinite_description_ghost _ p \ No newline at end of file diff --git a/stage0/ulib/FStar.IndefiniteDescription.fsti b/stage0/ulib/FStar.IndefiniteDescription.fsti new file mode 100644 index 00000000000..109a7593577 --- /dev/null +++ b/stage0/ulib/FStar.IndefiniteDescription.fsti @@ -0,0 +1,59 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.IndefiniteDescription + +/// Indefinite description is an axiom that allows picking a witness +/// for existentially quantified predicate. +/// +/// Many other axioms can be derived from this one: Use it with care! +/// +/// For some background on the axiom, see: +/// +/// https://github.com/coq/coq/wiki/CoqAndAxioms#indefinite-description--hilberts-epsilon-operator +/// https://en.wikipedia.org/wiki/Theory_of_descriptions#Indefinite_descriptions + +(** The main axiom: a proof for squash p can be eliminated to get p in the Ghost effect *) +val elim_squash (#p:Type u#a) (s:squash p) : GTot p + +(** Given a classical proof of [exists x. p x], we can exhibit + a witness [x:erased a] validating [p x] in GTot. *) +val indefinite_description_ghost (a: Type) (p: (a -> prop) { exists x. p x }) + : GTot (x: a { p x }) + +(** Given a classical proof of [exists x. p x], we can exhibit an erased + (computationally irrelevant) a witness [x:erased a] validating [p x]. *) +val indefinite_description_tot (a:Type) (p:(a -> prop) { exists x. 
p x }) + : Tot (w:Ghost.erased a{ p w }) + +(** Indefinite description entails the a strong form of the excluded + middle, i.e., one can case-analyze the truth of a proposition + (only in [Ghost]) *) +val strong_excluded_middle (p: Type0) : GTot (b: bool{b = true <==> p}) + +(** We also can combine this with a the classical tautology converting + with a [forall] and an [exists] to extract a witness of validity of [p] from + a classical proof that [p] is not universally invalid. + + Note, F*+SMT can easily prove, since it is just classical logic: + [(~(forall n. ~(p n))) ==> (exists n. p n) ] *) +val stronger_markovs_principle (p: (nat -> GTot bool)) + : Ghost nat (requires (~(forall (n: nat). ~(p n)))) (ensures (fun n -> p n)) + +(** A variant of the previous lemma, but for a [prop] rather than a + boolean predicate *) +val stronger_markovs_principle_prop (p: (nat -> GTot prop)) + : Ghost nat (requires (~(forall (n: nat). ~(p n)))) (ensures (fun n -> p n)) \ No newline at end of file diff --git a/stage0/ulib/FStar.Int.Cast.Full.fst b/stage0/ulib/FStar.Int.Cast.Full.fst new file mode 100644 index 00000000000..f0ce291e467 --- /dev/null +++ b/stage0/ulib/FStar.Int.Cast.Full.fst @@ -0,0 +1,31 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int.Cast.Full + +include FStar.Int.Cast + +module U64 = FStar.UInt64 +module U128 = FStar.UInt128 + +inline_for_extraction noextract +val uint64_to_uint128: a:U64.t -> b:U128.t{U128.v b == U64.v a} +inline_for_extraction noextract +let uint64_to_uint128 a = U128.uint64_to_uint128 a + +inline_for_extraction noextract +val uint128_to_uint64: a:U128.t -> b:U64.t{U64.v b == U128.v a % pow2 64} +inline_for_extraction noextract +let uint128_to_uint64 a = U128.uint128_to_uint64 a diff --git a/stage0/ulib/FStar.Int.Cast.fst b/stage0/ulib/FStar.Int.Cast.fst new file mode 100644 index 00000000000..ab6def87d7b --- /dev/null +++ b/stage0/ulib/FStar.Int.Cast.fst @@ -0,0 +1,235 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
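As an illustrative aside on the conventions used in this module (the
names [ex1] and [ex2] are hypothetical): narrowing unsigned casts below
truncate modulo 2^k, while narrowing signed casts wrap with [@%], which
is why the latter carry "deprecated ... implementation-defined"
attributes for C extraction. For example:

  let ex1 = uint16_to_uint8 (U16.uint_to_t 300)
  let ex2 = int32_to_int8 (I32.int_to_t 200)

Here U8.v ex1 = 300 % 256 = 44 and I8.v ex2 = 200 @% 256 = -56.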
+*) +module FStar.Int.Cast + +module U8 = FStar.UInt8 +module U16 = FStar.UInt16 +module U32 = FStar.UInt32 +module U64 = FStar.UInt64 +module I8 = FStar.Int8 +module I16 = FStar.Int16 +module I32 = FStar.Int32 +module I64 = FStar.Int64 + +let op_At_Percent = FStar.Int.op_At_Percent + +/// Unsigned to unsigned + +val uint8_to_uint64: a:U8.t -> Tot (b:U64.t{U64.v b = U8.v a}) +let uint8_to_uint64 a = U64.uint_to_t (U8.v a) + +val uint8_to_uint32: a:U8.t -> Tot (b:U32.t{U32.v b = U8.v a}) +let uint8_to_uint32 x = U32.uint_to_t (U8.v x) + +val uint8_to_uint16: a:U8.t -> Tot (b:U16.t{U16.v b = U8.v a}) +let uint8_to_uint16 x = U16.uint_to_t (U8.v x) + +val uint16_to_uint64: a:U16.t -> Tot (b:U64.t{U64.v b = U16.v a}) +let uint16_to_uint64 x = U64.uint_to_t (U16.v x) + +val uint16_to_uint32: a:U16.t -> Tot (b:U32.t{U32.v b = U16.v a}) +let uint16_to_uint32 x = U32.uint_to_t (U16.v x) + +val uint16_to_uint8 : a:U16.t -> Tot (b:U8.t{U8.v b = U16.v a % pow2 8}) +let uint16_to_uint8 x = U8.uint_to_t (U16.v x % pow2 8) + +val uint32_to_uint64: a:U32.t -> Tot (b:U64.t{U64.v b = U32.v a}) +let uint32_to_uint64 x = U64.uint_to_t (U32.v x) + +val uint32_to_uint16: a:U32.t -> Tot (b:U16.t{U16.v b = U32.v a % pow2 16}) +let uint32_to_uint16 x = U16.uint_to_t (U32.v x % pow2 16) + +val uint32_to_uint8 : a:U32.t -> Tot (b:U8.t{U8.v b = U32.v a % pow2 8}) +let uint32_to_uint8 x = U8.uint_to_t (U32.v x % pow2 8) + +val uint64_to_uint32: a:U64.t -> Tot (b:U32.t{U32.v b = U64.v a % pow2 32}) +let uint64_to_uint32 x = U32.uint_to_t (U64.v x % pow2 32) + +val uint64_to_uint16: a:U64.t -> Tot (b:U16.t{U16.v b = U64.v a % pow2 16}) +let uint64_to_uint16 x = U16.uint_to_t (U64.v x % pow2 16) + +val uint64_to_uint8 : a:U64.t -> Tot (b:U8.t{U8.v b = U64.v a % pow2 8}) +let uint64_to_uint8 x = U8.uint_to_t (U64.v x % pow2 8) + +/// Signed to signed + +val int8_to_int64: a:I8.t -> Tot (b:I64.t{I64.v b = I8.v a}) +let int8_to_int64 x = I64.int_to_t (I8.v x) + +val int8_to_int32: a:I8.t -> Tot (b:I32.t{I32.v b = I8.v a}) +let int8_to_int32 x = I32.int_to_t (I8.v x) + +val int8_to_int16: a:I8.t -> Tot (b:I16.t{I16.v b = I8.v a}) +let int8_to_int16 x = I16.int_to_t (I8.v x) + +val int16_to_int64: a:I16.t -> Tot (b:I64.t{I64.v b = I16.v a}) +let int16_to_int64 x = I64.int_to_t (I16.v x @% pow2 64) + +val int16_to_int32: a:I16.t -> Tot (b:I32.t{I32.v b = I16.v a}) +let int16_to_int32 x = I32.int_to_t (I16.v x @% pow2 32) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val int16_to_int8 : a:I16.t -> Tot (b:I8.t {I8.v b = (I16.v a @% pow2 8)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let int16_to_int8 x = I8.int_to_t (I16.v x @% pow2 8) + +val int32_to_int64: a:I32.t -> Tot (b:I64.t{I64.v b = I32.v a}) +let int32_to_int64 x = I64.int_to_t (I32.v x @% pow2 64) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val int32_to_int16: a:I32.t -> Tot (b:I16.t{I16.v b = (I32.v a @% pow2 16)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let int32_to_int16 x = I16.int_to_t (I32.v x @% pow2 16) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val int32_to_int8 : a:I32.t -> Tot (b:I8.t {I8.v b = (I32.v a @% pow2 8)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let int32_to_int8 x = I8.int_to_t (I32.v x @% pow2 8) + +[@@(deprecated 
"with care; in C the result is implementation-defined when not representable")] +val int64_to_int32: a:I64.t -> Tot (b:I32.t{I32.v b = (I64.v a @% pow2 32)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let int64_to_int32 x = I32.int_to_t (I64.v x @% pow2 32) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val int64_to_int16: a:I64.t -> Tot (b:I16.t{I16.v b = (I64.v a @% pow2 16)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let int64_to_int16 x = I16.int_to_t (I64.v x @% pow2 16) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val int64_to_int8 : a:I64.t -> Tot (b:I8.t {I8.v b = (I64.v a @% pow2 8)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let int64_to_int8 x = I8.int_to_t (I64.v x @% pow2 8) + +/// Unsigned to signed + +val uint8_to_int64: a:U8.t -> Tot (b:I64.t{I64.v b = U8.v a}) +let uint8_to_int64 x = I64.int_to_t (U8.v x) + +val uint8_to_int32: a:U8.t -> Tot (b:I32.t{I32.v b = U8.v a}) +let uint8_to_int32 x = I32.int_to_t (U8.v x) + +val uint8_to_int16: a:U8.t -> Tot (b:I16.t{I16.v b = U8.v a}) +let uint8_to_int16 x = I16.int_to_t (U8.v x) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint8_to_int8 : a:U8.t -> Tot (b:I8.t {I8.v b = (U8.v a @% pow2 8)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint8_to_int8 x = I8.int_to_t (U8.v x @% pow2 8) + +val uint16_to_int64: a:U16.t -> Tot (b:I64.t{I64.v b = U16.v a}) +let uint16_to_int64 x = I64.int_to_t (U16.v x) + +val uint16_to_int32: a:U16.t -> Tot (b:I32.t{I32.v b = U16.v a}) +let uint16_to_int32 x = I32.int_to_t (U16.v x) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint16_to_int16: a:U16.t -> Tot (b:I16.t{I16.v b = (U16.v a @% pow2 16)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint16_to_int16 x = I16.int_to_t (U16.v x @% pow2 16) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint16_to_int8 : a:U16.t -> Tot (b:I8.t {I8.v b = (U16.v a @% pow2 8)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint16_to_int8 x = I8.int_to_t (U16.v x @% pow2 8) + +val uint32_to_int64: a:U32.t -> Tot (b:I64.t{I64.v b = U32.v a}) +let uint32_to_int64 x = I64.int_to_t (U32.v x) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint32_to_int32: a:U32.t -> Tot (b:I32.t{I32.v b = (U32.v a @% pow2 32)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint32_to_int32 x = I32.int_to_t (U32.v x @% pow2 32) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint32_to_int16: a:U32.t -> Tot (b:I16.t{I16.v b = (U32.v a @% pow2 16)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint32_to_int16 x = I16.int_to_t (U32.v x @% pow2 16) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint32_to_int8 : a:U32.t -> Tot (b:I8.t {I8.v b = (U32.v a @% pow2 8)}) +[@@(deprecated "with care; in C the result is 
implementation-defined when not representable")] +let uint32_to_int8 x = I8.int_to_t (U32.v x @% pow2 8) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint64_to_int64: a:U64.t -> Tot (b:I64.t{I64.v b = (U64.v a @% pow2 64)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint64_to_int64 x = I64.int_to_t (U64.v x @% pow2 64) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint64_to_int32: a:U64.t -> Tot (b:I32.t{I32.v b = (U64.v a @% pow2 32)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint64_to_int32 x = I32.int_to_t (U64.v x @% pow2 32) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint64_to_int16: a:U64.t -> Tot (b:I16.t{I16.v b = (U64.v a @% pow2 16)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint64_to_int16 x = I16.int_to_t (U64.v x @% pow2 16) + +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +val uint64_to_int8 : a:U64.t -> Tot (b:I8.t {I8.v b = (U64.v a @% pow2 8)}) +[@@(deprecated "with care; in C the result is implementation-defined when not representable")] +let uint64_to_int8 x = I8.int_to_t (U64.v x @% pow2 8) + +/// Signed to unsigned + +val int8_to_uint64: a:I8.t -> Tot (b:U64.t{U64.v b = I8.v a % pow2 64}) +let int8_to_uint64 x = U64.uint_to_t (I8.v x % pow2 64) + +val int8_to_uint32: a:I8.t -> Tot (b:U32.t{U32.v b = I8.v a % pow2 32}) +let int8_to_uint32 x = U32.uint_to_t (I8.v x % pow2 32) + +val int8_to_uint16: a:I8.t -> Tot (b:U16.t{U16.v b = I8.v a % pow2 16}) +let int8_to_uint16 x = U16.uint_to_t (I8.v x % pow2 16) + +val int8_to_uint8 : a:I8.t -> Tot (b:U8.t {U8.v b = I8.v a % pow2 8}) +let int8_to_uint8 x = U8.uint_to_t (I8.v x % pow2 8) + +val int16_to_uint64: a:I16.t -> Tot (b:U64.t{U64.v b = I16.v a % pow2 64}) +let int16_to_uint64 x = U64.uint_to_t (I16.v x % pow2 64) + +val int16_to_uint32: a:I16.t -> Tot (b:U32.t{U32.v b = I16.v a % pow2 32}) +let int16_to_uint32 x = U32.uint_to_t (I16.v x % pow2 32) + +val int16_to_uint16: a:I16.t -> Tot (b:U16.t{U16.v b = I16.v a % pow2 16}) +let int16_to_uint16 x = U16.uint_to_t (I16.v x % pow2 16) + +val int16_to_uint8 : a:I16.t -> Tot (b:U8.t {U8.v b = I16.v a % pow2 8}) +let int16_to_uint8 x = U8.uint_to_t (I16.v x % pow2 8) + +val int32_to_uint64: a:I32.t -> Tot (b:U64.t{U64.v b = I32.v a % pow2 64}) +let int32_to_uint64 x = U64.uint_to_t (I32.v x % pow2 64) + +val int32_to_uint32: a:I32.t -> Tot (b:U32.t{U32.v b = I32.v a % pow2 32}) +let int32_to_uint32 x = U32.uint_to_t (I32.v x % pow2 32) + +val int32_to_uint16: a:I32.t -> Tot (b:U16.t{U16.v b = I32.v a % pow2 16}) +let int32_to_uint16 x = U16.uint_to_t (I32.v x % pow2 16) + +val int32_to_uint8 : a:I32.t -> Tot (b:U8.t {U8.v b = I32.v a % pow2 8}) +let int32_to_uint8 x = U8.uint_to_t (I32.v x % pow2 8) + +val int64_to_uint64: a:I64.t -> Tot (b:U64.t{U64.v b = I64.v a % pow2 64}) +let int64_to_uint64 x = U64.uint_to_t (I64.v x % pow2 64) + +val int64_to_uint32: a:I64.t -> Tot (b:U32.t{U32.v b = I64.v a % pow2 32}) +let int64_to_uint32 x = U32.uint_to_t (I64.v x % pow2 32) + +val int64_to_uint16: a:I64.t -> Tot (b:U16.t{U16.v b = I64.v a % pow2 16}) +let int64_to_uint16 x = U16.uint_to_t (I64.v x % pow2 16) + +val int64_to_uint8 : a:I64.t -> Tot (b:U8.t {U8.v b = I64.v a % pow2 8}) +let 
int64_to_uint8 x = U8.uint_to_t (I64.v x % pow2 8) diff --git a/stage0/ulib/FStar.Int.fst b/stage0/ulib/FStar.Int.fst new file mode 100644 index 00000000000..a266e9ae177 --- /dev/null +++ b/stage0/ulib/FStar.Int.fst @@ -0,0 +1,188 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UInt.fst], which is mostly + * a copy-paste of this module. *) + +open FStar.Mul +open FStar.BitVector +open FStar.Math.Lemmas + +let pow2_values x = + match x with + | 0 -> assert_norm (pow2 0 == 1) + | 1 -> assert_norm (pow2 1 == 2) + | 8 -> assert_norm (pow2 8 == 256) + | 16 -> assert_norm (pow2 16 == 65536) + | 31 -> assert_norm (pow2 31 == 2147483648) + | 32 -> assert_norm (pow2 32 == 4294967296) + | 63 -> assert_norm (pow2 63 == 9223372036854775808) + | 64 -> assert_norm (pow2 64 == 18446744073709551616) + | _ -> () + +let incr_underspec #n a = + if a < max_int n then a + 1 else 0 + +let decr_underspec #n a = + if a > min_int n then a - 1 else 0 + +let add_underspec #n a b = + if fits (a+b) n then a + b else 0 + +let sub_underspec #n a b = + if fits (a-b) n then a - b else 0 + +let mul_underspec #n a b = + if fits (a*b) n then a * b else 0 + +let div_underspec #n a b = + if fits (a / b) n then a / b else 0 + +let div_size #n a b = + FStar.Math.Lib.slash_decr_axiom (abs a) (abs b) + +let to_uint_injective #n x = () + +open FStar.Seq + +let to_vec_lemma_1 #n a b = () + +let to_vec_lemma_2 #n a b = + UInt.to_vec_lemma_2 #n (to_uint a) (to_uint b) + +#push-options "--initial_fuel 1 --max_fuel 1" +let rec inverse_aux #n vec i = + if i = n - 1 then + assert((from_vec vec) % 2 = (if index vec (n - 1) then 1 else 0)) + else inverse_aux #(n - 1) (slice vec 0 (n - 1)) i +#pop-options + +let inverse_vec_lemma #n vec = () + +let inverse_num_lemma #n num = () + +let from_vec_lemma_1 #n a b = () + +let from_vec_lemma_2 #n a b = inverse_vec_lemma a; inverse_vec_lemma b + +let rec zero_to_vec_lemma #n i = + if i = n - 1 then () else zero_to_vec_lemma #(n - 1) i + +let zero_from_vec_lemma #n = to_vec_lemma_2 (from_vec (zero_vec #n)) (zero n) + +let one_to_vec_lemma #n i = + if i = n - 1 then () else zero_to_vec_lemma #n i + +#push-options "--smtencoding.elim_box true --smtencoding.l_arith_repr native" +let rec pow2_to_vec_lemma #n p i = + if i = n - 1 then () + else if p = 0 then one_to_vec_lemma #n i + else pow2_to_vec_lemma #(n - 1) (p - 1) i +#pop-options + +let pow2_from_vec_lemma #n p = + to_vec_lemma_2 (from_vec (elem_vec #n p)) (pow2_n #n (n - p - 1)) + +let ones_to_vec_lemma #n i = () + +let ones_from_vec_lemma #n = + to_vec_lemma_2 (from_vec (ones_vec #n)) (ones n) + +let nth_lemma #n a b = + assert(forall (i:nat{i < n}). 
index (to_vec #n a) i = index (to_vec #n b) i); + to_vec_lemma_2 a b + +let zero_nth_lemma #n i = () + +let one_nth_lemma #n i = () + +let ones_nth_lemma #n i = () + +let logand_definition #n a b i = () + +let logxor_definition #n a b i = () + +let logor_definition #n a b i = () + +let lognot_definition #n a i = () + +let logand_commutative #n a b = nth_lemma #n (logand #n a b) (logand #n b a) + +let logand_associative #n a b c = + nth_lemma #n (logand #n (logand #n a b) c) (logand #n a (logand #n b c)) + +let logand_self #n a = nth_lemma #n (logand #n a a) a + +let logand_lemma_1 #n a = + nth_lemma #n (logand #n a (zero n)) (zero n) + +let logand_lemma_2 #n a = + nth_lemma #n (logand #n a (ones n)) a + +let sign_bit_negative #n a = + UInt.from_vec_propriety #n (to_vec a) 1 + +let sign_bit_positive #n a = + UInt.from_vec_propriety #n (to_vec a) 1 + +let logand_pos_le #n a b = + UInt.logand_le (to_uint a) (to_uint b) + +let logand_pow2_minus_one #n a m = + UInt.logand_le (to_uint a) (to_uint (pow2_minus_one #n m)) + +#push-options "--z3rlimit_factor 2" +let logand_max #n a = + sign_bit_positive a; + sign_bit_positive #n (max_int n); + nth_lemma a (logand a (max_int n)) +#pop-options + +let logxor_commutative #n a b = nth_lemma #n (logxor #n a b) (logxor #n b a) + +let logxor_associative #n a b c = nth_lemma #n (logxor #n (logxor #n a b) c) (logxor #n a (logxor #n b c)) + +let logxor_self #n a = nth_lemma #n (logxor #n a a) (zero n) + +let logxor_lemma_1 #n a = nth_lemma #n (logxor #n a (zero n)) a + +let logxor_lemma_2 #n a = nth_lemma #n (logxor #n a (ones n)) (lognot #n a) + +let logxor_inv #n a b = + UInt.logxor_inv (to_uint a) (to_uint b) + +let logxor_neq_nonzero #n a b = + UInt.logxor_neq_nonzero (to_uint a) (to_uint b) + +let lognot_negative #n a = + assert_norm (pow2 n = 2 * pow2 (n - 1)); + UInt.lemma_lognot_value_mod #n (a + pow2 n) + +let shift_left_lemma_1 #n a s i = () + +let shift_left_lemma_2 #n a s i = () + +let shift_left_value_lemma #n a s = + UInt.shift_left_value_lemma #n a s + +let shift_right_lemma_1 #n a s i = () + +let shift_right_lemma_2 #n a s i = () + +let shift_arithmetic_right_lemma_1 #n a s i = () + +let shift_arithmetic_right_lemma_2 #n a s i = () diff --git a/stage0/ulib/FStar.Int.fsti b/stage0/ulib/FStar.Int.fsti new file mode 100644 index 00000000000..eb4ba64e0d1 --- /dev/null +++ b/stage0/ulib/FStar.Int.fsti @@ -0,0 +1,435 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UInt.fsti], which is mostly + * a copy-paste of this module. 
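A couple of worked values for the C-style operators defined below may
help (illustrative only): truncation-towards-zero division gives
op_Slash (-7) 2 = -3, whereas the built-in Euclidean division on
unbounded integers gives (-7) / 2 = -4; the wrap-around modulo into
[-p/2; p/2[ gives 9 @% 16 = -7, (-9) @% 16 = 7 and 7 @% 16 = 7.
A hypothetical sanity check, assuming the definitions below are in scope:

  let _ = assert_norm (op_Slash (-7) 2 = -3)
  let _ = assert_norm (9 @% 16 = -7)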
*) + +open FStar.Mul +open FStar.BitVector +open FStar.Math.Lemmas + +val pow2_values: x:nat -> Lemma + (let p = pow2 x in + match x with + | 0 -> p=1 + | 1 -> p=2 + | 8 -> p=256 + | 16 -> p=65536 + | 31 -> p=2147483648 + | 32 -> p=4294967296 + | 63 -> p=9223372036854775808 + | 64 -> p=18446744073709551616 + | _ -> True) + [SMTPat (pow2 x)] + +/// Specs + +let max_int (n:pos) : Tot int = pow2 (n-1) - 1 +let min_int (n:pos) : Tot int = - (pow2 (n-1)) + +let fits (x:int) (n:pos) : Tot bool = min_int n <= x && x <= max_int n +let size (x:int) (n:pos) : Tot Type0 = b2t(fits x n) + +(* Machine integer type *) +type int_t (n:pos) = x:int{size x n} + +/// Multiplicative operator semantics, see C11 6.5.5 + +(* Truncation towards zero division *) +let op_Slash (a:int) (b:int{b <> 0}) : Tot int = + if (a >= 0 && b < 0) || (a < 0 && b >= 0) then - (abs a / abs b) + else abs a / abs b + +(* Wrap-around modulo: wraps into [-p/2; p/2[ *) +let op_At_Percent (v:int) (p:int{p>0/\ p%2=0}) : Tot int = + let m = v % p in if m >= p/2 then m - p else m + +/// Constants + +let zero (n:pos) : Tot (int_t n) = 0 + +#push-options "--initial_fuel 1 --max_fuel 1" + +let pow2_n (#n:pos) (p:nat{p < n-1}) : Tot (int_t n) = + pow2_le_compat (n - 2) p; pow2 p + +let pow2_minus_one (#n:pos{1 < n}) (m:nat{m < n}) : Tot (int_t n) = + pow2_le_compat (n - 1) m; + pow2 m - 1 + +let one (n:pos{1 < n}) : Tot (int_t n) = 1 + +#pop-options + +let ones (n:pos) : Tot (int_t n) = -1 + +(* Increment and decrement *) +let incr (#n:pos) (a:int_t n) + : Pure (int_t n) + (requires (b2t (a < max_int n))) (ensures (fun _ -> True)) + = a + 1 + +let decr (#n:pos) (a:int_t n) + : Pure (int_t n) + (requires (b2t (a > min_int n))) (ensures (fun _ -> True)) + = a - 1 + +val incr_underspec: #n:pos -> a:int_t n -> Pure (int_t n) + (requires (b2t (a < max_int n))) + (ensures (fun b -> a + 1 = b)) + +val decr_underspec: #n:pos -> a:int_t n -> Pure (int_t n) + (requires (b2t (a > min_int n))) + (ensures (fun b -> a - 1 = b)) + +let incr_mod (#n:pos) (a:int_t n) : Tot (int_t n) = + (a + 1) % (pow2 (n-1)) + +let decr_mod (#n:pos) (a:int_t n) : Tot (int_t n) = + (a - 1) % (pow2 (n-1)) + +(* Addition primitives *) +let add (#n:pos) (a:int_t n) (b:int_t n) + : Pure (int_t n) + (requires (size (a + b) n)) + (ensures (fun _ -> True)) + = a + b + +val add_underspec: #n:pos -> a:int_t n -> b:int_t n -> Pure (int_t n) + (requires True) + (ensures (fun c -> + size (a + b) n ==> a + b = c)) + +#push-options "--initial_fuel 1 --max_fuel 1" + +let add_mod (#n:pos) (a:int_t n) (b:int_t n) : Tot (int_t n) = + (a + b) @% (pow2 n) + +(* Subtraction primitives *) +let sub (#n:pos) (a:int_t n) (b:int_t n) + : Pure (int_t n) + (requires (size (a - b) n)) + (ensures (fun _ -> True)) + = a - b + +val sub_underspec: #n:pos -> a:int_t n -> b:int_t n -> Pure (int_t n) + (requires True) + (ensures (fun c -> + size (a - b) n ==> a - b = c)) + +let sub_mod (#n:pos) (a:int_t n) (b:int_t n) : Tot (int_t n) = + (a - b) @% (pow2 n) + +(* Multiplication primitives *) +let mul (#n:pos) (a:int_t n) (b:int_t n) + : Pure (int_t n) + (requires (size (a * b) n)) + (ensures (fun _ -> True)) + = a * b + +val mul_underspec: #n:pos -> a:int_t n -> b:int_t n -> Pure (int_t n) + (requires True) + (ensures (fun c -> + size (a * b) n ==> a * b = c)) + +let mul_mod (#n:pos) (a:int_t n) (b:int_t n) : Tot (int_t n) = + (a * b) @% (pow2 n) + +#pop-options + +(* Division primitives *) +let div (#n:pos) (a:int_t n) (b:int_t n{b <> 0}) + : Pure (int_t n) + (requires (size (a / b) n)) + (ensures (fun c -> b 
<> 0 ==> a / b = c)) += a / b + +val div_underspec: #n:pos -> a:int_t n -> b:int_t n{b <> 0} -> Pure (int_t n) + (requires True) + (ensures (fun c -> + (b <> 0 /\ size (a / b) n) ==> a / b = c)) + +val div_size: #n:pos -> a:int_t n{min_int n < a} -> b:int_t n{b <> 0} -> + Lemma (requires (size a n)) (ensures (size (a / b) n)) + +let udiv (#n:pos) (a:int_t n{min_int n < a}) (b:int_t n{b <> 0}) + : Tot (c:int_t n{b <> 0 ==> a / b = c}) + = div_size #n a b; + a / b + + +(* Modulo primitives *) +let mod (#n:pos) (a:int_t n) (b:int_t n{b <> 0}) : Tot (int_t n) = + a - ((a/b) * b) + +(* Comparison operators *) +let eq #n (a:int_t n) (b:int_t n) : Tot bool = a = b +let gt #n (a:int_t n) (b:int_t n) : Tot bool = a > b +let gte #n (a:int_t n) (b:int_t n) : Tot bool = a >= b +let lt #n (a:int_t n) (b:int_t n) : Tot bool = a < b +let lte #n (a:int_t n) (b:int_t n) : Tot bool = a <= b + +/// Casts + +let to_uint (#n:pos) (x:int_t n) : Tot (UInt.uint_t n) = + if 0 <= x then x else x + pow2 n + +let from_uint (#n:pos) (x:UInt.uint_t n) : Tot (int_t n) = + if x <= max_int n then x else x - pow2 n + +val to_uint_injective: #n:pos -> x:int_t n + -> Lemma (ensures from_uint (to_uint x) == x) [SMTPat (to_uint x)] + +let to_int_t (m:pos) (a:int) : Tot (int_t m) = a @% pow2 m + +open FStar.Seq + +(* WARNING: Mind the big endian vs little endian definition *) + +let to_vec (#n:pos) (num:int_t n) : Tot (bv_t n) = + UInt.to_vec (to_uint num) + +let from_vec (#n:pos) (vec:bv_t n) : Tot (int_t n) = + let x = UInt.from_vec vec in + if max_int n < x then x - pow2 n else x + +val to_vec_lemma_1: #n:pos -> a:int_t n -> b:int_t n -> + Lemma (requires a = b) (ensures equal (to_vec a) (to_vec b)) + +val to_vec_lemma_2: #n:pos -> a:int_t n -> b:int_t n -> + Lemma (requires equal (to_vec a) (to_vec b)) (ensures a = b) + +val inverse_aux: #n:nat -> vec:bv_t n -> i:nat{i < n} -> + Lemma (requires True) (ensures index vec i = index (to_vec (from_vec vec)) i) + [SMTPat (index (to_vec (from_vec vec)) i)] + +val inverse_vec_lemma: #n:pos -> vec:bv_t n -> + Lemma (requires True) (ensures equal vec (to_vec (from_vec vec))) + [SMTPat (to_vec (from_vec vec))] + +val inverse_num_lemma: #n:pos -> num:int_t n -> + Lemma (requires True) (ensures num = from_vec (to_vec num)) + [SMTPat (from_vec (to_vec num))] + +val from_vec_lemma_1: #n:pos -> a:bv_t n -> b:bv_t n -> + Lemma (requires equal a b) (ensures from_vec a = from_vec b) + +val from_vec_lemma_2: #n:pos -> a:bv_t n -> b:bv_t n -> + Lemma (requires from_vec a = from_vec b) (ensures equal a b) + +(* Relations between constants in BitVector and in UInt. 
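For intuition about the two's-complement correspondence set up by
[to_uint] and [from_uint] above, some worked values for n = 8
(illustrative only): to_uint (-1) = 255, to_uint (-128) = 128,
from_uint 200 = -56 and from_uint 127 = 127; [to_uint_injective]
records that the round trip is the identity. A hypothetical check
(the int_t refinements on the literals may need pow2 facts in scope):

  let _ = assert_norm (to_uint #8 (-1) = 255)
  let _ = assert_norm (from_uint #8 200 = -56)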
*) +val zero_to_vec_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) (ensures index (to_vec (zero n)) i = index (zero_vec #n) i) + [SMTPat (index (to_vec (zero n)) i)] + +val zero_from_vec_lemma: #n:pos -> + Lemma (requires True) (ensures from_vec (zero_vec #n) = zero n) + [SMTPat (from_vec (zero_vec #n))] + +val one_to_vec_lemma: #n:pos{1 < n} -> i:nat{i < n} -> + Lemma (requires True) + (ensures index (to_vec (one n)) i = index (elem_vec #n (n - 1)) i) + [SMTPat (index (to_vec (one n)) i)] + +val pow2_to_vec_lemma: #n:pos -> p:nat{p < n-1} -> i:nat{i < n} -> + Lemma (requires True) + (ensures index (to_vec (pow2_n #n p)) i = index (elem_vec #n (n - p - 1)) i) + [SMTPat (index (to_vec (pow2_n #n p)) i)] + +val pow2_from_vec_lemma: #n:pos -> p:pos{p < n-1} -> + Lemma (requires True) (ensures from_vec (elem_vec #n p) = pow2_n #n (n - p - 1)) + [SMTPat (from_vec (elem_vec #n p))] + +val ones_to_vec_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) + (ensures index (to_vec (ones n)) i = index (ones_vec #n) i) + [SMTPat (index (to_vec (ones n)) i)] + +val ones_from_vec_lemma: #n:pos -> + Lemma (requires True) (ensures from_vec (ones_vec #n) = ones n) + [SMTPat (from_vec (ones_vec #n))] + + +(* (nth a i) returns a boolean indicating the i-th bit of a. *) +let nth (#n:pos) (a:int_t n) (i:nat{i < n}) : Tot bool = index (to_vec #n a) i + +val nth_lemma: #n:pos -> a:int_t n -> b:int_t n -> + Lemma (requires forall (i:nat{i < n}). nth a i = nth b i) + (ensures a = b) + +(* Lemmas for constants *) +val zero_nth_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) (ensures nth (zero n) i = false) + [SMTPat (nth (zero n) i)] + +val one_nth_lemma: #n:pos{1 < n} -> i:nat{i < n} -> + Lemma (requires True) + (ensures (i = n - 1 ==> nth (one n) i = true) /\ + (i < n - 1 ==> nth (one n) i = false)) + [SMTPat (nth (one n) i)] + +val ones_nth_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) (ensures (nth (ones n) i) = true) + [SMTPat (nth (ones n) i)] + +(* Bitwise operators *) +let logand (#n:pos) (a:int_t n) (b:int_t n) : Tot (int_t n) = + from_vec #n (logand_vec #n (to_vec #n a) (to_vec #n b)) + +let logxor (#n:pos) (a:int_t n) (b:int_t n) : Tot (int_t n) = + from_vec #n (logxor_vec #n (to_vec #n a) (to_vec #n b)) + +let logor (#n:pos) (a:int_t n) (b:int_t n) : Tot (int_t n) = + from_vec #n (logor_vec #n (to_vec #n a) (to_vec #n b)) + +let lognot (#n:pos) (a:int_t n) : Tot (int_t n)= + from_vec #n (lognot_vec #n (to_vec #n a)) + +(* Bitwise operators definitions *) +val logand_definition: #n:pos -> a:int_t n -> b:int_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (logand a b) i = (nth a i && nth b i))) + [SMTPat (nth (logand a b) i)] + +val logxor_definition: #n:pos -> a:int_t n -> b:int_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (logxor a b) i = (nth a i <> nth b i))) + [SMTPat (nth (logxor a b) i)] + +val logor_definition: #n:pos -> a:int_t n -> b:int_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (logor a b) i = (nth a i || nth b i))) + [SMTPat (nth (logor a b) i)] + +val lognot_definition: #n:pos -> a:int_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (lognot a) i = not(nth a i))) + [SMTPat (nth (lognot a) i)] + +(* Two's complement unary minus *) +inline_for_extraction +let minus (#n:pos{1 < n}) (a:int_t n) : Tot (int_t n) = + add_mod (lognot a) 1 + +(* Bitwise operators lemmas *) +(* TODO: lemmas about the relations between different operators *) +(* Bitwise AND operator *) +val logand_commutative: #n:pos -> 
a:int_t n -> b:int_t n -> + Lemma (requires True) (ensures (logand #n a b = logand #n b a)) + +val logand_associative: #n:pos -> a:int_t n -> b:int_t n -> c:int_t n -> + Lemma (logand #n (logand #n a b) c = logand #n a (logand #n b c)) + +val logand_self: #n:pos -> a:int_t n -> + Lemma (logand #n a a = a) + +val logand_lemma_1: #n:pos -> a:int_t n -> + Lemma (requires True) (ensures (logand #n a (zero n) = zero n)) + +val logand_lemma_2: #n:pos -> a:int_t n -> + Lemma (logand #n a (ones n) = a) + +val sign_bit_negative: #n:pos{1 < n} -> a:int_t n -> + Lemma (nth a 0 = true <==> a < 0) + +val sign_bit_positive: #n:pos{1 < n} -> a:int_t n -> + Lemma (nth a 0 = false <==> 0 <= a) + +val logand_pos_le: #n:pos{1 < n} -> a:int_t n{0 <= a} -> b:int_t n{0 <= b} -> + Lemma (0 <= logand a b /\ logand a b <= a /\ logand a b <= b) + +val logand_pow2_minus_one: #n:pos{1 < n} -> a:int_t n -> m:pos{m < n} -> + Lemma (0 <= logand a (pow2_minus_one m) /\ + logand a (pow2_minus_one m) <= pow2_minus_one #n m) + +val logand_max: #n:pos{1 < n} -> a:int_t n{0 <= a} -> + Lemma (0 <= logand a (max_int n) /\ a = logand a (max_int n)) + +(* Bitwise XOR operator *) +val logxor_commutative: #n:pos -> a:int_t n -> b:int_t n -> + Lemma (requires True) (ensures (logxor #n a b = logxor #n b a)) + +val logxor_associative: #n:pos -> a:int_t n -> b:int_t n -> c:int_t n -> + Lemma (requires True) (ensures (logxor #n (logxor #n a b) c = logxor #n a (logxor #n b c))) + +val logxor_self: #n:pos -> a:int_t n -> + Lemma (requires True) (ensures (logxor #n a a = zero n)) + +val logxor_lemma_1: #n:pos -> a:int_t n -> + Lemma (requires True) (ensures (logxor #n a (zero n) = a)) + +val logxor_lemma_2: #n:pos -> a:int_t n -> + Lemma (requires True) (ensures (logxor #n a (ones n) = lognot #n a)) + +val logxor_inv: #n:pos -> a:int_t n -> b:int_t n -> Lemma + (a = logxor #n (logxor #n a b) b) + +val logxor_neq_nonzero: #n:pos -> a:int_t n -> b:int_t n -> Lemma + (a <> b ==> logxor a b <> 0) + +val lognot_negative: #n:pos -> a:int_t n -> Lemma + (requires a < 0) + (ensures lognot a == UInt.lognot #n (a + pow2 n)) + +(* Shift operators *) + +(** If a is negative the result is undefined behaviour *) +let shift_left (#n:pos) (a:int_t n{0 <= a}) (s:nat) : Tot (int_t n) = + from_vec (shift_left_vec #n (to_vec #n a) s) + +(** If a is negative the result is implementation defined *) +let shift_right (#n:pos) (a:int_t n{0 <= a}) (s:nat) : Tot (int_t n) = + from_vec (shift_right_vec #n (to_vec #n a) s) + +let shift_arithmetic_right (#n:pos) (a:int_t n) (s:nat) : Tot (int_t n) = + from_vec (shift_arithmetic_right_vec #n (to_vec #n a) s) + +(* Shift operators lemmas *) +val shift_left_lemma_1: #n:pos -> a:int_t n{0 <= a} -> s:nat -> i:nat{i < n && i >= n - s} -> + Lemma (requires True) + (ensures (nth (shift_left #n a s) i = false)) + [SMTPat (nth (shift_left #n a s) i)] + +val shift_left_lemma_2: #n:pos -> a:int_t n{0 <= a} -> s:nat -> i:nat{i < n && i < n - s} -> + Lemma (requires True) + (ensures (nth (shift_left #n a s) i = nth #n a (i + s))) + [SMTPat (nth (shift_left #n a s) i)] + +val shift_left_value_lemma: #n:pos -> a:int_t n{0 <= a} -> s:nat -> + Lemma (requires True) + (ensures shift_left #n a s = (a * pow2 s) @% pow2 n) + [SMTPat (shift_left #n a s)] + +val shift_right_lemma_1: #n:pos -> a:int_t n{0 <= a} -> s:nat -> i:nat{i < n && i < s} -> + Lemma (requires True) + (ensures (nth (shift_right #n a s) i = false)) + [SMTPat (nth (shift_right #n a s) i)] + +val shift_right_lemma_2: #n:pos -> a:int_t n{0 <= a} -> s:nat -> i:nat{i < n && i 
>= s} -> + Lemma (requires True) + (ensures (nth (shift_right #n a s) i = nth #n a (i - s))) + [SMTPat (nth (shift_right #n a s) i)] + +val shift_arithmetic_right_lemma_1: #n:pos -> a:int_t n -> s:nat -> i:nat{i < n && i < s} -> + Lemma (requires True) + (ensures (nth (shift_arithmetic_right #n a s) i = nth a 0)) + [SMTPat (nth (shift_arithmetic_right #n a s) i)] + +val shift_arithmetic_right_lemma_2: #n:pos -> a:int_t n -> s:nat -> i:nat{i < n && i >= s} -> + Lemma (requires True) + (ensures (nth (shift_arithmetic_right #n a s) i = nth #n a (i - s))) + [SMTPat (nth (shift_arithmetic_right #n a s) i)] diff --git a/stage0/ulib/FStar.Int128.fst b/stage0/ulib/FStar.Int128.fst new file mode 100644 index 00000000000..be9dc4b28c0 --- /dev/null +++ b/stage0/ulib/FStar.Int128.fst @@ -0,0 +1,85 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int128 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. *) + +type t : eqtype = + | Mk: v:int_t n -> t + + +let v x = x.v + +irreducible +let int_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = int_to_t 0 + +let one = + FStar.Math.Lemmas.pow2_lt_compat (n - 1) 1; + int_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let shift_arithmetic_right a s = Mk (shift_arithmetic_right (v a) (UInt32.v s)) + +let to_string _ = admit () + +//AR: this is to workaround the interleaving semantics of pragmas in FStar.Int128.fst +// where the interface requires the last but one definition to be lax-checked +#push-options "--admit_smt_queries true" + +let of_string _ = admit () + +#pop-options + +let mul_wide a b = + assume (size (Int64.v a * Int64.v b) n); + Mk ((Int64.v a) * (Int64.v b)) diff --git a/stage0/ulib/FStar.Int128.fsti b/stage0/ulib/FStar.Int128.fsti new file mode 100644 index 00000000000..6e313df7db3 --- /dev/null +++ b/stage0/ulib/FStar.Int128.fsti @@ -0,0 +1,179 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int128 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 128 + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. *) + +new val t : eqtype + +val v (x:t) : Tot (int_t n) + +val int_to_t: x:int_t n -> Pure t + (requires True) + (ensures (fun y -> v y = x)) + +val uv_inv (x : t) : Lemma + (ensures (int_to_t (v x) == x)) + [SMTPat (v x)] + +val vu_inv (x : int_t n) : Lemma + (ensures (v (int_to_t x) == x)) + [SMTPat (int_to_t x)] + +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(* Subtraction primitives *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(* Multiplication primitives *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(* Division primitives *) +val div (a:t) (b:t{v b <> 0}) : Pure t + // division overflows on INT_MIN / -1 + (requires (size (v a / v b) n)) + (ensures (fun c -> v a / v b = v c)) + +(* Modulo primitives *) +(* If a/b is not representable the result of a%b is undefind *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires (size (v a / v b) n)) + (ensures (fun c -> FStar.Int.mod (v a) (v b) = v c)) + +(* Bitwise operators *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(* Shift operators *) + +(** If a is negative the result is implementation-defined *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c)) + +(** If a is negative or a * pow2 s is not representable the result is undefined *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c)) + +val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c)) + +(* Comparison operators *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(* Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Subtraction_Hat = sub +unfold let op_Star_Hat = mul +unfold let op_Slash_Hat = div 
+unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +inline_for_extraction +let ct_abs (a:t{min_int n < v a}) : Tot (b:t{v b = abs (v a)}) = + let mask = a >>>^ UInt32.uint_to_t (n - 1) in + if 0 <= v a then + begin + sign_bit_positive (v a); + nth_lemma (v mask) (FStar.Int.zero _); + logxor_lemma_1 (v a) + end + else + begin + sign_bit_negative (v a); + nth_lemma (v mask) (ones _); + logxor_lemma_2 (v a); + lognot_negative (v a); + UInt.lemma_lognot_value #n (to_uint (v a)) + end; + (a ^^ mask) -^ mask + +(* To input / output constants *) +(* .. in decimal representation *) +val to_string: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. +//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __int_to_t (x:int) : Tot t + = int_to_t x +#reset-options + +val mul_wide: a:Int64.t -> b:Int64.t -> Pure t + (requires True) + (ensures (fun c -> v c = Int64.v a * Int64.v b)) diff --git a/stage0/ulib/FStar.Int16.fst b/stage0/ulib/FStar.Int16.fst new file mode 100644 index 00000000000..352f7d63192 --- /dev/null +++ b/stage0/ulib/FStar.Int16.fst @@ -0,0 +1,81 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int16 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. 
*) + +type t : eqtype = + | Mk: v:int_t n -> t + + +let v x = x.v + +irreducible +let int_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = int_to_t 0 + +let one = + FStar.Math.Lemmas.pow2_lt_compat (n - 1) 1; + int_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let shift_arithmetic_right a s = Mk (shift_arithmetic_right (v a) (UInt32.v s)) + +let to_string _ = admit () + +//AR: this is to workaround the interleaving semantics of pragmas in FStar.Int128.fst +// where the interface requires the last but one definition to be lax-checked +#push-options "--admit_smt_queries true" + +let of_string _ = admit () + +#pop-options diff --git a/stage0/ulib/FStar.Int16.fsti b/stage0/ulib/FStar.Int16.fsti new file mode 100644 index 00000000000..d45d0e3f4b0 --- /dev/null +++ b/stage0/ulib/FStar.Int16.fsti @@ -0,0 +1,175 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int16 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 16 + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. 
*) + +new val t : eqtype + +val v (x:t) : Tot (int_t n) + +val int_to_t: x:int_t n -> Pure t + (requires True) + (ensures (fun y -> v y = x)) + +val uv_inv (x : t) : Lemma + (ensures (int_to_t (v x) == x)) + [SMTPat (v x)] + +val vu_inv (x : int_t n) : Lemma + (ensures (v (int_to_t x) == x)) + [SMTPat (int_to_t x)] + +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(* Subtraction primitives *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(* Multiplication primitives *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(* Division primitives *) +val div (a:t) (b:t{v b <> 0}) : Pure t + // division overflows on INT_MIN / -1 + (requires (size (v a / v b) n)) + (ensures (fun c -> v a / v b = v c)) + +(* Modulo primitives *) +(* If a/b is not representable the result of a%b is undefind *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires (size (v a / v b) n)) + (ensures (fun c -> FStar.Int.mod (v a) (v b) = v c)) + +(* Bitwise operators *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(* Shift operators *) + +(** If a is negative the result is implementation-defined *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c)) + +(** If a is negative or a * pow2 s is not representable the result is undefined *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c)) + +val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c)) + +(* Comparison operators *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(* Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Subtraction_Hat = sub +unfold let op_Star_Hat = mul +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +inline_for_extraction +let ct_abs (a:t{min_int n < v a}) : Tot (b:t{v b = abs (v a)}) = + let mask = a >>>^ UInt32.uint_to_t (n - 1) in + if 0 <= v a then + begin + sign_bit_positive (v a); + nth_lemma (v mask) (FStar.Int.zero _); + logxor_lemma_1 (v a) + end + else + begin + sign_bit_negative (v a); + 
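+      (* In this branch a < 0, so the arithmetic shift by (n - 1) replicates the
+         sign bit and mask = ones n, i.e. -1. Hence a ^^ mask = lognot a, and the
+         final (a ^^ mask) -^ mask = lognot a + 1 = -a, the two's complement
+         negation. The lemma calls in this branch expose exactly these facts to
+         the SMT solver. *)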
nth_lemma (v mask) (ones _); + logxor_lemma_2 (v a); + lognot_negative (v a); + UInt.lemma_lognot_value #n (to_uint (v a)) + end; + (a ^^ mask) -^ mask + +(* To input / output constants *) +(* .. in decimal representation *) +val to_string: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. +//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __int_to_t (x:int) : Tot t + = int_to_t x +#reset-options diff --git a/stage0/ulib/FStar.Int32.fst b/stage0/ulib/FStar.Int32.fst new file mode 100644 index 00000000000..9d916278215 --- /dev/null +++ b/stage0/ulib/FStar.Int32.fst @@ -0,0 +1,81 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int32 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. *) + +type t : eqtype = + | Mk: v:int_t n -> t + + +let v x = x.v + +irreducible +let int_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = int_to_t 0 + +let one = + FStar.Math.Lemmas.pow2_lt_compat (n - 1) 1; + int_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let shift_arithmetic_right a s = Mk (shift_arithmetic_right (v a) (UInt32.v s)) + +let to_string _ = admit () + +//AR: this is to workaround the interleaving semantics of pragmas in FStar.Int128.fst +// where the interface requires the last but one definition to be lax-checked +#push-options "--admit_smt_queries true" + +let of_string _ = admit () + +#pop-options diff --git a/stage0/ulib/FStar.Int32.fsti b/stage0/ulib/FStar.Int32.fsti new file mode 100644 index 00000000000..a937c754f16 --- /dev/null +++ b/stage0/ulib/FStar.Int32.fsti @@ -0,0 +1,175 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int32 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 32 + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. *) + +new val t : eqtype + +val v (x:t) : Tot (int_t n) + +val int_to_t: x:int_t n -> Pure t + (requires True) + (ensures (fun y -> v y = x)) + +val uv_inv (x : t) : Lemma + (ensures (int_to_t (v x) == x)) + [SMTPat (v x)] + +val vu_inv (x : int_t n) : Lemma + (ensures (v (int_to_t x) == x)) + [SMTPat (int_to_t x)] + +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(* Subtraction primitives *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(* Multiplication primitives *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(* Division primitives *) +val div (a:t) (b:t{v b <> 0}) : Pure t + // division overflows on INT_MIN / -1 + (requires (size (v a / v b) n)) + (ensures (fun c -> v a / v b = v c)) + +(* Modulo primitives *) +(* If a/b is not representable the result of a%b is undefind *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires (size (v a / v b) n)) + (ensures (fun c -> FStar.Int.mod (v a) (v b) = v c)) + +(* Bitwise operators *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(* Shift operators *) + +(** If a is negative the result is implementation-defined *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c)) + +(** If a is negative or a * pow2 s is not representable the result is undefined *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c)) + +val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c)) + +(* Comparison operators *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(* Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Subtraction_Hat = sub +unfold let op_Star_Hat = mul +unfold let op_Slash_Hat = div +unfold 
let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +inline_for_extraction +let ct_abs (a:t{min_int n < v a}) : Tot (b:t{v b = abs (v a)}) = + let mask = a >>>^ UInt32.uint_to_t (n - 1) in + if 0 <= v a then + begin + sign_bit_positive (v a); + nth_lemma (v mask) (FStar.Int.zero _); + logxor_lemma_1 (v a) + end + else + begin + sign_bit_negative (v a); + nth_lemma (v mask) (ones _); + logxor_lemma_2 (v a); + lognot_negative (v a); + UInt.lemma_lognot_value #n (to_uint (v a)) + end; + (a ^^ mask) -^ mask + +(* To input / output constants *) +(* .. in decimal representation *) +val to_string: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. +//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __int_to_t (x:int) : Tot t + = int_to_t x +#reset-options diff --git a/stage0/ulib/FStar.Int64.fst b/stage0/ulib/FStar.Int64.fst new file mode 100644 index 00000000000..2a7fe2ee20b --- /dev/null +++ b/stage0/ulib/FStar.Int64.fst @@ -0,0 +1,81 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int64 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. 
*) + +type t : eqtype = + | Mk: v:int_t n -> t + + +let v x = x.v + +irreducible +let int_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = int_to_t 0 + +let one = + FStar.Math.Lemmas.pow2_lt_compat (n - 1) 1; + int_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let shift_arithmetic_right a s = Mk (shift_arithmetic_right (v a) (UInt32.v s)) + +let to_string _ = admit () + +//AR: this is to workaround the interleaving semantics of pragmas in FStar.Int128.fst +// where the interface requires the last but one definition to be lax-checked +#push-options "--admit_smt_queries true" + +let of_string _ = admit () + +#pop-options diff --git a/stage0/ulib/FStar.Int64.fsti b/stage0/ulib/FStar.Int64.fsti new file mode 100644 index 00000000000..9f121d6d644 --- /dev/null +++ b/stage0/ulib/FStar.Int64.fsti @@ -0,0 +1,175 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int64 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 64 + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. 
*) + +new val t : eqtype + +val v (x:t) : Tot (int_t n) + +val int_to_t: x:int_t n -> Pure t + (requires True) + (ensures (fun y -> v y = x)) + +val uv_inv (x : t) : Lemma + (ensures (int_to_t (v x) == x)) + [SMTPat (v x)] + +val vu_inv (x : int_t n) : Lemma + (ensures (v (int_to_t x) == x)) + [SMTPat (int_to_t x)] + +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(* Subtraction primitives *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(* Multiplication primitives *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(* Division primitives *) +val div (a:t) (b:t{v b <> 0}) : Pure t + // division overflows on INT_MIN / -1 + (requires (size (v a / v b) n)) + (ensures (fun c -> v a / v b = v c)) + +(* Modulo primitives *) +(* If a/b is not representable the result of a%b is undefind *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires (size (v a / v b) n)) + (ensures (fun c -> FStar.Int.mod (v a) (v b) = v c)) + +(* Bitwise operators *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(* Shift operators *) + +(** If a is negative the result is implementation-defined *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c)) + +(** If a is negative or a * pow2 s is not representable the result is undefined *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c)) + +val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c)) + +(* Comparison operators *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(* Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Subtraction_Hat = sub +unfold let op_Star_Hat = mul +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +inline_for_extraction +let ct_abs (a:t{min_int n < v a}) : Tot (b:t{v b = abs (v a)}) = + let mask = a >>>^ UInt32.uint_to_t (n - 1) in + if 0 <= v a then + begin + sign_bit_positive (v a); + nth_lemma (v mask) (FStar.Int.zero _); + logxor_lemma_1 (v a) + end + else + begin + sign_bit_negative (v a); + 
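+      (* Note that the refinement min_int n < v a on the argument is what makes
+         this branch sound: it excludes a = min_int n = -(pow2 (n - 1)), whose
+         absolute value pow2 (n - 1) exceeds max_int n and so has no
+         representation in t. *)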
nth_lemma (v mask) (ones _); + logxor_lemma_2 (v a); + lognot_negative (v a); + UInt.lemma_lognot_value #n (to_uint (v a)) + end; + (a ^^ mask) -^ mask + +(* To input / output constants *) +(* .. in decimal representation *) +val to_string: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. +//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __int_to_t (x:int) : Tot t + = int_to_t x +#reset-options diff --git a/stage0/ulib/FStar.Int8.fst b/stage0/ulib/FStar.Int8.fst new file mode 100644 index 00000000000..a53f9570a71 --- /dev/null +++ b/stage0/ulib/FStar.Int8.fst @@ -0,0 +1,81 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int8 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. *) + +type t : eqtype = + | Mk: v:int_t n -> t + + +let v x = x.v + +irreducible +let int_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = int_to_t 0 + +let one = + FStar.Math.Lemmas.pow2_lt_compat (n - 1) 1; + int_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let shift_arithmetic_right a s = Mk (shift_arithmetic_right (v a) (UInt32.v s)) + +let to_string _ = admit () + +//AR: this is to workaround the interleaving semantics of pragmas in FStar.Int128.fst +// where the interface requires the last but one definition to be lax-checked +#push-options "--admit_smt_queries true" + +let of_string _ = admit () + +#pop-options diff --git a/stage0/ulib/FStar.Int8.fsti b/stage0/ulib/FStar.Int8.fsti new file mode 100644 index 00000000000..afc1f1a005b --- /dev/null +++ b/stage0/ulib/FStar.Int8.fsti @@ -0,0 +1,175 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Int8 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 8 + +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. *) + +new val t : eqtype + +val v (x:t) : Tot (int_t n) + +val int_to_t: x:int_t n -> Pure t + (requires True) + (ensures (fun y -> v y = x)) + +val uv_inv (x : t) : Lemma + (ensures (int_to_t (v x) == x)) + [SMTPat (v x)] + +val vu_inv (x : int_t n) : Lemma + (ensures (v (int_to_t x) == x)) + [SMTPat (int_to_t x)] + +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(* Subtraction primitives *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(* Multiplication primitives *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(* Division primitives *) +val div (a:t) (b:t{v b <> 0}) : Pure t + // division overflows on INT_MIN / -1 + (requires (size (v a / v b) n)) + (ensures (fun c -> v a / v b = v c)) + +(* Modulo primitives *) +(* If a/b is not representable the result of a%b is undefind *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires (size (v a / v b) n)) + (ensures (fun c -> FStar.Int.mod (v a) (v b) = v c)) + +(* Bitwise operators *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(* Shift operators *) + +(** If a is negative the result is implementation-defined *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c)) + +(** If a is negative or a * pow2 s is not representable the result is undefined *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c)) + +val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c)) + +(* Comparison operators *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(* Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Subtraction_Hat = sub +unfold let op_Star_Hat = mul +unfold let op_Slash_Hat = div +unfold 
let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +inline_for_extraction +let ct_abs (a:t{min_int n < v a}) : Tot (b:t{v b = abs (v a)}) = + let mask = a >>>^ UInt32.uint_to_t (n - 1) in + if 0 <= v a then + begin + sign_bit_positive (v a); + nth_lemma (v mask) (FStar.Int.zero _); + logxor_lemma_1 (v a) + end + else + begin + sign_bit_negative (v a); + nth_lemma (v mask) (ones _); + logxor_lemma_2 (v a); + lognot_negative (v a); + UInt.lemma_lognot_value #n (to_uint (v a)) + end; + (a ^^ mask) -^ mask + +(* To input / output constants *) +(* .. in decimal representation *) +val to_string: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. +//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __int_to_t (x:int) : Tot t + = int_to_t x +#reset-options diff --git a/stage0/ulib/FStar.IntN.fstip b/stage0/ulib/FStar.IntN.fstip new file mode 100644 index 00000000000..87d5e5d3329 --- /dev/null +++ b/stage0/ulib/FStar.IntN.fstip @@ -0,0 +1,154 @@ +open FStar.Int +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly + * a copy-paste of this module. 
*) + +new val t : eqtype + +val v (x:t) : Tot (int_t n) + +val int_to_t: x:int_t n -> Pure t + (requires True) + (ensures (fun y -> v y = x)) + +val uv_inv (x : t) : Lemma + (ensures (int_to_t (v x) == x)) + [SMTPat (v x)] + +val vu_inv (x : int_t n) : Lemma + (ensures (v (int_to_t x) == x)) + [SMTPat (int_to_t x)] + +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(* Subtraction primitives *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(* Multiplication primitives *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(* Division primitives *) +val div (a:t) (b:t{v b <> 0}) : Pure t + // division overflows on INT_MIN / -1 + (requires (size (v a / v b) n)) + (ensures (fun c -> v a / v b = v c)) + +(* Modulo primitives *) +(* If a/b is not representable the result of a%b is undefind *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires (size (v a / v b) n)) + (ensures (fun c -> FStar.Int.mod (v a) (v b) = v c)) + +(* Bitwise operators *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(* Shift operators *) + +(** If a is negative the result is implementation-defined *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c)) + +(** If a is negative or a * pow2 s is not representable the result is undefined *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c)) + +val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c)) + +(* Comparison operators *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(* Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Subtraction_Hat = sub +unfold let op_Star_Hat = mul +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +inline_for_extraction +let ct_abs (a:t{min_int n < v a}) : Tot (b:t{v b = abs (v a)}) = + let mask = a >>>^ UInt32.uint_to_t (n - 1) in + if 0 <= v a then + begin + sign_bit_positive (v a); + nth_lemma (v mask) (FStar.Int.zero _); + logxor_lemma_1 (v a) + end + else + begin + sign_bit_negative (v a); + 
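+      (* Both branches of this conditional consist solely of lemma calls, i.e.
+         proof steps with no computational content; the value returned is the
+         branch-free expression (a ^^ mask) -^ mask below, which is why this
+         function is named ct_abs (constant-time absolute value). Illustrative
+         check for n = 8: a = -5 gives mask = -1, a ^^ mask = 4, and
+         4 -^ (-1) = 5 = abs (-5). *)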
nth_lemma (v mask) (ones _);
+      logxor_lemma_2 (v a);
+      lognot_negative (v a);
+      UInt.lemma_lognot_value #n (to_uint (v a))
+    end;
+  (a ^^ mask) -^ mask
+
+(* To input / output constants *)
+(* .. in decimal representation *)
+val to_string: t -> Tot string
+
+val of_string: string -> Tot t
+
+#set-options "--admit_smt_queries true"
+//This private primitive is used internally by the
+//compiler to translate bounded integer constants
+//with a desugaring-time check of the size of the number,
+//rather than an expensive verification check.
+//Since it is marked private, client programs cannot call it directly
+//Since it is marked unfold, it eagerly reduces,
+//eliminating the verification overhead of the wrapper
+private
+unfold
+let __int_to_t (x:int) : Tot t
+  = int_to_t x
+#reset-options
diff --git a/stage0/ulib/FStar.IntN.fstp b/stage0/ulib/FStar.IntN.fstp
new file mode 100644
index 00000000000..ef34d37cf04
--- /dev/null
+++ b/stage0/ulib/FStar.IntN.fstp
@@ -0,0 +1,62 @@
+open FStar.Int
+open FStar.Mul
+
+#set-options "--max_fuel 0 --max_ifuel 0"
+
+(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
+ * a copy-paste of this module. *)
+
+type t : eqtype =
+  | Mk: v:int_t n -> t
+
+
+let v x = x.v
+
+irreducible
+let int_to_t x = Mk x
+
+let uv_inv _ = ()
+
+let vu_inv _ = ()
+
+let v_inj _ _ = ()
+
+let zero = int_to_t 0
+
+let one =
+  FStar.Math.Lemmas.pow2_lt_compat (n - 1) 1;
+  int_to_t 1
+
+let add a b = Mk (add (v a) (v b))
+
+let sub a b = Mk (sub (v a) (v b))
+
+let mul a b = Mk (mul (v a) (v b))
+
+let div a b = Mk (div (v a) (v b))
+
+let rem a b = Mk (mod (v a) (v b))
+
+let logand x y = Mk (logand (v x) (v y))
+
+let logxor x y = Mk (logxor (v x) (v y))
+
+let logor x y = Mk (logor (v x) (v y))
+
+let lognot x = Mk (lognot (v x))
+
+let shift_right a s = Mk (shift_right (v a) (UInt32.v s))
+
+let shift_left a s = Mk (shift_left (v a) (UInt32.v s))
+
+let shift_arithmetic_right a s = Mk (shift_arithmetic_right (v a) (UInt32.v s))
+
+let to_string _ = admit ()
+
+//AR: this is to workaround the interleaving semantics of pragmas in FStar.Int128.fst
+// where the interface requires the last but one definition to be lax-checked
+#push-options "--admit_smt_queries true"
+
+let of_string _ = admit ()
+
+#pop-options
diff --git a/stage0/ulib/FStar.IntegerIntervals.fst b/stage0/ulib/FStar.IntegerIntervals.fst
new file mode 100644
index 00000000000..5706c332ea4
--- /dev/null
+++ b/stage0/ulib/FStar.IntegerIntervals.fst
@@ -0,0 +1,84 @@
+(*
+   Copyright 2008-2022 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+   Author: A. Rozanov
+*)
+module FStar.IntegerIntervals
+
+(* Aliases to all kinds of integer intervals *)
+
+(* general infinite integer intervals *)
+type less_than (k: int) = x:int{x<k}
+type greater_than (k: int) = x:int{x>k}
+type not_less_than (x: int) = greater_than (x-1)
+type not_greater_than (x: int) = less_than (x+1)
+
+(* Type coercion.
While supposed to be absolutely trivial, + might still be invoked directly under extremely low rlimits *) +let coerce_to_less_than #n (x: not_greater_than n) : less_than (n+1) = x +let coerce_to_not_less_than #n (x: greater_than n) : not_less_than (n+1) = x + +let interval_condition (x y t: int) = (x <= t) && (t < y) + +type interval_type (x y:int) = z : Type0{ z == t:int{interval_condition x y t} } + +(* Default interval is half-open, which is the most frequently used case *) +type interval (x y: int) : interval_type x y = t:int{interval_condition x y t} + +(* general finite integer intervals *) +type efrom_eto (x y: int) = interval (x+1) y +type efrom_ito (x y: int) = interval (x+1) (y+1) +type ifrom_eto (x y: int) = interval x y +type ifrom_ito (x y: int) = interval x (y+1) + +(* Special case for naturals under k, to use in sequences, lists, arrays, etc *) +type under (k: nat) = interval 0 k + +(* If we define our intervals this way, then the following lemma comes for free: *) +private let closed_interval_lemma (x y:int) : Lemma (interval x (y+1) == ifrom_ito x y) = () + + +(* how many numbers fall into an interval? *) +let interval_size (#x #y: int) (interval: interval_type x y) : nat + = if y >= x then y-x else 0 + +(* when we want a zero-based index that runs over an interval, we use this *) +type counter_for (#x #y:int) (interval: interval_type x y) = under (interval_size interval) + +(* special case for closed intervals, used in FStar.Algebra.CommMonoid.Fold *) +let closed_interval_size (x y: int) : nat = interval_size (ifrom_ito x y) + +(* A usage example and a test at the same time: *) +private let _ = assert (interval_size (interval 5 10) = 5) +private let _ = assert (interval_size (ifrom_ito 5 10) = 6) +private let _ = assert (interval_size (ifrom_ito 15 10) = 0) + +(* This lemma, especially when used with forall_intro, helps the + prover verify the index ranges of sequences that correspond + to arbitrary folds. + + It is supposed to be invoked to decrease the toll we put on rlimit, + i.e. will be redundant in most use cases. *) +let counter_bounds_lemma (x y:int) (i: (counter_for (ifrom_ito x y))) + : Lemma (x+i >= x /\ x+i <= y) = () + +(* An integer sequence [0..n), n values in total, + with index value available to the prover. *) +let indices_seq (n: nat) + : (f:FStar.Seq.Base.seq (under n) { + FStar.Seq.Base.length f = n /\ + (forall (k: under n). FStar.Seq.Base.index f k = k) + }) + = FStar.Seq.Base.init n (fun (x:under n) -> x) diff --git a/stage0/ulib/FStar.Integers.fst b/stage0/ulib/FStar.Integers.fst new file mode 100644 index 00000000000..ce57d46b745 --- /dev/null +++ b/stage0/ulib/FStar.Integers.fst @@ -0,0 +1,589 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Integers + +#set-options "--initial_ifuel 2 --max_ifuel 2 --initial_fuel 0 --max_fuel 0" + +irreducible +let mark_for_norm = () + +unfold +let norm (#a:Type) (x:a) = norm [iota; delta_attr [`%mark_for_norm]] x + +type width = + | W8 + | W16 + | W32 + | W64 + | W128 + | Winfinite + +[@@mark_for_norm] +let nat_of_width = function + | W8 -> Some 8 + | W16 -> Some 16 + | W32 -> Some 32 + | W64 -> Some 64 + | W128 -> Some 128 + | Winfinite -> None + +let fixed_width = w:width{w <> Winfinite} + +[@@mark_for_norm] +let nat_of_fixed_width (w:fixed_width) = + match nat_of_width w with + | Some v -> v + +type signed_width = + | Signed of width + | Unsigned of fixed_width //We don't support (Unsigned WInfinite); use nat instead + +[@@mark_for_norm] +let width_of_sw = function + | Signed w -> w + | Unsigned w -> w + +[@@mark_for_norm] +noextract +inline_for_extraction +let int_t sw : Tot Type0 = + match sw with + | Unsigned W8 -> FStar.UInt8.t + | Unsigned W16 -> FStar.UInt16.t + | Unsigned W32 -> FStar.UInt32.t + | Unsigned W64 -> FStar.UInt64.t + | Unsigned W128 -> FStar.UInt128.t + | Signed Winfinite -> int + | Signed W8 -> FStar.Int8.t + | Signed W16 -> FStar.Int16.t + | Signed W32 -> FStar.Int32.t + | Signed W64 -> FStar.Int64.t + | Signed W128 -> FStar.Int128.t + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let within_bounds' sw (x:int) = + match sw, nat_of_width (width_of_sw sw) with + | Signed _, None -> True + | Signed _, Some n -> FStar.Int.size x n + | Unsigned _, Some n -> FStar.UInt.size x n + +unfold +let within_bounds sw x = norm (within_bounds' sw x) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let v #sw (x:int_t sw) + : Tot (y:int_t (Signed Winfinite){within_bounds sw y}) + = match sw with + | Unsigned w -> + (match w with + | W8 -> FStar.UInt8.v x + | W16 -> FStar.UInt16.v x + | W32 -> FStar.UInt32.v x + | W64 -> FStar.UInt64.v x + | W128 -> FStar.UInt128.v x) + | Signed w -> + (match w with + | Winfinite -> x + | W8 -> FStar.Int8.v x + | W16 -> FStar.Int16.v x + | W32 -> FStar.Int32.v x + | W64 -> FStar.Int64.v x + | W128 -> FStar.Int128.v x) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let u #sw + (x:int_t (Signed Winfinite){within_bounds sw x}) + : Tot (y:int_t sw{norm (v x == v y)}) + = match sw with + | Unsigned w -> + (match w with + | W8 -> FStar.UInt8.uint_to_t x + | W16 -> FStar.UInt16.uint_to_t x + | W32 -> FStar.UInt32.uint_to_t x + | W64 -> FStar.UInt64.uint_to_t x + | W128 -> FStar.UInt128.uint_to_t x) + | Signed w -> + (match w with + | Winfinite -> x + | W8 -> FStar.Int8.int_to_t x + | W16 -> FStar.Int16.int_to_t x + | W32 -> FStar.Int32.int_to_t x + | W64 -> FStar.Int64.int_to_t x + | W128 -> FStar.Int128.int_to_t x) + +irreducible +noextract +let cast #sw #sw' + (from:int_t sw{within_bounds sw' (v from)}) + : Tot (to:int_t sw'{norm (v from == v to)}) + = u (v from) + +[@@mark_for_norm] +unfold +noextract +let cast_ok #from to (x:int_t from) = within_bounds to (v x) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( + ) #sw + (x:int_t sw) + (y:int_t sw{within_bounds sw (v x + v y)}) + : Tot (int_t sw) + = match sw with + | Signed Winfinite -> x + y + | Unsigned W8 -> FStar.UInt8.(x +^ y) + | Unsigned W16 -> FStar.UInt16.(x +^ y) + | Unsigned W32 -> FStar.UInt32.(x +^ y) + | Unsigned W64 -> FStar.UInt64.(x +^ y) + | Unsigned W128 -> FStar.UInt128.(x +^ y) + | Signed W8 -> FStar.Int8.(x +^ y) + | Signed W16 -> FStar.Int16.(x +^ y) + | Signed W32 -> FStar.Int32.(x +^ y) + 
| Signed W64 -> FStar.Int64.(x +^ y) + | Signed W128 -> FStar.Int128.(x +^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( +? ) (#w:fixed_width) + (x:int_t (Unsigned w)) + (y:int_t (Unsigned w)) + : Tot (int_t (Unsigned w)) + = match w with + | W8 -> FStar.UInt8.(x +?^ y) + | W16 -> FStar.UInt16.(x +?^ y) + | W32 -> FStar.UInt32.(x +?^ y) + | W64 -> FStar.UInt64.(x +?^ y) + | W128 -> FStar.UInt128.(x +?^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +noextract +let modulo sw (x:int) (y:pos{Signed? sw ==> y%2=0}) = + match sw with + | Unsigned _ -> x % y + | _ -> FStar.Int.(x @% y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( +% ) (#sw:_{Unsigned? sw}) + (x:int_t sw) + (y:int_t sw) + : Tot (int_t sw) + = let Unsigned w = sw in + match w with + | W8 -> FStar.UInt8.(x +%^ y) + | W16 -> FStar.UInt16.(x +%^ y) + | W32 -> FStar.UInt32.(x +%^ y) + | W64 -> FStar.UInt64.(x +%^ y) + | W128 -> FStar.UInt128.(x +%^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let op_Subtraction #sw + (x:int_t sw) + (y:int_t sw{within_bounds sw (v x - v y)}) + : Tot (int_t sw) + = match sw with + | Signed Winfinite -> x - y + | Unsigned W8 -> FStar.UInt8.(x -^ y) + | Unsigned W16 -> FStar.UInt16.(x -^ y) + | Unsigned W32 -> FStar.UInt32.(x -^ y) + | Unsigned W64 -> FStar.UInt64.(x -^ y) + | Unsigned W128 -> FStar.UInt128.(x -^ y) + | Signed W8 -> FStar.Int8.(x -^ y) + | Signed W16 -> FStar.Int16.(x -^ y) + | Signed W32 -> FStar.Int32.(x -^ y) + | Signed W64 -> FStar.Int64.(x -^ y) + | Signed W128 -> FStar.Int128.(x -^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let op_Subtraction_Question + (#sw:_{Unsigned? sw}) + (x:int_t sw) + (y:int_t sw) + : Tot (int_t sw) + = let Unsigned w = sw in + match w with + | W8 -> FStar.UInt8.(x -?^ y) + | W16 -> FStar.UInt16.(x -?^ y) + | W32 -> FStar.UInt32.(x -?^ y) + | W64 -> FStar.UInt64.(x -?^ y) + | W128 -> FStar.UInt128.(x -?^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let op_Subtraction_Percent + (#sw:_{Unsigned? sw}) + (x:int_t sw) + (y:int_t sw) + : Tot (int_t sw) + = let Unsigned w = sw in + match w with + | W8 -> FStar.UInt8.(x -%^ y) + | W16 -> FStar.UInt16.(x -%^ y) + | W32 -> FStar.UInt32.(x -%^ y) + | W64 -> FStar.UInt64.(x -%^ y) + | W128 -> FStar.UInt128.(x -%^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let op_Minus + (#sw:_{Signed? sw}) + (x:int_t sw{within_bounds sw (0 - v x)}) + : Tot (int_t sw) + = let Signed w = sw in + match w with + | Winfinite -> 0 - x + | W8 -> FStar.Int8.(0y -^ x) + | W16 -> FStar.Int16.(0s -^ x) + | W32 -> FStar.Int32.(0l -^ x) + | W64 -> FStar.Int64.(0L -^ x) + | W128 -> FStar.Int128.(int_to_t 0 -^ x) + +open FStar.Mul +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( * ) (#sw:signed_width{width_of_sw sw <> W128}) + (x:int_t sw) + (y:int_t sw{within_bounds sw (v x * v y)}) + : Tot (int_t sw) + = match sw with + | Signed Winfinite -> x * y + | Unsigned W8 -> FStar.UInt8.(x *^ y) + | Unsigned W16 -> FStar.UInt16.(x *^ y) + | Unsigned W32 -> FStar.UInt32.(x *^ y) + | Unsigned W64 -> FStar.UInt64.(x *^ y) + | Signed W8 -> FStar.Int8.(x *^ y) + | Signed W16 -> FStar.Int16.(x *^ y) + | Signed W32 -> FStar.Int32.(x *^ y) + | Signed W64 -> FStar.Int64.(x *^ y) + | Signed W128 -> FStar.Int128.(x *^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( *? ) (#sw:_{Unsigned? 
sw /\ width_of_sw sw <> W128}) + (x:int_t sw) + (y:int_t sw) + : Tot (int_t sw) + = let Unsigned w = sw in + match w with + | W8 -> FStar.UInt8.(x *?^ y) + | W16 -> FStar.UInt16.(x *?^ y) + | W32 -> FStar.UInt32.(x *?^ y) + | W64 -> FStar.UInt64.(x *?^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( *% ) (#sw:_{Unsigned? sw /\ width_of_sw sw <> W128}) + (x:int_t sw) + (y:int_t sw) + : Tot (int_t sw) + = let Unsigned w = sw in + match w with + | W8 -> FStar.UInt8.(x *%^ y) + | W16 -> FStar.UInt16.(x *%^ y) + | W32 -> FStar.UInt32.(x *%^ y) + | W64 -> FStar.UInt64.(x *%^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( > ) #sw (x:int_t sw) (y:int_t sw) : bool = + match sw with + | Signed Winfinite -> x > y + | Unsigned W8 -> FStar.UInt8.(x >^ y) + | Unsigned W16 -> FStar.UInt16.(x >^ y) + | Unsigned W32 -> FStar.UInt32.(x >^ y) + | Unsigned W64 -> FStar.UInt64.(x >^ y) + | Unsigned W128 -> FStar.UInt128.(x >^ y) + | Signed W8 -> FStar.Int8.(x >^ y) + | Signed W16 -> FStar.Int16.(x >^ y) + | Signed W32 -> FStar.Int32.(x >^ y) + | Signed W64 -> FStar.Int64.(x >^ y) + | Signed W128 -> FStar.Int128.(x >^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( >= ) #sw (x:int_t sw) (y:int_t sw) : bool = + match sw with + | Signed Winfinite -> x >= y + | Unsigned W8 -> FStar.UInt8.(x >=^ y) + | Unsigned W16 -> FStar.UInt16.(x >=^ y) + | Unsigned W32 -> FStar.UInt32.(x >=^ y) + | Unsigned W64 -> FStar.UInt64.(x >=^ y) + | Unsigned W128 -> FStar.UInt128.(x >=^ y) + | Signed W8 -> FStar.Int8.(x >=^ y) + | Signed W16 -> FStar.Int16.(x >=^ y) + | Signed W32 -> FStar.Int32.(x >=^ y) + | Signed W64 -> FStar.Int64.(x >=^ y) + | Signed W128 -> FStar.Int128.(x >=^ y) + + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( < ) #sw (x:int_t sw) (y:int_t sw) : bool = + match sw with + | Signed Winfinite -> x < y + | Unsigned W8 -> FStar.UInt8.(x <^ y) + | Unsigned W16 -> FStar.UInt16.(x <^ y) + | Unsigned W32 -> FStar.UInt32.(x <^ y) + | Unsigned W64 -> FStar.UInt64.(x <^ y) + | Unsigned W128 -> FStar.UInt128.(x <^ y) + | Signed W8 -> FStar.Int8.(x <^ y) + | Signed W16 -> FStar.Int16.(x <^ y) + | Signed W32 -> FStar.Int32.(x <^ y) + | Signed W64 -> FStar.Int64.(x <^ y) + | Signed W128 -> FStar.Int128.(x <^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( <= ) #sw (x:int_t sw) (y:int_t sw) : bool = + match sw with + | Signed Winfinite -> x <= y + | Unsigned W8 -> FStar.UInt8.(x <=^ y) + | Unsigned W16 -> FStar.UInt16.(x <=^ y) + | Unsigned W32 -> FStar.UInt32.(x <=^ y) + | Unsigned W64 -> FStar.UInt64.(x <=^ y) + | Unsigned W128 -> FStar.UInt128.(x <=^ y) + | Signed W8 -> FStar.Int8.(x <=^ y) + | Signed W16 -> FStar.Int16.(x <=^ y) + | Signed W32 -> FStar.Int32.(x <=^ y) + | Signed W64 -> FStar.Int64.(x <=^ y) + | Signed W128 -> FStar.Int128.(x <=^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( / ) (#sw:signed_width{sw <> Unsigned W128}) + (x:int_t sw) + (y:int_t sw{0 <> (v y <: Prims.int) /\ + (match sw with + | Unsigned _ -> within_bounds sw (v x / v y) + | Signed _ -> within_bounds sw (v x `FStar.Int.op_Slash` v y))}) + : Tot (int_t sw) + = match sw with + | Signed Winfinite -> x / y + | Unsigned W8 -> FStar.UInt8.(x /^ y) + | Unsigned W16 -> FStar.UInt16.(x /^ y) + | Unsigned W32 -> FStar.UInt32.(x /^ y) + | Unsigned W64 -> FStar.UInt64.(x /^ y) + | Signed W8 -> FStar.Int8.(x /^ y) + | Signed W16 -> FStar.Int16.(x /^ y) + | Signed W32 -> FStar.Int32.(x /^ y) + | 
Signed W64 -> FStar.Int64.(x /^ y) + | Signed W128 -> FStar.Int128.(x /^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( % ) (#sw:signed_width{sw <> Unsigned W128}) + (x:int_t sw) + (y:int_t sw{0 <> (v y <: Prims.int) /\ + (match sw with + | Unsigned _ -> within_bounds sw (FStar.UInt.mod #(nat_of_fixed_width (width_of_sw sw)) (v x) (v y)) + | Signed Winfinite -> True + | Signed _ -> within_bounds sw (FStar.Int.mod #(nat_of_fixed_width (width_of_sw sw)) (v x) (v y))) /\ + within_bounds sw (FStar.Int.op_Slash (v x) (v y))}) + : Tot (int_t sw) + = match sw with + | Signed Winfinite -> x % y + | Unsigned W8 -> FStar.UInt8.(x %^ y) + | Unsigned W16 -> FStar.UInt16.(x %^ y) + | Unsigned W32 -> FStar.UInt32.(x %^ y) + | Unsigned W64 -> FStar.UInt64.(x %^ y) + | Signed W8 -> FStar.Int8.(x %^ y) + | Signed W16 -> FStar.Int16.(x %^ y) + | Signed W32 -> FStar.Int32.(x %^ y) + | Signed W64 -> FStar.Int64.(x %^ y) + | Signed W128 -> FStar.Int128.(x %^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( ^^ ) #sw (x:int_t sw) (y:int_t sw{width_of_sw sw <> Winfinite}) + : Tot (int_t sw) + = match sw with + | Unsigned W8 -> FStar.UInt8.(x ^^ y) + | Unsigned W16 -> FStar.UInt16.(x ^^ y) + | Unsigned W32 -> FStar.UInt32.(x ^^ y) + | Unsigned W64 -> FStar.UInt64.(x ^^ y) + | Unsigned W128 -> FStar.UInt128.(x ^^ y) + | Signed W8 -> FStar.Int8.(x ^^ y) + | Signed W16 -> FStar.Int16.(x ^^ y) + | Signed W32 -> FStar.Int32.(x ^^ y) + | Signed W64 -> FStar.Int64.(x ^^ y) + | Signed W128 -> FStar.Int128.(x ^^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( &^ ) #sw (x:int_t sw) (y:int_t sw{width_of_sw sw <> Winfinite}) + : Tot (int_t sw) + = match sw with + | Unsigned W8 -> FStar.UInt8.(x &^ y) + | Unsigned W16 -> FStar.UInt16.(x &^ y) + | Unsigned W32 -> FStar.UInt32.(x &^ y) + | Unsigned W64 -> FStar.UInt64.(x &^ y) + | Unsigned W128 -> FStar.UInt128.(x &^ y) + | Signed W8 -> FStar.Int8.(x &^ y) + | Signed W16 -> FStar.Int16.(x &^ y) + | Signed W32 -> FStar.Int32.(x &^ y) + | Signed W64 -> FStar.Int64.(x &^ y) + | Signed W128 -> FStar.Int128.(x &^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( |^ ) #sw (x:int_t sw) (y:int_t sw{width_of_sw sw <> Winfinite}) + : Tot (int_t sw) + = match sw with + | Unsigned W8 -> FStar.UInt8.(x |^ y) + | Unsigned W16 -> FStar.UInt16.(x |^ y) + | Unsigned W32 -> FStar.UInt32.(x |^ y) + | Unsigned W64 -> FStar.UInt64.(x |^ y) + | Unsigned W128 -> FStar.UInt128.(x |^ y) + | Signed W8 -> FStar.Int8.(x |^ y) + | Signed W16 -> FStar.Int16.(x |^ y) + | Signed W32 -> FStar.Int32.(x |^ y) + | Signed W64 -> FStar.Int64.(x |^ y) + | Signed W128 -> FStar.Int128.(x |^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( <<^ ) #sw (x:int_t sw{0 <= v x}) + (y:int_t (Unsigned W32){width_of_sw sw <> Winfinite /\ v y < nat_of_fixed_width (width_of_sw sw) /\ (Signed? 
sw ==> within_bounds sw (v x * pow2 (v y)))}) + : Tot (int_t sw) + = match sw with + | Unsigned W8 -> FStar.UInt8.(x <<^ y) + | Unsigned W16 -> FStar.UInt16.(x <<^ y) + | Unsigned W32 -> FStar.UInt32.(x <<^ y) + | Unsigned W64 -> FStar.UInt64.(x <<^ y) + | Unsigned W128 -> FStar.UInt128.(x <<^ y) + | Signed W8 -> FStar.Int8.(x <<^ y) + | Signed W16 -> FStar.Int16.(x <<^ y) + | Signed W32 -> FStar.Int32.(x <<^ y) + | Signed W64 -> FStar.Int64.(x <<^ y) + | Signed W128 -> FStar.Int128.(x <<^ y) + +[@@mark_for_norm; strict_on_arguments [0]] +unfold +noextract +let ( >>^ ) #sw (x:int_t sw{0 <= v x}) + (y:int_t (Unsigned W32){width_of_sw sw <> Winfinite /\ v y < nat_of_fixed_width (width_of_sw sw)}) + : Tot (int_t sw) + = match sw with + | Unsigned W8 -> FStar.UInt8.(x >>^ y) + | Unsigned W16 -> FStar.UInt16.(x >>^ y) + | Unsigned W32 -> FStar.UInt32.(x >>^ y) + | Unsigned W64 -> FStar.UInt64.(x >>^ y) + | Unsigned W128 -> FStar.UInt128.(x >>^ y) + | Signed W8 -> FStar.Int8.(x >>^ y) + | Signed W16 -> FStar.Int16.(x >>^ y) + | Signed W32 -> FStar.Int32.(x >>^ y) + | Signed W64 -> FStar.Int64.(x >>^ y) + | Signed W128 -> FStar.Int128.(x >>^ y) + +[@@mark_for_norm] +unfold +let uint_8 = int_t (Unsigned W8) + +[@@mark_for_norm] +unfold +let uint_16 = int_t (Unsigned W16) + +[@@mark_for_norm] +unfold +let uint_32 = int_t (Unsigned W32) + +[@@mark_for_norm] +unfold +let uint_64 = int_t (Unsigned W64) + +[@@mark_for_norm] +unfold +let int = int_t (Signed Winfinite) + +[@@mark_for_norm] +unfold +let int_8 = int_t (Signed W8) + +[@@mark_for_norm] +unfold +let int_16 = int_t (Signed W16) + +[@@mark_for_norm] +unfold +let int_32 = int_t (Signed W32) + +[@@mark_for_norm] +unfold +let int_64 = int_t (Signed W64) + +[@@mark_for_norm] +unfold +let int_128 = int_t (Signed W128) + +[@@mark_for_norm] +unfold +let ok #sw + (op:(int_t (Signed Winfinite) + -> int_t (Signed Winfinite) + -> int_t (Signed Winfinite))) + (x:int_t sw) + (y:int_t sw) + = within_bounds sw (op (v x) (v y)) + +[@@mark_for_norm] +unfold +let nat = i:int{ i >= 0 } + +[@@mark_for_norm] +unfold +let pos = i:nat{ 0 < i } + +//////////////////////////////////////////////////////////////////////////////// +//Test +//////////////////////////////////////////////////////////////////////////////// +let f_int (x:int) (y:int) = x + y +let f_nat (x:nat) (y:nat) = x + y +let f_nat_int_pos (x:nat) (y:int) (z:pos) = x + y + z +let f_uint_8 (x:uint_8) (y:uint_8{ok (+) x y}) = x + y +let f_int_16 (x:int_16) (y:int_16{ok (+) x y}) = x + y +let g (x:uint_32) (y:uint_32{ok ( * ) y y /\ ok (+) x (y * y)}) = x + y * y +let h (x:Prims.nat) (y:Prims.nat): nat = u x + u y +let i (x:Prims.nat) (y:Prims.nat) = x + y +let j (x:Prims.int) (y:Prims.nat) = x - y +let k (x:Prims.int) (y:Prims.int) = x * y diff --git a/stage0/ulib/FStar.Issue.fsti b/stage0/ulib/FStar.Issue.fsti new file mode 100644 index 00000000000..3697227fc1e --- /dev/null +++ b/stage0/ulib/FStar.Issue.fsti @@ -0,0 +1,49 @@ +module FStar.Issue +open FStar.Range + +module Pprint = FStar.Pprint + +new +val issue : Type0 + +let issue_level_string = s:string { + s == "Info" \/ + s == "Warning" \/ + s == "Error" \/ + s == "Feature not yet implemented: " +} + +val message_of_issue (i:issue) : Tot (list Pprint.document) + +val level_of_issue (i:issue) : Tot issue_level_string + +val number_of_issue (i:issue) : Tot (option int) + +val range_of_issue (i:issue) : Tot (option range) + +val context_of_issue (i:issue) : Tot (list string) + +val render_issue (i:issue) : Tot string + +(* NOTE: the only way to build a 
document that actually reduces +in interpreted mode (like in tactics when not using plugins) +is using arbitrary_string, as below. *) +val mk_issue_doc (i:issue_level_string) + (msg:list Pprint.document) + (range:option range) + (number:option int) + (ctx:list string) + : Tot issue + +(* These qualifiers here to make sure that karamel (while building +krmllib) does not attempt to extract this definition, as that would fail +since it does not have an implementation of arbitrary_string. We could +also not extract this module altogether. *) +noextract +inline_for_extraction +let mk_issue (i:issue_level_string) + (msg:string) + (range:option range) + (number:option int) + (ctx:list string) + = mk_issue_doc i [Pprint.arbitrary_string msg] range number ctx diff --git a/stage0/ulib/FStar.LexicographicOrdering.fst b/stage0/ulib/FStar.LexicographicOrdering.fst new file mode 100644 index 00000000000..ef1349c1151 --- /dev/null +++ b/stage0/ulib/FStar.LexicographicOrdering.fst @@ -0,0 +1,132 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: Aseem Rastogi and Nikhil Swamy +*) + +module FStar.LexicographicOrdering +#set-options "--warn_error -242" //no inner let recs in SMT +open FStar.ReflexiveTransitiveClosure +open FStar.WellFounded + +/// A helper lemma about reflexive transitive closure + +let closure_transitive (#a:Type u#a) (#r_a:binrel u#a u#ra a) (x y z:a) + : Lemma + (requires closure r_a x y /\ + squash (r_a y z)) + (ensures closure r_a x z) + [SMTPat (closure r_a x y); + SMTPat (r_a y z)] + = assert (closure r_a y z) + +/// The main workhorse for the proof of lex_t well-foundedness +/// +/// Given x:a and (y:b x), along with proof of their accessibility, +/// this function provides a proof of accessibility for all t s.t. 
lex_t t (| x, y |) +/// +/// The proof is by induction on the accessibility proofs of x and y +/// In the Left_lex case, we do the induction on the accessibility of x, +/// and in the Right_lex case, on the accessibility of y +/// +/// Note also that the proof _does not_ rely on the in-built lexicographic ordering in F* +/// +/// An interesting aspect of the proof is the wf_b argument, +/// that provides a proof for the well-foundedness of r_b, +/// but note that we only require it on elements of a that are related to x in the +/// transitive closure of r_a + +let rec lex_t_wf_aux (#a:Type u#a) + (#b:a -> Type u#b) + (#r_a:binrel u#a u#ra a) + (#r_b:(x:a -> binrel u#b u#rb (b x))) + (x:a) + (acc_x:acc r_a x) //x and accessibility of x + (wf_b:(x0:a{closure r_a x0 x} -> well_founded (r_b x0))) //well-foundedness of r_b + (y:b x) + (acc_y:acc (r_b x) y) //y and accessibility of y + (t:(x:a & b x)) //another element t, + (p_t:lex_t r_a r_b t (| x, y |)) //that is related to (| x, y |) + : Tot (acc (lex_t r_a r_b) t) //returns the accessibility proof for t + (decreases acc_x) + = match p_t with + | Left_lex x_t _ y_t _ p_a -> + AccIntro (lex_t_wf_aux + x_t + (match acc_x with + | AccIntro f -> f x_t p_a) + wf_b + y_t + (wf_b x_t y_t)) + | Right_lex _ _ _ _ -> + //inner induction that keeps x same, but recurses on acc_y + let rec lex_t_wf_aux_y (y:b x) (acc_y:acc (r_b x) y) (t:(x:a & b x)) (p_t:lex_t r_a r_b t (| x, y |)) + : Tot (acc (lex_t r_a r_b) t) + (decreases acc_y) + = match p_t with + | Left_lex x_t _ y_t _ p_a -> + AccIntro (lex_t_wf_aux + x_t + (match acc_x with + | AccIntro f -> f x_t p_a) + wf_b + y_t + (wf_b x_t y_t)) + | Right_lex _ y_t _ p_b -> + AccIntro (lex_t_wf_aux_y + y_t + (match acc_y with + | AccIntro f -> f y_t p_b)) in + lex_t_wf_aux_y y acc_y t p_t + + +let lex_t_wf #_ #_ #_ #_ wf_a wf_b = + fun (| x, y |) -> AccIntro (lex_t_wf_aux x (wf_a x) wf_b y (wf_b x y)) + +open FStar.Squash + +(* + * Given lex_sq, we can output a squashed instance of lex + *) +let lex_to_lex_t #a #b r_a r_b t1 t2 p = + let left (p:squash (r_a (dfst t1) (dfst t2))) + : squash (lex_t r_a r_b t1 t2) + = bind_squash p (fun p -> + return_squash (Left_lex #a #b #r_a #r_b (dfst t1) (dfst t2) (dsnd t1) (dsnd t2) p)) in + + let right (p:(dfst t1 == dfst t2 /\ (squash (r_b (dfst t1) (dsnd t1) (dsnd t2))))) + : squash (lex_t r_a r_b t1 t2) + = bind_squash p (fun p -> + match p with + | Prims.Pair (_:dfst t1 == dfst t2) p2 -> + bind_squash p2 (fun p2 -> + return_squash (Right_lex #a #b #r_a #r_b (dfst t1) (dsnd t1) (dsnd t2) p2))) in + + bind_squash p (fun p -> + match p with + | Prims.Left p1 -> left p1 + | Prims.Right p2 -> right p2) + + +let lex_t_non_dep_wf #a #b #r_a #r_b wf_a wf_b = + let rec get_acc (t:a & b) (p:acc (lex_t r_a (fun _ -> r_b)) (tuple_to_dep_tuple t)) + : Tot (acc (lex_t_non_dep r_a r_b) t) + (decreases p) + = let get_acc_aux (t1:a & b) (p_dep:lex_t_non_dep r_a r_b t1 t) + : (p1:acc (lex_t r_a (fun _ -> r_b)) (tuple_to_dep_tuple t1){p1 << p}) + = match p with + | AccIntro f -> f (tuple_to_dep_tuple t1) p_dep in + AccIntro (fun t1 p1 -> get_acc t1 (get_acc_aux t1 p1)) in + fun t -> get_acc t (lex_t_wf wf_a (fun _ -> wf_b) (tuple_to_dep_tuple t)) diff --git a/stage0/ulib/FStar.LexicographicOrdering.fsti b/stage0/ulib/FStar.LexicographicOrdering.fsti new file mode 100644 index 00000000000..3c1ff1106bc --- /dev/null +++ b/stage0/ulib/FStar.LexicographicOrdering.fsti @@ -0,0 +1,194 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the 
"License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: Aseem Rastogi and Nikhil Swamy +*) + +module FStar.LexicographicOrdering + +/// This module proves that lexicographic ordering is well-founded +/// (i.e. every element is accessible) +/// +/// It defines the lex relation as an inductive, and prove its well-foundedness +/// +/// Since SMT proofs in F* are more amenable to squashed definitions, +/// the module also defines a squashed version of the lex relation, +/// and prove its well-foundedness, reusing the proof for the constructive version +/// +/// See tests/micro-benchmarks/Test.WellFoundedRecursion.fst for +/// how we use squashed `lex` to prove termination for the ackermann function +/// +/// Finally, the module defines a non-dependent version of lex +/// (in-terms of dependent lex), and uses it to prove well-foundedness of symmetric products too +/// +/// Some references: +/// - https://github.com/coq/coq/blob/master/theories/Wellfounded/Lexicographic_Product.v +/// - Constructing Recursion Operators in Type Theory, L. Paulson JSC (1986) 2, 325-355 + +open FStar.WellFounded + + +/// Definition of lexicographic ordering as a relation over dependent tuples +/// +/// Two elements are related if: +/// - Either their first components are related +/// - Or, the first components are equal, and the second components are related + +noeq +type lex_t (#a:Type u#a) (#b:a -> Type u#b) + (r_a:binrel u#a u#ra a) + (r_b:(x:a -> binrel u#b u#rb (b x))) + : (x:a & b x) -> (x:a & b x) -> Type u#(max a b ra rb) = + | Left_lex: + x1:a -> x2:a -> + y1:b x1 -> y2:b x2 -> + r_a x1 x2 -> + lex_t r_a r_b (| x1, y1 |) (| x2, y2 |) + | Right_lex: + x:a -> + y1:b x -> y2:b x -> + r_b x y1 y2 -> + lex_t r_a r_b (| x, y1 |) (| x, y2 |) + +/// Given two well-founded relations `r_a` and `r_b`, +/// their lexicographic ordering is also well-founded + +val lex_t_wf (#a:Type u#a) (#b:a -> Type u#b) + (#r_a:binrel u#a u#ra a) + (#r_b:(x:a -> binrel u#b u#rb (b x))) + (wf_a:well_founded r_a) + (wf_b:(x:a -> well_founded (r_b x))) + : well_founded (lex_t r_a r_b) + + +/// We can also define a squashed version of lex relation + +unfold +let lex_aux (#a:Type u#a) (#b:a -> Type u#b) + (r_a:binrel u#a u#ra a) + (r_b:(x:a -> binrel u#b u#rb (b x))) + : binrel u#(max a b) u#0 (x:a & b x) + = fun (| x1, y1 |) (| x2, y2 |) -> + (squash (r_a x1 x2)) \/ + (x1 == x2 /\ squash ((r_b x1) y1 y2)) + + +/// Provide a mapping from a point in lex_aux to a squashed point in lex + +val lex_to_lex_t (#a:Type u#a) (#b:a -> Type u#b) + (r_a:binrel u#a u#ra a) + (r_b:(x:a -> binrel u#b u#rb (b x))) + (t1 t2:(x:a & b x)) + (p:lex_aux r_a r_b t1 t2) + : squash (lex_t r_a r_b t1 t2) + +/// And prove that is it is well-founded + +let lex_wf (#a:Type u#a) (#b:a -> Type u#b) + (#r_a:binrel u#a u#ra a) + (#r_b:(x:a -> binrel u#b u#rb (b x))) + (wf_a:well_founded r_a) + (wf_b:(x:a -> well_founded (r_b x))) + : Lemma (is_well_founded (lex_aux r_a r_b)) + = subrelation_squash_wf (lex_to_lex_t r_a r_b) (lex_t_wf wf_a wf_b) + + +/// A user-friendly lex_wf that returns a well-founded relation + +unfold +let lex 
(#a:Type u#a) (#b:a -> Type u#b) + (#r_a:binrel u#a u#ra a) + (#r_b:(x:a -> binrel u#b u#rb (b x))) + (wf_a:well_founded r_a) + (wf_b:(x:a -> well_founded (r_b x))) + : well_founded_relation (x:a & b x) + = lex_wf wf_a wf_b; + lex_aux r_a r_b + + +/// We can also define a non-dependent version of the lex ordering, +/// in terms of the dependent lex tuple, +/// and prove its well-foundedness + +let tuple_to_dep_tuple (#a #b:Type) (x:a & b) : dtuple2 a (fun _ -> b) = + (| fst x, snd x |) + + +/// The non-dependent lexicographic ordering +/// and its well-foundedness + +let lex_t_non_dep (#a:Type u#a) + (#b:Type u#b) + (r_a:binrel u#a u#ra a) + (r_b:binrel u#b u#rb b) + : binrel u#(max a b) u#(max a b ra rb) (a & b) + = fun x y -> + lex_t r_a (fun _ -> r_b) (tuple_to_dep_tuple x) (tuple_to_dep_tuple y) + +val lex_t_non_dep_wf (#a:Type u#a) + (#b:Type u#b) + (#r_a:binrel u#a u#ra a) + (#r_b:binrel u#b u#rb b) + (wf_a:well_founded r_a) + (wf_b:well_founded r_b) + : well_founded (lex_t_non_dep r_a r_b) + + +/// Symmetric product relation +/// we can prove its well-foundedness by showing that it is a subrelation of non-dep lex + +noeq +type sym (#a:Type u#a) + (#b:Type u#b) + (r_a:binrel u#a u#ra a) + (r_b:binrel u#b u#rb b) + : (a & b) -> (a & b) -> Type u#(max a b ra rb) = + | Left_sym: + x1:a -> x2:a -> + y:b -> + r_a x1 x2 -> + sym r_a r_b (x1, y) (x2, y) + | Right_sym: + x:a -> + y1:b -> y2:b -> + r_b y1 y2 -> + sym r_a r_b (x, y1) (x, y2) + +/// sym is a subrelation of non-dependent lex + +let sym_sub_lex (#a:Type u#a) + (#b:Type u#b) + (#r_a:binrel u#a u#ra a) + (#r_b:binrel u#b u#rb b) + (t1 t2:a & b) + (p:sym r_a r_b t1 t2) + : lex_t_non_dep r_a r_b t1 t2 + = match p with + | Left_sym x1 x2 y p -> + Left_lex #a #(fun _ -> b) #r_a #(fun _ -> r_b) x1 x2 y y p + | Right_sym x y1 y2 p -> + Right_lex #a #(fun _ -> b) #r_a #(fun _ -> r_b) x y1 y2 p + + +/// Theorem for symmetric product +/// +let sym_wf (#a:Type u#a) + (#b:Type u#b) + (#r_a:binrel u#a u#ra a) + (#r_b:binrel u#b u#rb b) + (wf_a:well_founded r_a) + (wf_b:well_founded r_b) + : well_founded (sym r_a r_b) + = subrelation_wf sym_sub_lex (lex_t_non_dep_wf wf_a wf_b) diff --git a/stage0/ulib/FStar.List.Pure.Base.fst b/stage0/ulib/FStar.List.Pure.Base.fst new file mode 100644 index 00000000000..1c0f58ddcca --- /dev/null +++ b/stage0/ulib/FStar.List.Pure.Base.fst @@ -0,0 +1,70 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.List.Pure.Base + +open FStar.List.Tot.Base + +(** Functions on list with a pure specification *) + +(** [map2] takes a pair of list of the same length [x1; ...; xn] [y1; ... ; yn] + and return the list [f x1 y1; ... 
; f xn yn] *) +val map2 (#a1 #a2 #b: Type) + (f: a1 -> a2 -> b) + (l1:list a1) + (l2:list a2) + : Pure (list b) + (requires (length l1 == length l2)) + (ensures (fun _ -> True)) + (decreases l1) +let rec map2 #a1 #a2 #b f l1 l2 = + match l1, l2 with + | [], [] -> [] + | x1::xs1, x2::xs2 -> f x1 x2 :: map2 f xs1 xs2 + +(** [map3] takes three lists of the same length [x1; ...; xn] + [y1; ... ; yn] [z1; ... ; zn] and return the list + [f x1 y1 z1; ... ; f xn yn zn] *) +val map3 (#a1 #a2 #a3 #b: Type) + (f: a1 -> a2 -> a3 -> b) + (l1:list a1) + (l2:list a2) + (l3:list a3) + : Pure (list b) + (requires (let n = length l1 in + (n == length l2 /\ + n == length l3))) + (ensures (fun _ -> True)) + (decreases l1) +let rec map3 #a1 #a2 #a3 #b f l1 l2 l3 = + match l1, l2, l3 with + | [], [], [] -> [] + | x1::xs1, x2::xs2, x3::xs3 -> f x1 x2 x3 :: map3 f xs1 xs2 xs3 + +(** [zip] takes a pair of list of the same length and returns + the list of index-wise pairs *) +val zip (#a1 #a2:Type) (l1:list a1) (l2:list a2) + : Pure (list (a1 & a2)) + (requires (let n = length l1 in n == length l2)) + (ensures (fun _ -> True)) +let zip #a1 #a2 l1 l2 = map2 (fun x y -> x, y) l1 l2 + +(** [zip3] takes a 3-tuple of list of the same length and returns + the list of index-wise 3-tuples *) +val zip3 (#a1 #a2 #a3:Type) (l1:list a1) (l2:list a2) (l3:list a3) + : Pure (list (a1 & a2 & a3)) + (requires (let n = length l1 in n == length l2 /\ n == length l3)) + (ensures (fun _ -> True)) +let zip3 #a1 #a2 #a3 l1 l2 l3 = map3 (fun x y z -> x,y,z) l1 l2 l3 diff --git a/stage0/ulib/FStar.List.Pure.Properties.fst b/stage0/ulib/FStar.List.Pure.Properties.fst new file mode 100644 index 00000000000..6224666d77d --- /dev/null +++ b/stage0/ulib/FStar.List.Pure.Properties.fst @@ -0,0 +1,275 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.List.Pure.Properties + +open FStar.List.Tot.Base +open FStar.List.Pure.Base +open FStar.List.Tot.Properties + +(** Properties of splitAt *) + +let rec splitAt_length + (#a:Type) + (n:nat) + (l:list a) + : Lemma (requires True) + (ensures begin + let l_1, l_2 = splitAt n l in + if length l < n then + length l_1 == length l /\ length l_2 == 0 + else + length l_1 == n /\ length l_2 = length l - n + end) + (decreases n) += + if n = 0 then () + else + match l with + | [] -> () + | _::xs -> splitAt_length (n-1) xs + +let rec splitAt_assoc + (#a:Type) + (n1 n2:nat) + (l:list a) + : Lemma (requires True) + (ensures begin + let l1, l2 = splitAt n1 l in + let l2, l3 = splitAt n2 l2 in + let l1', l2' = splitAt (n1+n2) l in + l1' == l1 @ l2 /\ l2' == l3 + end) + (decreases n1) += + if n1 = 0 then () + else + match l with + | [] -> () + | x :: xs -> splitAt_assoc (n1-1) n2 xs + + +let rec splitAt_length_total (#a:Type) (l:list a) + : Lemma (requires True) (ensures (splitAt (length l) l == (l, []))) (decreases l) += + match l with + | [] -> () + | x :: xs -> splitAt_length_total xs + + +(** If we [append] the two lists produced using a [splitAt], then we + get back the original list *) +let rec lemma_splitAt_append (#a:Type) (n:nat) (l:list a) : + Lemma + (requires n <= length l) + (ensures (let l1, l2 = splitAt n l in + append l1 l2 == l /\ length l1 = n)) = + match n with + | 0 -> () + | _ -> + match l with + | [] -> () + | x :: xs -> lemma_splitAt_append (n-1) xs + + +(** If we [splitAt] the point at which two lists have been [append]ed, then we + get back the original lists. *) +let rec lemma_append_splitAt (#t:Type) (l1 l2:list t) : + Lemma + (ensures (splitAt (length l1) (append l1 l2) == (l1, l2))) = + match l1 with + | [] -> () + | _ -> lemma_append_splitAt (tl l1) l2 + + +(** Fully characterize behavior of [splitAt] in terms of more standard list concepts *) +let lemma_splitAt (#t: Type) (l l1 l2:list t) (n:nat{n <= length l}) : + Lemma (splitAt n l == (l1, l2) <==> l == l1 @ l2 /\ length l1 = n) = + lemma_splitAt_append n l; + lemma_append_splitAt l1 l2 + + +(** The [hd] of the second list returned via [splitAt] is the [n]th element of + the original list *) +let rec lemma_splitAt_index_hd (#t:Type) (n:nat) (l:list t) : + Lemma + (requires (n < length l)) + (ensures (let l1, l2 = splitAt n l in + splitAt_length n l; + length l2 > 0 /\ hd l2 == index l n)) = + let x :: xs = l in + match n with + | 0 -> () + | _ -> lemma_splitAt_index_hd (n - 1) (tl l) + + +(** If two lists have the same left prefix, then shorter left prefixes are + also the same. 
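For instance, if [l1] and [l2] both have length at least 3 and [fst (splitAt 3 l1) == fst (splitAt 3 l2)], then [fst (splitAt 2 l1) == fst (splitAt 2 l2)] as well.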
*) +let rec lemma_splitAt_shorten_left + (#t:Type) (l1 l2:list t) (i:nat{i <= length l1 /\ i <= length l2}) (j:nat{j <= i}) : + Lemma + (requires (fst (splitAt i l1) == fst (splitAt i l2))) + (ensures (fst (splitAt j l1) == fst (splitAt j l2))) = + match j with + | 0 -> () + | _ -> + lemma_splitAt_shorten_left (tl l1) (tl l2) (i-1) (j-1) + +(** Doing an [index] on the left-part of a [splitAt] is same as doing it on + the original list *) +let rec lemma_splitAt_reindex_left (#t:Type) (i:nat) (l:list t) (j:nat) : + Lemma + (requires i <= length l /\ j < i) + (ensures ( + let left, right = splitAt i l in + splitAt_length i l; + j < length left /\ index left j == index l j)) = + match i, j with + | 1, _ | _, 0 -> () + | _ -> lemma_splitAt_reindex_left (i - 1) (tl l) (j - 1) + + +(** Doing an [index] on the right-part of a [splitAt] is same as doing it on + the original list, but shifted *) +let rec lemma_splitAt_reindex_right (#t:Type) (i:nat) (l:list t) (j:nat) : + Lemma + (requires i <= length l /\ j + i < length l) + (ensures ( + let left, right = splitAt i l in + splitAt_length i l; + j < length right /\ index right j == index l (j + i))) = + match i with + | 0 -> () + | _ -> lemma_splitAt_reindex_right (i - 1) (tl l) j + + +(** Properties of split3 *) + + +(** The 3 pieces returned via [split3] can be joined together via an + [append] and a [cons] *) +let lemma_split3_append (#t:Type) (l:list t) (n:nat{n < length l}) : + Lemma + (requires True) + (ensures ( + let a, b, c = split3 l n in + l == append a (b :: c))) = + lemma_splitAt_append n l + + +(** The middle element returned via [split3] is the [n]th [index]ed element *) +let lemma_split3_index (#t:Type) (l:list t) (n:nat{n < length l}) : + Lemma + (requires True) + (ensures ( + let a, b, c = split3 l n in + b == index l n)) = + lemma_splitAt_index_hd n l + + +(** The lengths of the left and right parts of a [split3] are as expected. *) +let lemma_split3_length (#t:Type) (l:list t) (n:nat{n < length l}) : + Lemma + (requires True) + (ensures ( + let a, b, c = split3 l n in + length a = n /\ length c = length l - n - 1)) = + splitAt_length n l + + +(** If we [split3] on lists with the same left prefix, we get the same + element and left prefix. *) +let lemma_split3_on_same_leftprefix + (#t:Type) (l1 l2:list t) (n:nat{n < length l1 /\ n < length l2}) : + Lemma + (requires (fst (splitAt (n+1) l1) == fst (splitAt (n+1) l2))) + (ensures (let a1, b1, c1 = split3 l1 n in + let a2, b2, c2 = split3 l2 n in + a1 == a2 /\ b1 == b2)) = + let a1, b1, c1 = split3 l1 n in + let a2, b2, c2 = split3 l2 n in + lemma_split3_append l1 n; + lemma_split3_append l2 n; + lemma_split3_length l1 n; + lemma_split3_length l2 n; + append_l_cons b1 c1 a1; + append_l_cons b2 c2 a2; + // assert ((a1 @ [b1]) @ c1 == l1); + // assert ((a2 @ [b2]) @ c2 == l2); + let x1, y1 = splitAt (n+1) l1 in + let x2, y2 = splitAt (n+1) l2 in + lemma_splitAt_append (n+1) l1; + lemma_splitAt_append (n+1) l2; + splitAt_length (n+1) l1; + splitAt_length (n+1) l2; + // assert (x1 @ y1 == (a1 @ [b1]) @ c1); + // assert (x2 @ y2 == (a2 @ [b2]) @ c2); + append_length_inv_head x1 y1 (append a1 [b1]) c1; + append_length_inv_head x2 y2 (append a2 [b2]) c2; + // assert (a1 @ [b1] == a2 @ [b2]); + append_length_inv_tail a1 [b1] a2 [b2]; + // assert (a1 == a2 /\ b1 == b2); + () + + +(** If we perform an [unsnoc] on a list, then the left part is the same + as an [append]+[cons] on the list after [split3]. 
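For example, with [l = [1;2;3;4]] and [n = 1], [split3 l 1] is [([1], 2, [3;4])], [unsnoc l] is [([1;2;3], 4)], and indeed [[1] @ (2 :: [3])] is [[1;2;3]].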
*) +let rec lemma_split3_unsnoc (#t:Type) (l:list t) (n:nat{n < length l}) : + Lemma + (requires (n <> length l - 1)) + (ensures ( + let a, b, c = split3 l n in + lemma_split3_length l n; + length c > 0 /\ ( + let xs, x = unsnoc l in + let ys, y = unsnoc c in + append a (b :: ys) == xs))) = + match n with + | 0 -> () + | _ -> lemma_split3_unsnoc (tl l) (n-1) + + +(** Doing [unsnoc] and [split3] in either order leads to the same left + part, and element. *) +let lemma_unsnoc_split3 (#t:Type) (l:list t) (i:nat{i < length l}) : + Lemma + (requires (i <> length l - 1)) + (ensures ( + let xs, x = unsnoc l in + i < length xs /\ ( + let a0, b0, c0 = split3 l i in + let a1, b1, c1 = split3 xs i in + a0 == a1 /\ b0 == b1))) = + let xs, x = unsnoc l in + lemma_unsnoc_length l; + let a0, b0, c0 = split3 l i in + let a1, b1, c1 = split3 xs i in + splitAt_length_total xs; + // assert (fst (splitAt (length xs) xs) == xs); + // assert (fst (splitAt (length xs) xs) == fst (splitAt (length xs) l)); + // assert (i+1 <= length xs); + lemma_splitAt_shorten_left xs l (length xs) (i+1); + // assert (fst (splitAt (i+1) xs) == fst (splitAt (i+1) l)); + lemma_split3_on_same_leftprefix l xs i + +(** The head of the right side of a [split3] can be [index]ed from original list. *) +let rec lemma_split3_r_hd (#t:Type) (l:list t) (i:nat{i < length l}) : + Lemma + (ensures (let a, b, c = split3 l i in + lemma_split3_length l i; + length c > 0 ==> i + 1 < length l /\ hd c == index l (i + 1))) = + match i with + | 0 -> () + | _ -> lemma_split3_r_hd (tl l) (i - 1) diff --git a/stage0/ulib/FStar.List.Pure.fst b/stage0/ulib/FStar.List.Pure.fst new file mode 100644 index 00000000000..6fc75e241bd --- /dev/null +++ b/stage0/ulib/FStar.List.Pure.fst @@ -0,0 +1,20 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.List.Pure + +include FStar.List.Tot +include FStar.List.Pure.Base +include FStar.List.Pure.Properties diff --git a/stage0/ulib/FStar.List.Tot.Base.fst b/stage0/ulib/FStar.List.Tot.Base.fst new file mode 100644 index 00000000000..c08178e20ac --- /dev/null +++ b/stage0/ulib/FStar.List.Tot.Base.fst @@ -0,0 +1,572 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** +This module defines all pure and total operations on lists that can be +used in specifications. It is implemented by FStar_List_Tot_Base.ml, any +functional change and/or the addition of new functions MUST be reflected +there. 
+ +@summary Pure total operations on lists +*) +module FStar.List.Tot.Base + +(** +Base operations +*) + +(** [isEmpty l] returns [true] if and only if [l] is empty *) +val isEmpty: list 'a -> Tot bool +let isEmpty l = match l with + | [] -> true + | _ -> false + +(** [hd l] returns the first element of [l]. Requires [l] to be +nonempty, at type-checking time. Named as in: OCaml, F#, Coq *) +val hd: l:list 'a{Cons? l} -> Tot 'a +let hd = function + | hd::_ -> hd + +(** [tail l] returns [l] without its first element. Requires, at +type-checking time, that [l] be nonempty. Similar to: tl in OCaml, F#, Coq +*) +val tail: l:list 'a {Cons? l} -> Tot (list 'a) +let tail = function + | _::tl -> tl + +(** [tl l] returns [l] without its first element. Requires, at +type-checking time, that [l] be nonempty. Named as in: OCaml, F#, Coq +*) +val tl: l:list 'a {Cons? l} -> Tot (list 'a) +let tl = tail + +(** [last l] returns the last element of [l]. Requires, at +type-checking time, that [l] be nonempty. Named as in: Haskell +*) +val last: l:list 'a {Cons? l} -> Tot 'a +let rec last = function + | [hd] -> hd + | _::tl -> last tl + +(** [init l] returns [l] without its last element. Requires, at +type-checking time, that [l] be nonempty. Named as in: Haskell +*) +val init: l:list 'a {Cons? l} -> Tot (list 'a) +let rec init = function + | [_] -> [] + | hd::tl -> hd::(init tl) + +(** [length l] returns the total number of elements in [l]. Named as +in: OCaml, F#, Coq *) +val length: list 'a -> Tot nat +let rec length = function + | [] -> 0 + | _::tl -> 1 + length tl + +(** [nth l n] returns the [n]-th element in list [l] (with the first +element being the 0-th) if [l] is long enough, or [None] +otherwise. Named as in: OCaml, F#, Coq *) +val nth: list 'a -> nat -> Tot (option 'a) +let rec nth l n = match l with + | [] -> None + | hd::tl -> if n = 0 then Some hd else nth tl (n - 1) + +(** [index l n] returns the [n]-th element in list [l] (with the first +element being the 0-th). Requires, at type-checking time, that [l] be +of length at least [n+1]. *) +val index: #a:Type -> l:list a -> i:nat{i < length l} -> Tot a +let rec index #a (l: list a) (i:nat{i < length l}): Tot a = + if i = 0 then + hd l + else + index (tl l) (i - 1) + +(** [count x l] returns the number of occurrences of [x] in +[l]. Requires, at type-checking time, the type of [a] to have equality +defined. Similar to: [List.count_occ] in Coq. *) +val count: #a:eqtype -> a -> list a -> Tot nat +let rec count #a x = function + | [] -> 0 + | hd::tl -> if x=hd then 1 + count x tl else count x tl + +(** [rev_acc l1 l2] appends the elements of [l1] to the beginning of +[l2], in reverse order. It is equivalent to [append (rev l1) l2], but +is tail-recursive. Similar to: [List.rev_append] in OCaml, Coq. *) +val rev_acc: list 'a -> list 'a -> Tot (list 'a) +let rec rev_acc l acc = match l with + | [] -> acc + | hd::tl -> rev_acc tl (hd::acc) + +(** [rev l] returns the list [l] in reverse order. Named as in: OCaml, +F#, Coq. *) +val rev: list 'a -> Tot (list 'a) +let rev l = rev_acc l [] + +(** [append l1 l2] appends the elements of [l2] to the end of [l1]. Named as: OCaml, F#. Similar to: [List.app] in Coq. *) +val append: list 'a -> list 'a -> Tot (list 'a) +let rec append x y = match x with + | [] -> y + | a::tl -> a::append tl y + +(** Defines notation [@@] for [append], as in OCaml, F# . *) +let op_At x y = append x y + +(** [snoc (l, x)] adds [x] to the end of the list [l]. 
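For example, [snoc ([1;2], 3)] is [[1;2;3]].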
+ + Note: We use an uncurried [snoc (l, x)] instead of the curried + [snoc l x]. This is intentional. If [snoc] takes a pair instead + of 2 arguments, it allows for a better pattern on + [lemma_unsnoc_snoc], which connects [snoc] and [unsnoc]. In + particular, if we had two arguments, then either the pattern would + either be too restrictive or would lead to over-triggering. More + context for this can be seen in the (collapsed and uncollapsed) + comments at https://github.com/FStarLang/FStar/pull/1560 *) +val snoc: (list 'a & 'a) -> Tot (list 'a) +let snoc (l, x) = append l [x] + +(** [flatten l], where [l] is a list of lists, returns the list of the +elements of the lists in [l], preserving their order. Named as in: +OCaml, Coq. *) +val flatten: list (list 'a) -> Tot (list 'a) +let rec flatten l = match l with + | [] -> [] + | hd::tl -> append hd (flatten tl) + +(** [map f l] applies [f] to each element of [l] and returns the list +of results, in the order of the original elements in [l]. Requires, at +type-checking time, [f] to be a pure total function. Named as in: OCaml, Coq, F# *) +val map: ('a -> Tot 'b) -> list 'a -> Tot (list 'b) +let rec map f x = match x with + | [] -> [] + | a::tl -> f a::map f tl + +(** [mapi_init f l n] applies, for each [k], [f (n+k)] to the [k]-th +element of [l] and returns the list of results, in the order of the +original elements in [l]. Requires, at type-checking time, [f] to be a +pure total function. *) +val mapi_init: (int -> 'a -> Tot 'b) -> list 'a -> int -> Tot (list 'b) +let rec mapi_init f l i = match l with + | [] -> [] + | hd::tl -> (f i hd)::(mapi_init f tl (i+1)) + +(** [mapi f l] applies, for each [k], [f k] to the [k]-th element of +[l] and returns the list of results, in the order of the original +elements in [l]. Requires, at type-checking time, [f] to be a pure +total function. Named as in: OCaml *) +val mapi: (int -> 'a -> Tot 'b) -> list 'a -> Tot (list 'b) +let mapi f l = mapi_init f l 0 + +(** [concatMap f l] applies [f] to each element of [l] and returns the +concatenation of the results, in the order of the original elements of +[l]. This is equivalent to [flatten (map f l)]. Requires, at +type-checking time, [f] to be a pure total function. *) +val concatMap: ('a -> Tot (list 'b)) -> list 'a -> Tot (list 'b) +let rec concatMap f = function + | [] -> [] + | a::tl -> + let fa = f a in + let ftl = concatMap f tl in + append fa ftl + +(** [fold_left f x [y1; y2; ...; yn]] computes (f (... (f x y1) y2) +... yn). Requires, at type-checking time, [f] to be a pure total +function. Named as in: OCaml, Coq. *) +val fold_left: ('a -> 'b -> Tot 'a) -> 'a -> l:list 'b -> Tot 'a (decreases l) +let rec fold_left f x l = match l with + | [] -> x + | hd::tl -> fold_left f (f x hd) tl + +(** [fold_right f [x1; x2; ...; xn] y] computes (f x1 (f x2 (... (f xn +y)) ... )). Requires, at type-checking time, [f] to be a pure total +function. 
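For example, [fold_right (fun x acc -> x + acc) [1;2;3] 0] computes [1 + (2 + (3 + 0))], i.e. [6].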
Named as in: OCaml, Coq *) +val fold_right: ('a -> 'b -> Tot 'b) -> list 'a -> 'b -> Tot 'b +let rec fold_right f l x = match l with + | [] -> x + | hd::tl -> f hd (fold_right f tl x) + +(** [fold_right_gtot] is just like [fold_right], except `f` is + a ghost function **) +let rec fold_right_gtot (#a:Type) (#b:Type) (l:list a) (f:a -> b -> GTot b) (x:b) + : GTot b + = match l with + | [] -> x + | hd::tl -> f hd (fold_right_gtot tl f x) + +(* We define map in terms of fold, to share simple lemmas *) +let map_gtot #a #b (f:a -> GTot b) (x:list a) + : GTot (list b) + = fold_right_gtot x (fun x tl -> f x :: tl) [] + +(** [fold_left2 f x [y1; y2; ...; yn] [z1; z2; ...; zn]] computes (f +(... (f x y1 z1) y2 z2) ... yn zn). Requires, at type-checking time, +[f] to be a pure total function, and the lists [y1; y2; ...; yn] and +[z1; z2; ...; zn] to have the same lengths. Named as in: OCaml *) +val fold_left2 : f:('a -> 'b -> 'c -> Tot 'a) -> accu:'a -> l1:(list 'b) -> l2:(list 'c) -> + Pure 'a (requires (length l1 == length l2)) (ensures (fun _ -> True)) (decreases l1) +let rec fold_left2 f accu l1 l2 = + match (l1, l2) with + | ([], []) -> accu + | (a1::l1, a2::l2) -> fold_left2 f (f accu a1 a2) l1 l2 + +(** Propositional membership (as in Coq). Does not require decidable +equality. *) + +(** [memP x l] holds if, and only if, [x] appears as an +element of [l]. Similar to: List.In in Coq. *) +let rec memP (#a: Type) (x: a) (l: list a) : Tot Type0 = + match l with + | [] -> False + | y :: q -> x == y \/ memP x q + +(** List searching **) + +(** [mem x l] returns [true] if, and only if, [x] appears as an +element of [l]. Requires, at type-checking time, the type of elements +of [l] to have decidable equality. Named as in: OCaml. See also: +List.In in Coq, which is propositional. *) +val mem: #a:eqtype -> a -> list a -> Tot bool +let rec mem #a x = function + | [] -> false + | hd::tl -> if hd = x then true else mem x tl + +(** [contains x l] returns [true] if, and only if, [x] appears as an +element of [l]. Requires, at type-checking time, the type of elements +of [l] to have decidable equality. It is equivalent to: [mem x +l]. TODO: should we rather swap the order of arguments? *) +let contains : #a:eqtype -> a -> list a -> Tot bool = mem + +(** [existsb f l] returns [true] if, and only if, there exists some +element [x] in [l] such that [f x] holds. *) +val existsb: #a:Type + -> f:(a -> Tot bool) + -> list a + -> Tot bool +let rec existsb #a f l = match l with + | [] -> false + | hd::tl -> if f hd then true else existsb f tl + +(** [find f l] returns [Some x] for some element [x] appearing in [l] +such that [f x] holds, or [None] only if no such [x] exists. *) +val find: #a:Type + -> f:(a -> Tot bool) + -> list a + -> Tot (option (x:a{f x})) +let rec find #a f l = match l with + | [] -> None #(x:a{f x}) //These type annotations are only present because it makes bootstrapping go much faster + | hd::tl -> if f hd then Some #(x:a{f x}) hd else find f tl + +(** Filtering elements of a list [l] through a Boolean pure total +predicate [f] *) + +(** [filter f l] returns [l] with all elements [x] such that [f x] +does not hold removed. Requires, at type-checking time, [f] to be a +pure total function. 
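For example, [filter (fun x -> x > 1) [0;1;2;3]] is [[2;3]].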
Named as in: OCaml, Coq *) +val filter : #a: Type -> f:(a -> Tot bool) -> l: list a -> Tot (list a) +let rec filter #a f = function + | [] -> [] + | hd::tl -> if f hd then hd::filter f tl else filter f tl + +(** Postcondition on [filter f l]: for any element [x] of [filter f l], +[x] is a member of [l] and [f x] holds. Requires, at type-checking time, +[f] to be a pure total function.*) +let rec mem_filter (#a: Type) (f: (a -> Tot bool)) (l: list a) (x: a) + : Lemma (memP x (filter f l) <==> memP x l /\ f x) = + match l with + | [] -> () + | hd :: tl -> mem_filter f tl x + +(** Postcondition on [filter f l]: stated with [forall]: for any element +[x] of [filter f l], [x] is a member of [l] and [f x] holds. Requires, +at type-checking time, [f] to be a pure total function.*) +let mem_filter_forall (#a: Type) (f: (a -> Tot bool)) (l: list a) + : Lemma (forall x. memP x (filter f l) <==> memP x l /\ f x) + [SMTPat (filter f l)] = + introduce forall x . memP x (filter f l) <==> memP x l /\ f x + with mem_filter f l x + +(** [for_all f l] returns [true] if, and only if, for all elements [x] +appearing in [l], [f x] holds. Requires, at type-checking time, [f] to +be a pure total function. Named as in: OCaml. Similar to: List.forallb +in Coq *) +val for_all: ('a -> Tot bool) -> list 'a -> Tot bool +let rec for_all f l = match l with + | [] -> true + | hd::tl -> if f hd then for_all f tl else false + +(** Specification for [for_all f l] vs. mem *) +let rec for_all_mem + (#a: Type) + (f: (a -> Tot bool)) + (l: list a) +: Lemma + (for_all f l <==> (forall x . memP x l ==> f x)) += match l with + | [] -> () + | _ :: q -> for_all_mem f q + +(** [collect f l] applies [f] to each element of [l] and returns the +concatenation of the results, in the order of the original elements of +[l]. It is equivalent to [flatten (map f l)]. Requires, at +type-checking time, [f] to be a pure total function. TODO: what is +the difference with [concatMap]? *) +val collect: ('a -> Tot (list 'b)) -> list 'a -> Tot (list 'b) +let rec collect f l = match l with + | [] -> [] + | hd::tl -> append (f hd) (collect f tl) + +(** [tryFind f l] returns [Some x] for some element [x] appearing in +[l] such that [f x] holds, or [None] only if no such [x] +exists. Requires, at type-checking time, [f] to be a pure total +function. Contrary to [find], [tryFind] provides no postcondition on +its result. *) +val tryFind: ('a -> Tot bool) -> list 'a -> Tot (option 'a) +let rec tryFind p l = match l with + | [] -> None + | hd::tl -> if p hd then Some hd else tryFind p tl + +(** [tryPick f l] returns [y] for some element [x] appearing in [l] +such that [f x = Some y] for some y, or [None] only if [f x = None] +for all elements [x] of [l]. Requires, at type-checking time, [f] to +be a pure total function. *) +val tryPick: ('a -> Tot (option 'b)) -> list 'a -> Tot (option 'b) +let rec tryPick f l = match l with + | [] -> None + | hd::tl -> + match f hd with + | Some x -> Some x + | None -> tryPick f tl + +(** [choose f l] returns the list of [y] for all elements [x] +appearing in [l] such that [f x = Some y] for some [y]. Requires, at +type-checking time, [f] to be a pure total function. *) +val choose: ('a -> Tot (option 'b)) -> list 'a -> Tot (list 'b) +let rec choose f l = match l with + | [] -> [] + | hd::tl -> + match f hd with + | Some x -> x::(choose f tl) + | None -> choose f tl + +(** [partition f l] returns the pair of lists [(l1, l2)] where all +elements [x] of [l] are in [l1] if [f x] holds, and in [l2] +otherwise. 
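For example, [partition (fun x -> x > 1) [0;2;1;3]] is [([2;3], [0;1])].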
Both [l1] and [l2] retain the original order of +[l]. Requires, at type-checking time, [f] to be a pure total +function. *) +val partition: f:('a -> Tot bool) -> list 'a -> Tot (list 'a & list 'a) +let rec partition f = function + | [] -> [], [] + | hd::tl -> + let l1, l2 = partition f tl in + if f hd + then hd::l1, l2 + else l1, hd::l2 + +(** [subset la lb] is true if and only if all the elements from [la] + are also in [lb]. Requires, at type-checking time, the type of + elements of [la] and [lb] to have decidable equality. *) +val subset: #a:eqtype -> list a -> list a -> Tot bool +let rec subset #a la lb = + match la with + | [] -> true + | h :: tl -> mem h lb && subset tl lb + +(** [noRepeats l] returns [true] if, and only if, no element of [l] +appears in [l] more than once. Requires, at type-checking time, the +type of elements of [la] and [lb] to have decidable equality. *) +val noRepeats : #a:eqtype -> list a -> Tot bool +let rec noRepeats #a la = + match la with + | [] -> true + | h :: tl -> not(mem h tl) && noRepeats tl + + +(** [no_repeats_p l] valid if, and only if, no element of [l] +appears in [l] more than once. *) +val no_repeats_p : #a:Type -> list a -> Tot prop +let rec no_repeats_p #a la = + match la with + | [] -> True + | h :: tl -> ~(memP h tl) /\ no_repeats_p tl + +(** List of tuples **) + +(** [assoc x l] returns [Some y] where [(x, y)] is the first element +of [l] whose first element is [x], or [None] only if no such element +exists. Requires, at type-checking time, the type of [x] to have +decidable equality. Named as in: OCaml. *) +val assoc: #a:eqtype -> #b:Type -> a -> list (a & b) -> Tot (option b) +let rec assoc #a #b x = function + | [] -> None + | (x', y)::tl -> if x=x' then Some y else assoc x tl + +(** [split] takes a list of pairs [(x1, y1), ..., (xn, yn)] and +returns the pair of lists ([x1, ..., xn], [y1, ..., yn]). Named as in: +OCaml *) +val split: list ('a & 'b) -> Tot (list 'a & list 'b) +let rec split l = match l with + | [] -> ([],[]) + | (hd1,hd2)::tl -> + let (tl1,tl2) = split tl in + (hd1::tl1,hd2::tl2) + +(** [unzip] takes a list of pairs [(x1, y1), ..., (xn, yn)] and +returns the pair of lists ([x1, ..., xn], [y1, ..., yn]). Named as in: +Haskell *) +let unzip l = split l + +(** [unzip3] takes a list of triples [(x1, y1, z1), ..., (xn, yn, zn)] +and returns the triple of lists ([x1, ..., xn], [y1, ..., yn], [z1, +..., zn]). Named as in: Haskell *) +val unzip3: list ('a & 'b & 'c) -> Tot (list 'a & list 'b & list 'c) +let rec unzip3 l = match l with + | [] -> ([],[],[]) + | (hd1,hd2,hd3)::tl -> + let (tl1,tl2,tl3) = unzip3 tl in + (hd1::tl1,hd2::tl2,hd3::tl3) + +(** Splitting a list at some index **) + +(** [splitAt] takes a natural number n and a list and returns a pair + of the maximal prefix of l of size smaller than n and the rest of + the list *) +let rec splitAt (#a:Type) (n:nat) (l:list a) : Tot (list a & list a) = + if n = 0 then [], l + else + match l with + | [] -> [], l + | x :: xs -> let l1, l2 = splitAt (n-1) xs in x :: l1, l2 + +let rec lemma_splitAt_snd_length (#a:Type) (n:nat) (l:list a) : + Lemma + (requires (n <= length l)) + (ensures (length (snd (splitAt n l)) = length l - n)) = + match n, l with + | 0, _ -> () + | _, [] -> () + | _, _ :: l' -> lemma_splitAt_snd_length (n - 1) l' + +(** [unsnoc] is an inverse of [snoc]. It splits a list into + all-elements-except-last and last element. 
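For example, [unsnoc [1;2;3]] is [([1;2], 3)].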
*) +val unsnoc: #a:Type -> l:list a{length l > 0} -> Tot (list a & a) +let unsnoc #a l = + let l1, l2 = splitAt (length l - 1) l in + lemma_splitAt_snd_length (length l - 1) l; + l1, hd l2 + +(** [split3] splits a list into 3 parts. This allows easy access to + the part of the list before and after the element, as well as the + element itself. *) +val split3: #a:Type -> l:list a -> i:nat{i < length l} -> Tot (list a & a & list a) +let split3 #a l i = + let a, rest = splitAt i l in + lemma_splitAt_snd_length i l; + let b :: c = rest in + a, b, c + +(** Sorting (implemented as quicksort) **) + +(** [partition] splits a list [l] into two lists, the sum of whose +lengths is the length of [l]. *) +val partition_length: f:('a -> Tot bool) + -> l:list 'a + -> Lemma (requires True) + (ensures (length (fst (partition f l)) + + length (snd (partition f l)) = length l)) +let rec partition_length f l = match l with + | [] -> () + | hd::tl -> partition_length f tl + +(** [bool_of_compare] turns a comparison function into a strict + order. More precisely, [bool_of_compare compare x y] returns true + if, and only if, [compare x y] is negative, meaning [x] precedes + [y] in the ordering defined by compare. + + This is used in sorting, and is defined to be consistent with + OCaml and F#, where sorting is performed in ascending order. +*) +val bool_of_compare : #a:Type -> (a -> a -> Tot int) -> a -> a -> Tot bool +let bool_of_compare #a f x y = f x y < 0 + +(** [compare_of_bool] turns a strict order into a comparison +function. More precisely, [compare_of_bool rel x y] returns a positive +number if, and only if, x `rel` y holds. Inspired from OCaml, where +polymorphic comparison using both the [compare] function and the (>) +infix operator are such that [compare x y] is positive if, and only +if, x > y. Requires, at type-checking time, [rel] to be a pure total +function. *) +val compare_of_bool : #a:eqtype -> (a -> a -> Tot bool) -> a -> a -> Tot int +let compare_of_bool #a rel x y = + if x `rel` y then -1 + else if x = y then 0 + else 1 + +let compare_of_bool_of_compare (#a:eqtype) (f:a -> a -> Tot bool) + : Lemma (forall x y. bool_of_compare (compare_of_bool f) x y == f x y) + = () + +(** [sortWith compare l] returns the list [l'] containing the elements + of [l] sorted along the comparison function [compare], in such a + way that if [compare x y > 0], then [x] appears before [y] in + [l']. Sorts in ascending order *) +val sortWith: ('a -> 'a -> Tot int) -> l:list 'a -> Tot (list 'a) (decreases (length l)) +let rec sortWith f = function + | [] -> [] + | pivot::tl -> + let hi, lo = partition (bool_of_compare f pivot) tl in + partition_length (bool_of_compare f pivot) tl; + append (sortWith f lo) (pivot::sortWith f hi) + +(** A l1 is a strict suffix of l2. *) +let rec strict_suffix_of (#a: Type) (l1 l2: list a) +: Pure Type0 + (requires True) + (ensures (fun _ -> True)) + (decreases l2) += match l2 with + | [] -> False + | _ :: q -> l1 == q \/ l1 `strict_suffix_of` q + +[@@deprecated "This function was misnamed: Please use 'strict_suffix_of'"] +let strict_prefix_of = strict_suffix_of + +val list_unref : #a:Type -> #p:(a -> Type0) -> list (x:a{p x}) -> Tot (list a) +let rec list_unref #a #p l = + match l with + | [] -> [] + | x::xs -> x :: list_unref xs + +val list_refb: #a:eqtype -> #p:(a -> Tot bool) -> + l:list a { for_all p l } -> + Tot (l':list (x:a{ p x }) { + length l = length l' /\ + (forall i. 
{:pattern (index l i) } index l i = index l' i) }) +let rec list_refb #a #p l = + match l with + | hd :: tl -> hd :: list_refb #a #p tl + | [] -> [] + +val list_ref: #a:eqtype -> #p:(a -> Tot prop) -> l:list a { + forall x. {:pattern mem x l} mem x l ==> p x +} -> Tot (l':list (x:a{ p x }) { + length l = length l' /\ + (forall i. {:pattern (index l i) } index l i = index l' i) }) +let rec list_ref #a #p l = + match l with + | hd :: tl -> + assert (mem hd l); + assert (p hd); + assert (forall x. {:pattern mem x tl} mem x tl ==> mem x l); + hd :: list_ref #a #p tl + | [] -> [] diff --git a/stage0/ulib/FStar.List.Tot.Properties.fst b/stage0/ulib/FStar.List.Tot.Properties.fst new file mode 100644 index 00000000000..08fa9dffeac --- /dev/null +++ b/stage0/ulib/FStar.List.Tot.Properties.fst @@ -0,0 +1,1021 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** +This module states and proves some properties about pure and total +operations on lists. + +@summary Properties of pure total operations on lists +*) +module FStar.List.Tot.Properties +open FStar.List.Tot.Base + +(** Properties about mem **) + +(** Correctness of [mem] for types with decidable equality. TODO: +replace [mem] with [memP] in relevant lemmas and define the right +SMTPat to automatically recover lemmas about [mem] for types with +decidable equality *) +let rec mem_memP + (#a: eqtype) + (x: a) + (l: list a) +: Lemma (ensures (mem x l <==> memP x l)) + [SMTPat (mem x l); SMTPat (memP x l)] += match l with + | [] -> () + | a :: q -> mem_memP x q + +(** If an element can be [index]ed, then it is a [memP] of the list. *) +let rec lemma_index_memP (#t:Type) (l:list t) (i:nat{i < length l}) : + Lemma + (ensures (index l i `memP` l)) + [SMTPat (index l i `memP` l)] = + match i with + | 0 -> () + | _ -> lemma_index_memP (tl l) (i - 1) + +(** The empty list has no elements. *) +let memP_empty #a x = () + +(** Full specification for [existsb]: [existsb f xs] holds if, and +only if, there exists an element [x] of [xs] such that [f x] holds. *) +let rec memP_existsb #a f xs = + match xs with + | [] -> () + | hd::tl -> memP_existsb f tl + +let rec memP_map_intro + (#a #b: Type) + (f: a -> Tot b) + (x: a) + (l: list a) +: Lemma + (requires True) + (ensures (memP x l ==> memP (f x) (map f l))) + (decreases l) += match l with + | [] -> () + | _ :: q -> memP_map_intro f x q (* NOTE: would fail if [requires memP x l] instead of [ ==> ] *) + +let rec memP_map_elim + (#a #b: Type) + (f: a -> Tot b) + (y: b) + (l: list a) +: Lemma + (requires True) + (ensures (memP y (map f l) ==> (exists (x : a) . memP x l /\ f x == y))) + (decreases l) += match l with + | [] -> () + | _ :: q -> memP_map_elim f y q + +(** The empty list has no elements *) +let mem_empty #a x = () + +(** Full specification for [existsb]: [existsb f xs] holds if, and +only if, there exists an element [x] of [xs] such that [f x] holds. 
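For example, [existsb (fun x -> x = 2) [1;2;3]] is [true], while [existsb (fun x -> x = 4) [1;2;3]] is [false].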
*) +let rec mem_existsb #a f xs = + match xs with + | [] -> () + | hd::tl -> mem_existsb f tl + +let rec mem_count + (#a: eqtype) + (l: list a) + (x: a) +: Lemma + (mem x l <==> count x l > 0) += match l with + | [] -> () + | x' :: l' -> mem_count l' x + +(** Properties about rev **) + +let rec rev_acc_length l acc = match l with + | [] -> () + | hd::tl -> rev_acc_length tl (hd::acc) + +let rev_length l = rev_acc_length l [] + +let rec rev_acc_memP #a l acc x = match l with + | [] -> () + | hd::tl -> rev_acc_memP tl (hd::acc) x + +(** A list and its reversed have the same elements *) +let rev_memP #a l x = rev_acc_memP l [] x + +let rev_mem l x = rev_memP l x + +(** Properties about append **) + +let append_nil_l l = () + +let rec append_l_nil = function + | [] -> () + | hd::tl -> append_l_nil tl + +let append_cons_l hd tl l = () + +let rec append_l_cons hd tl l = match l with + | [] -> () + | hd'::tl' -> append_l_cons hd tl tl' + +let rec append_assoc l1 l2 l3 = match l1 with + | [] -> () + | hd::tl -> append_assoc tl l2 l3 + +let rec append_length l1 l2 = match l1 with + | [] -> () + | hd::tl -> append_length tl l2 + +let rec append_mem #t l1 l2 a = match l1 with + | [] -> () + | hd::tl -> append_mem tl l2 a + +let rec append_memP #t l1 l2 a = match l1 with + | [] -> () + | hd::tl -> append_memP tl l2 a + + +let rec append_mem_forall #a l1 l2 = match l1 with + | [] -> () + | hd::tl -> append_mem_forall tl l2 + +let rec append_memP_forall #a l1 l2 = match l1 with + | [] -> () + | hd::tl -> append_memP_forall tl l2 + + +let rec append_count #t l1 l2 a = match l1 with + | [] -> () + | hd::tl -> append_count tl l2 a + +let rec append_count_forall #a l1 l2 = match l1 with + | [] -> () + | hd::tl -> append_count_forall tl l2 + +let append_eq_nil l1 l2 = () + +let append_eq_singl l1 l2 x = () + +let rec append_inv_head l l1 l2 = match l with + | [] -> () + | hd::tl -> append_inv_head tl l1 l2 + +let rec append_inv_tail l l1 l2 = match l1, l2 with + | [], [] -> () + | hd1::tl1, hd2::tl2 -> append_inv_tail l tl1 tl2 + | [], hd2::tl2 -> + (match l with + | [] -> () + | hd::tl -> append_l_cons hd tl tl2; append_inv_tail tl [] (tl2@[hd]) + (* We can here apply the induction hypothesis thanks to termination on a lexicographical ordering of the arguments! *) + ) + | hd1::tl1, [] -> + (match l with + | [] -> () + | hd::tl -> append_l_cons hd tl tl1; append_inv_tail tl (tl1@[hd]) [] + (* Idem *) + ) + +let rec append_length_inv_head + (#a: Type) + (left1 right1 left2 right2: list a) +: Lemma + (requires (append left1 right1 == append left2 right2 /\ length left1 == length left2)) + (ensures (left1 == left2 /\ right1 == right2)) + (decreases left1) += match left1 with + | [] -> () + | _ :: left1' -> + append_length_inv_head left1' right1 (tl left2) right2 + +let append_length_inv_tail + (#a: Type) + (left1 right1 left2 right2: list a) +: Lemma + (requires (append left1 right1 == append left2 right2 /\ length right1 == length right2)) + (ensures (left1 == left2 /\ right1 == right2)) += append_length left1 right1; + append_length left2 right2; + append_length_inv_head left1 right1 left2 right2 + +let append_injective #a (l0 l0':list a) + (l1 l1':list a) + : Lemma + (ensures + (length l0 == length l0' \/ length l1 == length l1') /\ + append l0 l1 == append l0' l1' ==> + l0 == l0' /\ l1 == l1') + = introduce + ((length l0 == length l0' \/ length l1 == length l1') /\ + append l0 l1 == append l0' l1') + ==> + (l0 == l0' /\ l1 == l1') + with _. 
eliminate (length l0 == length l0') \/ + (length l1 == length l1') + returns _ + with _. append_length_inv_head l0 l1 l0' l1' + and _. append_length_inv_tail l0 l1 l0' l1' + +(** The [last] element of a list remains the same, even after that list is + [append]ed to another list. *) +let rec lemma_append_last (#a:Type) (l1 l2:list a) : + Lemma + (requires (length l2 > 0)) + (ensures (last (l1 @ l2) == last l2)) = + match l1 with + | [] -> () + | _ :: l1' -> lemma_append_last l1' l2 + +(** Properties mixing rev and append **) + +let rec rev_acc_rev' l acc = match l with + | [] -> () + | hd::tl -> rev_acc_rev' tl (hd::acc); append_l_cons hd acc (rev' tl) + +let rev_rev' l = rev_acc_rev' l []; append_l_nil (rev' l) + +let rec rev'_append l1 l2 = match l1 with + | [] -> append_l_nil (rev' l2) + | hd::tl -> rev'_append tl l2; append_assoc (rev' l2) (rev' tl) [hd] + +let rev_append l1 l2 = rev_rev' l1; rev_rev' l2; rev_rev' (l1@l2); rev'_append l1 l2 + +let rec rev'_involutive = function + | [] -> () + | hd::tl -> rev'_append (rev' tl) [hd]; rev'_involutive tl + +let rev_involutive l = rev_rev' l; rev_rev' (rev' l); rev'_involutive l + +(** Properties about snoc *) + +let lemma_snoc_length (l, x) = append_length l [x] + +(** Reverse induction principle **) + +let rec rev'_list_ind p = function + | [] -> () + | hd::tl -> rev'_list_ind p tl + +let rev_ind p l = rev'_involutive l; rev'_list_ind p (rev' l) + +(** Properties about iterators **) + +let rec map_lemma f l = + match l with + | [] -> () + | h::t -> map_lemma f t + +(** Properties about unsnoc *) + +(** [unsnoc] is the inverse of [snoc] *) +let lemma_unsnoc_snoc #a l = + let l', x = unsnoc l in + let l1, l2 = l', [x] in + lemma_splitAt_snd_length (length l - 1) l; + // assert ((l1, l2) == splitAt (length l - 1) l); + let rec aux (l:list a{length l > 0}) : + Lemma (let l1, l2 = splitAt (length l - 1) l in + append l1 l2 == l) = + if length l = 1 then () else aux (tl l) in + aux l + +(** [snoc] is the inverse of [unsnoc] *) +let rec lemma_snoc_unsnoc #a lx : Lemma (ensures unsnoc (snoc lx) == lx) (decreases (length (fst lx)))= + let l, x = lx in + match l with + | [] -> () + | _ -> lemma_snoc_unsnoc (tl l, x) + +(** Doing an [unsnoc] gives us a list that is shorter in length by 1 *) +let lemma_unsnoc_length #a l = + lemma_snoc_length (unsnoc l) + +(** [unsnoc] followed by [append] can be connected to the same vice-versa. *) +let rec lemma_unsnoc_append (#a:Type) (l1 l2:list a) : + Lemma + (requires (length l2 > 0)) // the [length l2 = 0] is trivial + (ensures ( + let al, a = unsnoc (l1 @ l2) in + let bl, b = unsnoc l2 in + al == l1 @ bl /\ a == b)) = + match l1 with + | [] -> () + | _ :: l1' -> lemma_unsnoc_append l1' l2 + +(** [unsnoc] gives you [last] element, which is [index]ed at [length l - 1] *) +let rec lemma_unsnoc_is_last (#t:Type) (l:list t) : + Lemma + (requires (length l > 0)) + (ensures (snd (unsnoc l) == last l /\ snd (unsnoc l) == index l (length l - 1))) = + match l with + | [_] -> () + | _ -> lemma_unsnoc_is_last (tl l) + +(** [index]ing on the left part of an [unsnoc]d list is the same as indexing + the original list. 
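For example, with [l = [10; 20; 30]], [unsnoc l] is [([10; 20], 30)], so +[index (fst (unsnoc l)) 1] and [index l 1] are both [20].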
*) +let rec lemma_unsnoc_index (#t:Type) (l:list t) (i:nat) : + Lemma + (requires (length l > 0 /\ i < length l - 1)) + (ensures ( + i < length (fst (unsnoc l)) /\ + index (fst (unsnoc l)) i == index l i)) = + match i with + | 0 -> () + | _ -> lemma_unsnoc_index (tl l) (i - 1) + +(** Definition and properties about [split_using] *) + +let rec lemma_split_using (#t:Type) (l:list t) (x:t{x `memP` l}) : + Lemma + (ensures ( + let l1, l2 = split_using l x in + length l2 > 0 /\ + ~(x `memP` l1) /\ + hd l2 == x /\ + append l1 l2 == l)) = + match l with + | [_] -> () + | a :: rest -> + let goal = + let l1, l2 = split_using l x in + length l2 > 0 /\ + ~(x `memP` l1) /\ + hd l2 == x /\ + append l1 l2 == l + in + FStar.Classical.or_elim + #_ #_ + #(fun () -> goal) + (fun (_:squash (a == x)) -> ()) + (fun (_:squash (x `memP` rest)) -> lemma_split_using rest x) + +(** Properties about partition **) + +(** If [partition f l = (l1, l2)], then for any [x], [x] is in [l] if +and only if [x] is in either one of [l1] or [l2] *) +let rec partition_mem #a f l x = match l with + | [] -> () + | hd::tl -> partition_mem f tl x + +(** Same as [partition_mem], but using [forall] *) +let rec partition_mem_forall #a f l = match l with + | [] -> () + | hd::tl -> partition_mem_forall f tl + +(** If [partition f l = (l1, l2)], then for any [x], if [x] is in [l1] +(resp. [l2]), then [f x] holds (resp. does not hold) *) +let rec partition_mem_p_forall #a p l = match l with + | [] -> () + | hd::tl -> partition_mem_p_forall p tl + +(** If [partition f l = (l1, l2)], then the number of occurrences of +any [x] in [l] is the same as the sum of the number of occurrences in +[l1] and [l2]. *) +let rec partition_count #a f l x = match l with + | [] -> () + | hd::tl -> partition_count f tl x + +(** Same as [partition_count], but using [forall] *) +let rec partition_count_forall #a f l= match l with + | [] -> () + | hd::tl -> partition_count_forall f tl + +(** Properties about subset **) + +let rec mem_subset (#a: eqtype) (la lb: list a) + : Lemma (subset la lb <==> (forall x. mem x la ==> mem x lb)) + [SMTPat (subset la lb)] = + match la with + | [] -> () + | hd :: tl -> mem_subset tl lb + +(* NOTE: This is implied by mem_subset above, kept for compatibility *) +let subset_reflexive (#a: eqtype) (l: list a) + : Lemma (subset l l) = () + +(** Correctness of quicksort **) + +(** Correctness of [sortWith], part 1/2: the number of occurrences of +any [x] in [sortWith f l] is the same as the number of occurrences in +[l]. *) +let rec sortWith_permutation #a f l : + Lemma (ensures forall x. count x l = count x (sortWith f l)) + (decreases length l) += match l with + | [] -> () + | pivot::tl -> + let hi, lo = partition (bool_of_compare f pivot) tl in + partition_length (bool_of_compare f pivot) tl; + partition_count_forall (bool_of_compare f pivot) tl; + sortWith_permutation f lo; + sortWith_permutation f hi; + append_count_forall (sortWith f lo) (pivot::sortWith f hi) + +(** Correctness of the merging of two sorted lists around a pivot. *) +let rec append_sorted #a f l1 l2 pivot = match l1 with + | [] -> () + | hd::tl -> append_sorted f tl l2 pivot + +(** Correctness of [sortWith], part 2/2: the elements of [sortWith f +l] are sorted according to comparison function [f], and the elements +of [sortWith f l] are the elements of [l]. *) +let rec sortWith_sorted (#a:eqtype) (f:(a -> a -> Tot int)) (l:list a) : + Lemma (requires (total_order #a (bool_of_compare f))) + (ensures ((sorted (bool_of_compare f) (sortWith f l)) /\ (forall x. 
mem x l = mem x (sortWith f l)))) + (decreases length l) += + match l with + | [] -> () + | pivot::tl -> + let hi, lo = partition (bool_of_compare f pivot) tl in + partition_length (bool_of_compare f pivot) tl; + partition_mem_forall (bool_of_compare f pivot) tl; + partition_mem_p_forall (bool_of_compare f pivot) tl; + sortWith_sorted f lo; + sortWith_sorted f hi; + append_mem_forall (sortWith f lo) (pivot::sortWith f hi); + append_sorted (bool_of_compare f) (sortWith f lo) (sortWith f hi) pivot + +(** Properties of [noRepeats] *) +let noRepeats_nil + (#a: eqtype) +: Lemma + (ensures (noRepeats #a [])) += () + +let noRepeats_cons + (#a: eqtype) + (h: a) + (tl: list a) +: Lemma + (requires ((~ (mem h tl)) /\ noRepeats tl)) + (ensures (noRepeats #a (h::tl))) += () + +let rec noRepeats_append_elim + (#a: eqtype) + (l1 l2: list a) +: Lemma + (requires (noRepeats (l1 @ l2))) + (ensures (noRepeats l1 /\ noRepeats l2 /\ (forall x . mem x l1 ==> ~ (mem x l2)))) + (decreases l1) += match l1 with + | [] -> () + | x :: q1 -> + append_mem q1 l2 x; + noRepeats_append_elim q1 l2 + +let rec noRepeats_append_intro + (#a: eqtype) + (l1 l2: list a) +: Lemma + (requires (noRepeats l1 /\ noRepeats l2 /\ (forall x . mem x l1 ==> ~ (mem x l2)))) + (ensures (noRepeats (l1 @ l2))) + (decreases l1) += match l1 with + | [] -> () + | x :: q1 -> + append_mem q1 l2 x; + noRepeats_append_intro q1 l2 + +(** Properties of [no_repeats_p] *) +let no_repeats_p_nil + (#a: Type) +: Lemma + (ensures (no_repeats_p #a [])) += () + +let no_repeats_p_cons + (#a: Type) + (h: a) + (tl: list a) +: Lemma + (requires ((~ (memP h tl)) /\ no_repeats_p tl)) + (ensures (no_repeats_p #a (h::tl))) += () + +let rec no_repeats_p_append_elim + (#a: Type) + (l1 l2: list a) +: Lemma + (requires (no_repeats_p (l1 `append` l2))) + (ensures (no_repeats_p l1 /\ no_repeats_p l2 /\ (forall x . memP x l1 ==> ~ (memP x l2)))) + (decreases l1) += match l1 with + | [] -> () + | x :: q1 -> + append_memP q1 l2 x; + no_repeats_p_append_elim q1 l2 + +let rec no_repeats_p_append_intro + (#a: Type) + (l1 l2: list a) +: Lemma + (requires (no_repeats_p l1 /\ no_repeats_p l2 /\ (forall x . memP x l1 ==> ~ (memP x l2)))) + (ensures (no_repeats_p (l1 `append` l2))) + (decreases l1) += match l1 with + | [] -> () + | x :: q1 -> + append_memP q1 l2 x; + no_repeats_p_append_intro q1 l2 + +let no_repeats_p_append + (#a: Type) + (l1 l2: list a) +: Lemma + (no_repeats_p (l1 `append` l2) <==> ( + (no_repeats_p l1 /\ no_repeats_p l2 /\ (forall x . 
memP x l1 ==> ~ (memP x l2))) + )) += FStar.Classical.move_requires (no_repeats_p_append_intro l1) l2; + FStar.Classical.move_requires (no_repeats_p_append_elim l1) l2 + +let no_repeats_p_append_swap + (#a: Type) + (l1 l2: list a) +: Lemma + (no_repeats_p (l1 `append` l2) <==> no_repeats_p (l2 `append` l1)) += no_repeats_p_append l1 l2; + no_repeats_p_append l2 l1 + +let no_repeats_p_append_permut + (#a: Type) + (l1 l2 l3 l4 l5: list a) +: Lemma + ((no_repeats_p (l1 `append` (l2 `append` (l3 `append` (l4 `append` l5))))) <==> no_repeats_p (l1 `append` (l4 `append` (l3 `append` (l2 `append` l5))))) += no_repeats_p_append l1 (l2 `append` (l3 `append` (l4 `append` l5))); + append_memP_forall l2 (l3 `append` (l4 `append` l5)); + append_memP_forall l3 (l4 `append` l5); + append_memP_forall l4 l5; + no_repeats_p_append l2 (l3 `append` (l4 `append` l5)); + no_repeats_p_append l3 (l4 `append` l5); + no_repeats_p_append l4 l5; + no_repeats_p_append l2 l5; + no_repeats_p_append l3 (l2 `append` l5); + append_memP_forall l2 l5; + no_repeats_p_append l4 (l3 `append` (l2 `append` l5)); + append_memP_forall l3 (l2 `append` l5); + no_repeats_p_append l1 (l4 `append` (l3 `append` (l2 `append` l5))); + append_memP_forall l4 (l3 `append` (l2 `append` l5)) + +let no_repeats_p_false_intro + (#a: Type) + (l1 l l2 l3: list a) +: Lemma + (requires (Cons? l)) + (ensures (~ (no_repeats_p (l1 `append` (l `append` (l2 `append` (l `append` l3))))))) += let x = hd l in + assert (memP x l); + no_repeats_p_append l1 (l `append` (l2 `append` (l `append` l3))); + no_repeats_p_append l (l2 `append` (l `append` l3)); + append_memP l2 (l `append` l3) x; + append_memP l l3 x + +(** Properties of [assoc] *) + +let assoc_nil + (#a: eqtype) + (#b: Type) + (x: a) +: Lemma + (ensures (assoc #a #b x [] == None)) += () + +let assoc_cons_eq + (#a: eqtype) + (#b: Type) + (x: a) + (y: b) + (q: list (a & b)) +: Lemma + (ensures (assoc x ((x, y) :: q) == Some y)) += () + +let assoc_cons_not_eq + (#a: eqtype) + (#b: Type) + (x x': a) + (y: b) + (q: list (a & b)) +: Lemma + (requires (x <> x')) + (ensures (assoc x' ((x, y) :: q) == assoc x' q)) += () + +let rec assoc_append_elim_r + (#a: eqtype) + (#b: Type) + (x: a) + (l1 l2: list (a & b)) +: Lemma + (requires (assoc x l2 == None \/ ~ (assoc x l1 == None))) + (ensures (assoc x (l1 @ l2) == assoc x l1)) + (decreases l1) += match l1 with + | [] -> () + | (x', _) :: q -> if x = x' then () else assoc_append_elim_r x q l2 + +let rec assoc_append_elim_l + (#a: eqtype) + (#b: Type) + (x: a) + (l1 l2: list (a & b)) +: Lemma + (requires (assoc x l1 == None)) + (ensures (assoc x (l1 @ l2) == assoc x l2)) + (decreases l1) += match l1 with + | [] -> () + | (x', _) :: q -> if x = x' then assert False else assoc_append_elim_l x q l2 + +let rec assoc_memP_some + (#a: eqtype) + (#b: Type) + (x: a) + (y: b) + (l: list (a & b)) +: Lemma + (requires (assoc x l == Some y)) + (ensures (memP (x, y) l)) + (decreases l) += match l with + | [] -> () + | (x', _) :: q -> if x = x' then () else assoc_memP_some x y q + +let rec assoc_memP_none + (#a: eqtype) + (#b: Type) + (x: a) + (l: list (a & b)) +: Lemma + (requires (assoc x l == None)) + (ensures (forall y . ~ (memP (x, y) l))) + (decreases l) += match l with + | [] -> () + | (x', _) :: q -> if x = x' then assert False else assoc_memP_none x q + +let assoc_mem + (#a: eqtype) + (#b: Type) + (x: a) + (l: list (a & b)) +: Lemma + (ensures (mem x (map fst l) <==> (exists y . 
assoc x l == Some y))) += match assoc x l with + | None -> + assoc_memP_none x l; + mem_memP x (map fst l); + memP_map_elim fst x l + | Some y -> + assoc_memP_some x y l; + memP_map_intro fst (x, y) l; + mem_memP x (map fst l) + +(** Properties of [fold_left] *) + +let rec fold_left_invar + (#a #b: Type) + (f: (a -> b -> Tot a)) + (l: list b) + (p: (a -> Tot Type0)) + : Lemma + (requires forall (x: a) (y: b) . p x ==> memP y l ==> p (f x y) ) + (ensures forall (x: a) . p x ==> p (fold_left f x l)) += + match l with + | [] -> () + | y :: q -> fold_left_invar f q p + +let rec fold_left_map + (#a #b #c: Type) + (f_aba: a -> b -> Tot a) + (f_bc: b -> Tot c) + (f_aca: a -> c -> Tot a) + (l: list b) + : Lemma + (requires forall (x: a) (y: b) . f_aba x y == f_aca x (f_bc y) ) + (ensures forall (x : a) . fold_left f_aba x l == fold_left f_aca x (map f_bc l) ) + = + match l with + | [] -> () + | y :: q -> fold_left_map f_aba f_bc f_aca q + +let rec map_append + (#a #b: Type) + (f: a -> Tot b) + (l1 l2: list a) +: + Lemma + (ensures map f (l1 @ l2) == map f l1 @ map f l2) += + match l1 with + | [] -> () + | x :: q -> map_append f q l2 + +let rec fold_left_append + (#a #b: Type) + (f: a -> b -> Tot a) + (l1 l2: list b) + : Lemma + (ensures forall x . fold_left f x (l1 @ l2) == fold_left f (fold_left f x l1) l2) += match l1 with + | [] -> () + | x :: q -> fold_left_append f q l2 + +let rec fold_left_monoid + (#a: Type) + (opA: (a -> a -> Tot a)) + (zeroA: a) + (l: list a) +: Lemma + (requires + (forall u v w . (u `opA` (v `opA` w)) == ((u `opA` v) `opA` w)) /\ + (forall x . (x `opA` zeroA) == x) /\ + (forall x . (zeroA `opA` x) == x)) + (ensures + forall x . + (fold_left opA x l) == (x `opA` (fold_left opA zeroA l))) += match l with + | [] -> () + | x :: q -> fold_left_monoid opA zeroA q + +let fold_left_append_monoid + (#a: Type) + (f: (a -> a -> Tot a)) + (z: a) + (l1 l2: list a) +: Lemma + (requires + (forall u v w . f u (f v w) == f (f u v) w) /\ + (forall x . f x z == x) /\ + (forall x . f z x == x)) + (ensures + fold_left f z (l1 @ l2) == f (fold_left f z l1) (fold_left f z l2)) += fold_left_append f l1 l2; + fold_left_monoid f z l2 + +(* Properties of [index] *) + +private let rec index_extensionality_aux + (#a: Type) + (l1 l2: list a) + (l_len: (l_len: unit { length l1 == length l2 } )) + (l_index: (i: (i: nat {i < length l1})) -> Tot (l_index: unit {index l1 i == index l2 i})) +: Lemma + (ensures (l1 == l2)) += match (l1, l2) with + | (a1::q1, a2::q2) -> + let a_eq : (a_eq : unit {a1 == a2}) = l_index 0 in + let q_len : (q_len: unit {length q1 == length q2}) = () in + let q_index (i: (i: nat {i < length q1})) : Tot (q_index: unit {index q1 i == index q2 i}) = + l_index (i + 1) in + let q_eq : (q_eq : unit {l1 == l2}) = index_extensionality_aux q1 q2 q_len q_index in + () + | _ -> () + +let index_extensionality + (#a: Type) + (l1 l2: list a) +: Lemma + (requires + (length l1 == length l2 /\ + (forall (i: nat) . 
i < length l1 ==> index l1 i == index l2 i))) + (ensures (l1 == l2)) += index_extensionality_aux l1 l2 () (fun i -> ()) + +(** Properties of [strict_suffix_of] *) + +let rec strict_suffix_of_nil (#a: Type) (x: a) (l: list a) +: Lemma + (requires True) + (ensures (strict_suffix_of [] (x::l))) + (decreases l) += match l with + | [] -> () + | a' :: q -> strict_suffix_of_nil a' q + +let strict_suffix_of_or_eq_nil (#a: Type) (l: list a) +: Lemma + (ensures (strict_suffix_of [] l \/ l == [])) += match l with + | [] -> () + | a :: q -> strict_suffix_of_nil a q + +let strict_suffix_of_cons (#a: Type) (x: a) (l: list a) : + Lemma + (ensures (strict_suffix_of l (x::l))) += () + +let rec strict_suffix_of_trans (#a: Type) (l1 l2 l3: list a) +: Lemma + (requires True) + (ensures ((strict_suffix_of l1 l2 /\ strict_suffix_of l2 l3) ==> strict_suffix_of l1 l3)) + (decreases l3) + [SMTPat (strict_suffix_of l1 l2); SMTPat (strict_suffix_of l2 l3)] += match l3 with + | [] -> () + | _ :: q -> strict_suffix_of_trans l1 l2 q + +let rec strict_suffix_of_correct (#a) (l1 l2: list a) +: Lemma + (requires True) + (ensures (strict_suffix_of l1 l2 ==> l1 << l2)) + (decreases l2) += match l2 with + | [] -> () + | _ :: q -> + strict_suffix_of_correct l1 q + +let rec map_strict_suffix_of (#a #b: Type) (f: a -> Tot b) (l1: list a) (l2: list a) : + Lemma + (requires True) + (ensures (strict_suffix_of l1 l2 ==> strict_suffix_of (map f l1) (map f l2))) + (decreases l2) += match l2 with + | [] -> () + | a::q -> + map_strict_suffix_of f l1 q + +let rec mem_strict_suffix_of (#a: eqtype) (l1: list a) (m: a) (l2: list a) +: Lemma + (requires True) + (ensures ((mem m l1 /\ strict_suffix_of l1 l2) ==> mem m l2)) += match l2 with + | [] -> () + | a :: q -> + mem_strict_suffix_of l1 m q + +let rec strict_suffix_of_exists_append + (#a: Type) + (l1 l2: list a) +: Lemma + (ensures (strict_suffix_of l1 l2 ==> (exists l3 . l2 == append l3 l1))) += match l2 with + | [] -> () + | a :: q -> + FStar.Classical.or_elim + #(l1 == q) + #(strict_suffix_of l1 q) + #(fun _ -> exists l3 . l2 == append l3 l1) + (fun _ -> + FStar.Classical.exists_intro (fun l3 -> l2 == append l3 l1) (a :: [])) + (fun _ -> + FStar.Classical.exists_elim + (exists l3 . l2 == append l3 l1) + #_ + #(fun l3 -> q == append l3 l1) + (strict_suffix_of_exists_append l1 q) + (fun l3 -> + FStar.Classical.exists_intro (fun l3 -> l2 == append l3 l1) (a :: l3) + )) + +let strict_suffix_of_or_eq_exists_append + (#a: Type) + (l1 l2: list a) +: Lemma + (ensures ((strict_suffix_of l1 l2 \/ l1 == l2) ==> (exists l3 . l2 == append l3 l1))) += FStar.Classical.or_elim + #(strict_suffix_of l1 l2) + #(l1 == l2) + #(fun _ -> exists l3 . l2 == append l3 l1) + (fun _ -> + strict_suffix_of_exists_append l1 l2) + (fun _ -> + FStar.Classical.exists_intro + (fun l3 -> l2 == append l3 l1) + [] ) + +(** Properties of << with lists *) + +let precedes_tl + (#a: Type) + (l: list a {Cons? 
l}) +: Lemma (ensures (tl l << l)) += () + +let rec precedes_append_cons_r + (#a: Type) + (l1: list a) + (x: a) + (l2: list a) +: Lemma + (requires True) + (ensures (x << append l1 (x :: l2))) + [SMTPat (x << append l1 (x :: l2))] += match l1 with + | [] -> () + | _ :: q -> precedes_append_cons_r q x l2 + +let precedes_append_cons_prod_r + (#a #b: Type) + (l1: list (a & b)) + (x: a) + (y: b) + (l2: list (a & b)) +: Lemma + (ensures + x << (append l1 ((x, y) :: l2)) /\ + y << (append l1 ((x, y) :: l2))) += precedes_append_cons_r l1 (x, y) l2 + +let rec memP_precedes + (#a: Type) + (x: a) + (l: list a) +: Lemma + (requires True) + (ensures (memP x l ==> x << l)) + (decreases l) += match l with + | [] -> () + | y :: q -> + FStar.Classical.or_elim + #(x == y) + #(memP x q) + #(fun _ -> x << l) + (fun _ -> ()) + (fun _ -> memP_precedes x q) + +let assoc_precedes + (#a: eqtype) + (#b: Type) + (x: a) + (l: list (a & b)) + (y: b) +: Lemma + (requires (assoc x l == Some y)) + (ensures (x << l /\ y << l)) += assoc_memP_some x y l; + memP_precedes (x, y) l + +(** Properties about find *) + +let rec find_none + (#a: Type) + (f: (a -> Tot bool)) + (l: list a) + (x: a) +: Lemma + (requires (find f l == None /\ memP x l)) + (ensures (f x == false)) += let (x' :: l') = l in + Classical.or_elim + #(x == x') + #(~ (x == x')) + #(fun _ -> f x == false) + (fun h -> ()) + (fun h -> find_none f l' x) + +(** Properties of init and last *) + +let rec append_init_last (#a: Type) (l: list a { Cons? l }) : Lemma + (l == append (init l) [last l]) += match l with + | a :: q -> + if Cons? q + then + append_init_last q + else + () + +let rec init_last_def (#a: Type) (l: list a) (x: a) : Lemma + (let l' = append l [x] in + init l' == l /\ last l' == x) += match l with + | [] -> () + | y :: q -> init_last_def q x + +let init_last_inj (#a: Type) (l1: list a { Cons? l1 } ) (l2: list a { Cons? l2 } ) : Lemma + (requires (init l1 == init l2 /\ last l1 == last l2)) + (ensures (l1 == l2)) += append_init_last l1; + append_init_last l2 + +(* Properties of for_all *) + +#push-options "--fuel 1" +let rec for_all_append #a (f: a -> Tot bool) (s1 s2: list a): Lemma + (ensures for_all f (s1 @ s2) <==> for_all f s1 && for_all f s2) += + let _ = allow_inversion (list a) in + match s1 with + | [] -> () + | hd1 :: tl1 -> for_all_append f tl1 s2 +#pop-options diff --git a/stage0/ulib/FStar.List.Tot.Properties.fsti b/stage0/ulib/FStar.List.Tot.Properties.fsti new file mode 100644 index 00000000000..1a4ff371970 --- /dev/null +++ b/stage0/ulib/FStar.List.Tot.Properties.fsti @@ -0,0 +1,813 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** +This module states and proves some properties about pure and total +operations on lists. 
+ +@summary Properties of pure total operations on lists +*) +module FStar.List.Tot.Properties +open FStar.List.Tot.Base + +(** A list indexed by its length **) +let llist a (n:nat) = l:list a {length l = n} + +(** Properties about mem **) + +(** Correctness of [mem] for types with decidable equality. TODO: +replace [mem] with [memP] in relevant lemmas and define the right +SMTPat to automatically recover lemmas about [mem] for types with +decidable equality *) +val mem_memP + (#a: eqtype) + (x: a) + (l: list a) +: Lemma (ensures (mem x l <==> memP x l)) + [SMTPat (mem x l); SMTPat (memP x l)] + +(** If an element can be [index]ed, then it is a [memP] of the list. *) +val lemma_index_memP (#t:Type) (l:list t) (i:nat{i < length l}) : + Lemma + (ensures (index l i `memP` l)) + [SMTPat (index l i `memP` l)] + +(** The empty list has no elements. *) +val memP_empty : #a: Type -> x:a -> + Lemma (requires (memP x [])) + (ensures False) + +(** Full specification for [existsb]: [existsb f xs] holds if, and +only if, there exists an element [x] of [xs] such that [f x] holds. *) +val memP_existsb: #a: Type -> f:(a -> Tot bool) -> xs:list a -> + Lemma(ensures (existsb f xs <==> (exists (x:a). (f x = true /\ memP x xs)))) + +val memP_map_intro + (#a #b: Type) + (f: a -> Tot b) + (x: a) + (l: list a) +: Lemma + (requires True) + (ensures (memP x l ==> memP (f x) (map f l))) + +val memP_map_elim + (#a #b: Type) + (f: a -> Tot b) + (y: b) + (l: list a) +: Lemma + (requires True) + (ensures (memP y (map f l) ==> (exists (x : a) . memP x l /\ f x == y))) + +(** The empty list has no elements *) +val mem_empty : #a:eqtype -> x:a -> + Lemma (requires (mem x [])) + (ensures False) + +(** Full specification for [existsb]: [existsb f xs] holds if, and +only if, there exists an element [x] of [xs] such that [f x] holds. *) +val mem_existsb: #a:eqtype -> f:(a -> Tot bool) -> xs:list a -> + Lemma(ensures (existsb f xs <==> (exists (x:a). 
(f x = true /\ mem x xs)))) + +val mem_count + (#a: eqtype) + (l: list a) + (x: a) +: Lemma + (mem x l <==> count x l > 0) + +(** Properties about rev **) + +val rev_acc_length : l:list 'a -> acc:list 'a -> + Lemma (requires True) + (ensures (length (rev_acc l acc) = length l + length acc)) + +val rev_length : l:list 'a -> + Lemma (requires True) + (ensures (length (rev l) = length l)) + +val rev_acc_memP : #a:Type -> l:list a -> acc:list a -> x:a -> + Lemma (requires True) + (ensures (memP x (rev_acc l acc) <==> (memP x l \/ memP x acc))) + +(** A list and its reversed have the same elements *) +val rev_memP : #a:Type -> l:list a -> x:a -> + Lemma (requires True) + (ensures (memP x (rev l) <==> memP x l)) + +val rev_mem : #a:eqtype -> l:list a -> x:a -> + Lemma (requires True) + (ensures (mem x (rev l) <==> mem x l)) + +(** Properties about append **) + +val append_nil_l: l:list 'a -> + Lemma (requires True) + (ensures ([]@l == l)) + +val append_l_nil: l:list 'a -> + Lemma (requires True) + (ensures (l@[] == l)) [SMTPat (l@[])] + +val append_cons_l: hd:'a -> tl:list 'a -> l:list 'a -> + Lemma (requires True) + (ensures (((hd::tl)@l) == (hd::(tl@l)))) + +val append_l_cons: hd:'a -> tl:list 'a -> l:list 'a -> + Lemma (requires True) + (ensures ((l@(hd::tl)) == ((l@[hd])@tl))) + +val append_assoc: l1:list 'a -> l2:list 'a -> l3:list 'a -> + Lemma (requires True) + (ensures ((l1@(l2@l3)) == ((l1@l2)@l3))) + +val append_length: l1:list 'a -> l2:list 'a -> + Lemma (requires True) + (ensures (length (l1@l2) = length l1 + length l2)) [SMTPat (length (l1 @ l2))] + +val append_mem: #t:eqtype -> l1:list t + -> l2:list t + -> a:t + -> Lemma (requires True) + (ensures (mem a (l1@l2) = (mem a l1 || mem a l2))) + (* [SMTPat (mem a (l1@l2))] *) + +val append_memP: #t:Type -> l1:list t + -> l2:list t + -> a:t + -> Lemma (requires True) + (ensures (memP a (l1 `append` l2) <==> (memP a l1 \/ memP a l2))) + (* [SMTPat (mem a (l1@l2))] *) + +val append_mem_forall: #a:eqtype -> l1:list a + -> l2:list a + -> Lemma (requires True) + (ensures (forall a. mem a (l1@l2) = (mem a l1 || mem a l2))) + +val append_memP_forall: #a:Type -> l1:list a + -> l2:list a + -> Lemma (requires True) + (ensures (forall a. memP a (l1 `append` l2) <==> (memP a l1 \/ memP a l2))) + + +val append_count: #t:eqtype -> l1:list t + -> l2:list t + -> a:t + -> Lemma (requires True) + (ensures (count a (l1@l2) = (count a l1 + count a l2))) + +val append_count_forall: #a:eqtype -> l1:list a + -> l2:list a + -> Lemma (requires True) + (ensures (forall a. 
count a (l1@l2) = (count a l1 + count a l2))) + (* [SMTPat (l1@l2)] *) + +val append_eq_nil: l1:list 'a -> l2:list 'a -> + Lemma (requires (l1@l2 == [])) + (ensures (l1 == [] /\ l2 == [])) + +val append_eq_singl: l1:list 'a -> l2:list 'a -> x:'a -> + Lemma (requires (l1@l2 == [x])) + (ensures ((l1 == [x] /\ l2 == []) \/ (l1 == [] /\ l2 == [x]))) + +val append_inv_head: l:list 'a -> l1:list 'a -> l2:list 'a -> + Lemma (requires ((l@l1) == (l@l2))) + (ensures (l1 == l2)) + +val append_inv_tail: l:list 'a -> l1:list 'a -> l2:list 'a -> + Lemma (requires ((l1@l) == (l2@l))) + (ensures (l1 == l2)) + +val append_length_inv_head + (#a: Type) + (left1 right1 left2 right2: list a) +: Lemma + (requires (append left1 right1 == append left2 right2 /\ length left1 == length left2)) + (ensures (left1 == left2 /\ right1 == right2)) + +val append_length_inv_tail + (#a: Type) + (left1 right1 left2 right2: list a) +: Lemma + (requires (append left1 right1 == append left2 right2 /\ length right1 == length right2)) + (ensures (left1 == left2 /\ right1 == right2)) + +val append_injective #a (l0 l0':list a) + (l1 l1':list a) + : Lemma + (ensures + (length l0 == length l0' \/ length l1 == length l1') /\ + append l0 l1 == append l0' l1' ==> + l0 == l0' /\ l1 == l1') + +(** The [last] element of a list remains the same, even after that list is + [append]ed to another list. *) +val lemma_append_last (#a:Type) (l1 l2:list a) : + Lemma + (requires (length l2 > 0)) + (ensures (last (l1 @ l2) == last l2)) + +(** Properties mixing rev and append **) + +let rec rev' (#a:Type) (xs : list a) : list a = + match xs with + | [] -> [] + | hd::tl -> (rev' tl)@[hd] +let rev'T = rev' + +val rev_acc_rev': l:list 'a -> acc:list 'a -> + Lemma (requires (True)) + (ensures ((rev_acc l acc) == ((rev' l)@acc))) + +val rev_rev': l:list 'a -> + Lemma (requires True) + (ensures ((rev l) == (rev' l))) + +val rev'_append: l1:list 'a -> l2:list 'a -> + Lemma (requires True) + (ensures ((rev' (l1@l2)) == ((rev' l2)@(rev' l1)))) + +val rev_append: l1:list 'a -> l2:list 'a -> + Lemma (requires True) + (ensures ((rev (l1@l2)) == ((rev l2)@(rev l1)))) + +val rev'_involutive : l:list 'a -> + Lemma (requires True) + (ensures (rev' (rev' l) == l)) + +val rev_involutive : l:list 'a -> + Lemma (requires True) + (ensures (rev (rev l) == l)) + +(** Properties about snoc *) + +val lemma_snoc_length : (lx:(list 'a & 'a)) -> + Lemma (requires True) + (ensures (length (snoc lx) = length (fst lx) + 1)) + +(** Reverse induction principle **) + +val rev'_list_ind: p:(list 'a -> Tot bool) -> l:list 'a -> + Lemma (requires ((p []) /\ (forall hd tl. p (rev' tl) ==> p (rev' (hd::tl))))) + (ensures (p (rev' l))) + +val rev_ind: p:(list 'a -> Tot bool) -> l:list 'a -> + Lemma (requires ((p []) /\ (forall hd tl. 
p hd ==> p (hd@[tl])))) + (ensures (p l)) + +(** Properties about iterators **) + +val map_lemma: f:('a -> Tot 'b) + -> l:(list 'a) + -> Lemma (requires True) + (ensures (length (map f l)) = length l) + [SMTPat (map f l)] + +(** Properties about unsnoc *) + +(** [unsnoc] is the inverse of [snoc] *) +val lemma_unsnoc_snoc: #a:Type -> l:list a{length l > 0} -> + Lemma (requires True) + (ensures (snoc (unsnoc l) == l)) + [SMTPat (snoc (unsnoc l))] + +(** [snoc] is the inverse of [unsnoc] *) +val lemma_snoc_unsnoc: #a:Type -> lx:(list a & a) -> + Lemma (requires True) + (ensures (unsnoc (snoc lx) == lx)) + [SMTPat (unsnoc (snoc lx))] + +(** Doing an [unsnoc] gives us a list that is shorter in length by 1 *) +val lemma_unsnoc_length: #a:Type -> l:list a{length l > 0} -> + Lemma (requires True) + (ensures (length (fst (unsnoc l)) == length l - 1)) + +(** [unsnoc] followed by [append] can be connected to the same vice-versa. *) +val lemma_unsnoc_append (#a:Type) (l1 l2:list a) : + Lemma + (requires (length l2 > 0)) // the [length l2 = 0] is trivial + (ensures ( + let al, a = unsnoc (l1 @ l2) in + let bl, b = unsnoc l2 in + al == l1 @ bl /\ a == b)) + +(** [unsnoc] gives you [last] element, which is [index]ed at [length l - 1] *) +val lemma_unsnoc_is_last (#t:Type) (l:list t) : + Lemma + (requires (length l > 0)) + (ensures (snd (unsnoc l) == last l /\ snd (unsnoc l) == index l (length l - 1))) + +(** [index]ing on the left part of an [unsnoc]d list is the same as indexing + the original list. *) +val lemma_unsnoc_index (#t:Type) (l:list t) (i:nat) : + Lemma + (requires (length l > 0 /\ i < length l - 1)) + (ensures ( + i < length (fst (unsnoc l)) /\ + index (fst (unsnoc l)) i == index l i)) + +(** Definition and properties about [split_using] *) + +(** [split_using] splits a list at the first instance of finding an + element in it. + + NOTE: Uses [strong_excluded_middle] axiom. *) +let rec split_using (#t:Type) (l:list t) (x:t{x `memP` l}) : + GTot (list t & list t) = + match l with + | [_] -> [], l + | a :: rest -> + if FStar.StrongExcludedMiddle.strong_excluded_middle (a == x) then ( + [], l + ) else ( + let l1', l2' = split_using rest x in + a :: l1', l2' + ) + +val lemma_split_using (#t:Type) (l:list t) (x:t{x `memP` l}) : + Lemma + (ensures ( + let l1, l2 = split_using l x in + length l2 > 0 /\ + ~(x `memP` l1) /\ + hd l2 == x /\ + append l1 l2 == l)) + +(** Definition of [index_of] *) + +(** [index_of l x] gives the index of the leftmost [x] in [l]. + + NOTE: Uses [strong_excluded_middle] axiom. *) +let rec index_of (#t:Type) (l:list t) (x:t{x `memP` l}) : + GTot (i:nat{i < length l /\ index l i == x}) = + match l with + | [_] -> 0 + | a :: rest -> + if FStar.StrongExcludedMiddle.strong_excluded_middle (a == x) then ( + 0 + ) else ( + 1 + index_of rest x + ) + + +(** Properties about partition **) + +(** If [partition f l = (l1, l2)], then for any [x], [x] is in [l] if +and only if [x] is in either one of [l1] or [l2] *) +val partition_mem: #a:eqtype -> f:(a -> Tot bool) + -> l:list a + -> x:a + -> Lemma (requires True) + (ensures (let l1, l2 = partition f l in + mem x l = (mem x l1 || mem x l2))) + +(** Same as [partition_mem], but using [forall] *) +val partition_mem_forall: #a:eqtype -> f:(a -> Tot bool) + -> l:list a + -> Lemma (requires True) + (ensures (let l1, l2 = partition f l in + (forall x. mem x l = (mem x l1 || mem x l2)))) + +(** If [partition f l = (l1, l2)], then for any [x], if [x] is in [l1] +(resp. [l2]), then [f x] holds (resp. 
does not hold) *) +val partition_mem_p_forall: #a:eqtype -> p:(a -> Tot bool) + -> l:list a + -> Lemma (requires True) + (ensures (let l1, l2 = partition p l in + (forall x. mem x l1 ==> p x) /\ (forall x. mem x l2 ==> not (p x)))) + +(** If [partition f l = (l1, l2)], then the number of occurrences of +any [x] in [l] is the same as the sum of the number of occurrences in +[l1] and [l2]. *) +val partition_count: #a:eqtype -> f:(a -> Tot bool) + -> l:list a + -> x:a + -> Lemma (requires True) + (ensures (count x l = (count x (fst (partition f l)) + count x (snd (partition f l))))) + +(** Same as [partition_count], but using [forall] *) +val partition_count_forall: #a:eqtype -> f:(a -> Tot bool) + -> l:list a + -> Lemma (requires True) + (ensures (forall x. count x l = (count x (fst (partition f l)) + count x (snd (partition f l))))) + (* [SMTPat (partitionT f l)] *) + +(** Properties about subset **) + +val mem_subset (#a: eqtype) (la lb: list a) + : Lemma (subset la lb <==> (forall x. mem x la ==> mem x lb)) + [SMTPat (subset la lb)] + +(* NOTE: This is implied by mem_subset above, kept for compatibility *) +val subset_reflexive (#a: eqtype) (l: list a) + : Lemma (subset l l) + +(** Correctness of quicksort **) + +(** Correctness of [sortWith], part 1/2: the number of occurrences of +any [x] in [sortWith f l] is the same as the number of occurrences in +[l]. *) +val sortWith_permutation: #a:eqtype -> f:(a -> a -> Tot int) -> l:list a -> + Lemma (requires True) + (ensures (forall x. count x l = count x (sortWith f l))) + +(** [sorted f l] holds if, and only if, any two consecutive elements + [x], [y] of [l] are such that [f x y] holds + *) +let rec sorted (#a:Type) (f : a -> a -> Tot bool) : list a -> bool = function + | [] + | [_] -> true + | x::y::tl -> f x y && sorted f (y::tl) + +(** [f] is a total order if, and only if, it is reflexive, +anti-symmetric, transitive and total. *) +type total_order (#a:Type) (f: (a -> a -> Tot bool)) = + (forall a. f a a) (* reflexivity *) + /\ (forall a1 a2. f a1 a2 /\ f a2 a1 ==> a1 == a2) (* anti-symmetry *) + /\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *) + /\ (forall a1 a2. f a1 a2 \/ f a2 a1) (* totality *) + +(** Correctness of the merging of two sorted lists around a pivot. *) +val append_sorted: #a:eqtype + -> f:(a -> a -> Tot bool) + -> l1:list a{sorted f l1} + -> l2:list a{sorted f l2} + -> pivot:a + -> Lemma (requires (total_order #a f + /\ (forall y. mem y l1 ==> not(f pivot y)) + /\ (forall y. mem y l2 ==> f pivot y))) + (ensures (sorted f (l1@(pivot::l2)))) + [SMTPat (sorted f (l1@(pivot::l2)))] + +(** Correctness of [sortWith], part 2/2: the elements of [sortWith f +l] are sorted according to comparison function [f], and the elements +of [sortWith f l] are the elements of [l]. *) +val sortWith_sorted: #a:eqtype -> f:(a -> a -> Tot int) -> l:list a -> + Lemma (requires (total_order #a (bool_of_compare f))) + (ensures ((sorted (bool_of_compare f) (sortWith f l)) /\ (forall x. mem x l = mem x (sortWith f l)))) + +(** Properties of [noRepeats] *) +val noRepeats_nil + (#a: eqtype) +: Lemma + (ensures (noRepeats #a [])) + +val noRepeats_cons + (#a: eqtype) + (h: a) + (tl: list a) +: Lemma + (requires ((~ (mem h tl)) /\ noRepeats tl)) + (ensures (noRepeats #a (h::tl))) + +val noRepeats_append_elim + (#a: eqtype) + (l1 l2: list a) +: Lemma + (requires (noRepeats (l1 @ l2))) + (ensures (noRepeats l1 /\ noRepeats l2 /\ (forall x . 
mem x l1 ==> ~ (mem x l2)))) + +val noRepeats_append_intro + (#a: eqtype) + (l1 l2: list a) +: Lemma + (requires (noRepeats l1 /\ noRepeats l2 /\ (forall x . mem x l1 ==> ~ (mem x l2)))) + (ensures (noRepeats (l1 @ l2))) + +(** Properties of [no_repeats_p] *) +val no_repeats_p_nil + (#a: Type) +: Lemma + (ensures (no_repeats_p #a [])) + +val no_repeats_p_cons + (#a: Type) + (h: a) + (tl: list a) +: Lemma + (requires ((~ (memP h tl)) /\ no_repeats_p tl)) + (ensures (no_repeats_p #a (h::tl))) + +val no_repeats_p_append_elim + (#a: Type) + (l1 l2: list a) +: Lemma + (requires (no_repeats_p (l1 `append` l2))) + (ensures (no_repeats_p l1 /\ no_repeats_p l2 /\ (forall x . memP x l1 ==> ~ (memP x l2)))) + +val no_repeats_p_append_intro + (#a: Type) + (l1 l2: list a) +: Lemma + (requires (no_repeats_p l1 /\ no_repeats_p l2 /\ (forall x . memP x l1 ==> ~ (memP x l2)))) + (ensures (no_repeats_p (l1 `append` l2))) + +val no_repeats_p_append + (#a: Type) + (l1 l2: list a) +: Lemma + (no_repeats_p (l1 `append` l2) <==> ( + (no_repeats_p l1 /\ no_repeats_p l2 /\ (forall x . memP x l1 ==> ~ (memP x l2))) + )) + +val no_repeats_p_append_swap + (#a: Type) + (l1 l2: list a) +: Lemma + (no_repeats_p (l1 `append` l2) <==> no_repeats_p (l2 `append` l1)) + +val no_repeats_p_append_permut + (#a: Type) + (l1 l2 l3 l4 l5: list a) +: Lemma + ((no_repeats_p (l1 `append` (l2 `append` (l3 `append` (l4 `append` l5))))) <==> no_repeats_p (l1 `append` (l4 `append` (l3 `append` (l2 `append` l5))))) + +val no_repeats_p_false_intro + (#a: Type) + (l1 l l2 l3: list a) +: Lemma + (requires (Cons? l)) + (ensures (~ (no_repeats_p (l1 `append` (l `append` (l2 `append` (l `append` l3))))))) + +(** Properties of [assoc] *) + +val assoc_nil + (#a: eqtype) + (#b: Type) + (x: a) +: Lemma + (ensures (assoc #a #b x [] == None)) + +val assoc_cons_eq + (#a: eqtype) + (#b: Type) + (x: a) + (y: b) + (q: list (a & b)) +: Lemma + (ensures (assoc x ((x, y) :: q) == Some y)) + +val assoc_cons_not_eq + (#a: eqtype) + (#b: Type) + (x x': a) + (y: b) + (q: list (a & b)) +: Lemma + (requires (x <> x')) + (ensures (assoc x' ((x, y) :: q) == assoc x' q)) + +val assoc_append_elim_r + (#a: eqtype) + (#b: Type) + (x: a) + (l1 l2: list (a & b)) +: Lemma + (requires (assoc x l2 == None \/ ~ (assoc x l1 == None))) + (ensures (assoc x (l1 @ l2) == assoc x l1)) + +val assoc_append_elim_l + (#a: eqtype) + (#b: Type) + (x: a) + (l1 l2: list (a & b)) +: Lemma + (requires (assoc x l1 == None)) + (ensures (assoc x (l1 @ l2) == assoc x l2)) + +val assoc_memP_some + (#a: eqtype) + (#b: Type) + (x: a) + (y: b) + (l: list (a & b)) +: Lemma + (requires (assoc x l == Some y)) + (ensures (memP (x, y) l)) + +val assoc_memP_none + (#a: eqtype) + (#b: Type) + (x: a) + (l: list (a & b)) +: Lemma + (requires (assoc x l == None)) + (ensures (forall y . ~ (memP (x, y) l))) + +val assoc_mem + (#a: eqtype) + (#b: Type) + (x: a) + (l: list (a & b)) +: Lemma + (ensures (mem x (map fst l) <==> (exists y . assoc x l == Some y))) + +(** Properties of [fold_left] *) + +val fold_left_invar + (#a #b: Type) + (f: (a -> b -> Tot a)) + (l: list b) + (p: (a -> Tot Type0)) + : Lemma + (requires forall (x: a) (y: b) . p x ==> memP y l ==> p (f x y) ) + (ensures forall (x: a) . p x ==> p (fold_left f x l)) + +val fold_left_map + (#a #b #c: Type) + (f_aba: a -> b -> Tot a) + (f_bc: b -> Tot c) + (f_aca: a -> c -> Tot a) + (l: list b) + : Lemma + (requires forall (x: a) (y: b) . f_aba x y == f_aca x (f_bc y) ) + (ensures forall (x : a) . 
fold_left f_aba x l == fold_left f_aca x (map f_bc l) ) + +val map_append + (#a #b: Type) + (f: a -> Tot b) + (l1 l2: list a) +: + Lemma + (ensures map f (l1 @ l2) == map f l1 @ map f l2) + +val fold_left_append + (#a #b: Type) + (f: a -> b -> Tot a) + (l1 l2: list b) + : Lemma + (ensures forall x . fold_left f x (l1 @ l2) == fold_left f (fold_left f x l1) l2) + +val fold_left_monoid + (#a: Type) + (opA: (a -> a -> Tot a)) + (zeroA: a) + (l: list a) +: Lemma + (requires + (forall u v w . (u `opA` (v `opA` w)) == ((u `opA` v) `opA` w)) /\ + (forall x . (x `opA` zeroA) == x) /\ + (forall x . (zeroA `opA` x) == x)) + (ensures + forall x . + (fold_left opA x l) == (x `opA` (fold_left opA zeroA l))) + +val fold_left_append_monoid + (#a: Type) + (f: (a -> a -> Tot a)) + (z: a) + (l1 l2: list a) +: Lemma + (requires + (forall u v w . f u (f v w) == f (f u v) w) /\ + (forall x . f x z == x) /\ + (forall x . f z x == x)) + (ensures + fold_left f z (l1 @ l2) == f (fold_left f z l1) (fold_left f z l2)) + +(* Properties of [index] *) + +val index_extensionality + (#a: Type) + (l1 l2: list a) +: Lemma + (requires + (length l1 == length l2 /\ + (forall (i: nat) . i < length l1 ==> index l1 i == index l2 i))) + (ensures (l1 == l2)) + +(** Properties of [strict_suffix_of] *) + +val strict_suffix_of_nil (#a: Type) (x: a) (l: list a) +: Lemma + (requires True) + (ensures (strict_suffix_of [] (x::l))) + +val strict_suffix_of_or_eq_nil (#a: Type) (l: list a) +: Lemma + (ensures (strict_suffix_of [] l \/ l == [])) + +val strict_suffix_of_cons (#a: Type) (x: a) (l: list a) : + Lemma + (ensures (strict_suffix_of l (x::l))) + +val strict_suffix_of_trans (#a: Type) (l1 l2 l3: list a) +: Lemma + (requires True) + (ensures ((strict_suffix_of l1 l2 /\ strict_suffix_of l2 l3) ==> strict_suffix_of l1 l3)) + [SMTPat (strict_suffix_of l1 l2); SMTPat (strict_suffix_of l2 l3)] + +val strict_suffix_of_correct (#a:Type) (l1 l2: list a) +: Lemma + (requires True) + (ensures (strict_suffix_of l1 l2 ==> l1 << l2)) + +val map_strict_suffix_of (#a #b: Type) (f: a -> Tot b) (l1: list a) (l2: list a) : + Lemma + (requires True) + (ensures (strict_suffix_of l1 l2 ==> strict_suffix_of (map f l1) (map f l2))) + +val mem_strict_suffix_of (#a: eqtype) (l1: list a) (m: a) (l2: list a) +: Lemma + (requires True) + (ensures ((mem m l1 /\ strict_suffix_of l1 l2) ==> mem m l2)) + +val strict_suffix_of_exists_append + (#a: Type) + (l1 l2: list a) +: Lemma + (ensures (strict_suffix_of l1 l2 ==> (exists l3 . l2 == append l3 l1))) + +val strict_suffix_of_or_eq_exists_append + (#a: Type) + (l1 l2: list a) +: Lemma + (ensures ((strict_suffix_of l1 l2 \/ l1 == l2) ==> (exists l3 . l2 == append l3 l1))) + +(** Properties of << with lists *) + +val precedes_tl + (#a: Type) + (l: list a {Cons? 
l}) +: Lemma (ensures (tl l << l)) + +val precedes_append_cons_r + (#a: Type) + (l1: list a) + (x: a) + (l2: list a) +: Lemma + (requires True) + (ensures (x << append l1 (x :: l2))) + [SMTPat (x << append l1 (x :: l2))] + +val precedes_append_cons_prod_r + (#a #b: Type) + (l1: list (a & b)) + (x: a) + (y: b) + (l2: list (a & b)) +: Lemma + (ensures + x << (append l1 ((x, y) :: l2)) /\ + y << (append l1 ((x, y) :: l2))) + +val memP_precedes + (#a: Type) + (x: a) + (l: list a) +: Lemma + (requires True) + (ensures (memP x l ==> x << l)) + +val assoc_precedes + (#a: eqtype) + (#b: Type) + (x: a) + (l: list (a & b)) + (y: b) +: Lemma + (requires (assoc x l == Some y)) + (ensures (x << l /\ y << l)) + +(** Properties about find *) + +val find_none + (#a: Type) + (f: (a -> Tot bool)) + (l: list a) + (x: a) +: Lemma + (requires (find f l == None /\ memP x l)) + (ensures (f x == false)) + +(** Properties of init and last *) + +val append_init_last (#a: Type) (l: list a { Cons? l }) : Lemma + (l == append (init l) [last l]) + +val init_last_def (#a: Type) (l: list a) (x: a) : Lemma + (let l' = append l [x] in + init l' == l /\ last l' == x) + +val init_last_inj (#a: Type) (l1: list a { Cons? l1 } ) (l2: list a { Cons? l2 } ) : Lemma + (requires (init l1 == init l2 /\ last l1 == last l2)) + (ensures (l1 == l2)) + +(* Properties of for_all *) + +val for_all_append #a (f: a -> Tot bool) (s1 s2: list a): Lemma + (ensures for_all f (s1 @ s2) <==> for_all f s1 && for_all f s2) \ No newline at end of file diff --git a/stage0/ulib/FStar.List.Tot.fst b/stage0/ulib/FStar.List.Tot.fst new file mode 100644 index 00000000000..1e10ef59993 --- /dev/null +++ b/stage0/ulib/FStar.List.Tot.fst @@ -0,0 +1,18 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.List.Tot +include FStar.List.Tot.Base +include FStar.List.Tot.Properties diff --git a/stage0/ulib/FStar.List.fst b/stage0/ulib/FStar.List.fst new file mode 100644 index 00000000000..f4b2640d1be --- /dev/null +++ b/stage0/ulib/FStar.List.fst @@ -0,0 +1,367 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(** +F* standard library List module. + +@summary F* stdlib List module. +*) +module FStar.List +open FStar.All +include FStar.List.Tot + +(** Base operations **) + +(** [hd l] returns the first element of [l]. Raises an exception if +[l] is empty (thus, [hd] hides [List.Tot.hd] which requires [l] to be +nonempty at type-checking time.) 
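For example, [hd [1; 2; 3]] evaluates to [1], while [hd []] raises a +run-time exception.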
Named as in: OCaml, F#, Coq *) +val hd: list 'a -> ML 'a +let hd = function + | hd::tl -> hd + | _ -> failwith "head of empty list" + +(** [tail l] returns [l] without its first element. Raises an +exception if [l] is empty (thus, [tail] hides [List.Tot.tail] which +requires [l] to be nonempty at type-checking time). Similar to: tl in +OCaml, F#, Coq *) +val tail: list 'a -> ML (list 'a) +let tail = function + | hd::tl -> tl + | _ -> failwith "tail of empty list" + +(** [tl l] returns [l] without its first element. Raises an exception +if [l] is empty (thus, [tl] hides [List.Tot.tl] which requires [l] to +be nonempty at type-checking time). Named as in: tl in OCaml, F#, Coq +*) +val tl : list 'a -> ML (list 'a) +let tl l = tail l + +(** [last l] returns the last element of [l]. Requires, at +type-checking time, that [l] be nonempty. Named as in: Haskell +*) +val last: list 'a -> ML 'a +let rec last = function + | [hd] -> hd + | _::tl -> last tl + | _ -> failwith "last of empty list" + +(** [init l] returns [l] without its last element. Requires, at +type-checking time, that [l] be nonempty. Named as in: Haskell +*) +val init: list 'a -> ML (list 'a) +let rec init = function + | [_] -> [] + | hd::tl -> hd::(init tl) + | _ -> failwith "init of empty list" + +(** [nth l n] returns the [n]-th element in list [l] (with the first +element being the 0-th) if [l] is long enough, or raises an exception +otherwise (thus, [nth] hides [List.Tot.nth] which has [option] type.) +Named as in: OCaml, F#, Coq *) + +val nth: list 'a -> int -> ML 'a +let rec nth l n = + if n < 0 then + failwith "nth takes a non-negative integer as input" + else + if n = 0 then + match l with + | [] -> failwith "not enough elements" + | hd::_ -> hd + else + match l with + | [] -> failwith "not enough elements" + | _::tl -> nth tl (n - 1) + +(** Iterators **) + +(** [iter f l] performs [f x] for each element [x] of [l], in the +order in which they appear in [l]. Named as in: OCaml, F# . *) +val iter: ('a -> ML unit) -> list 'a -> ML unit +let rec iter f x = match x with + | [] -> () + | a::tl -> let _ = f a in iter f tl + +(** [iteri_aux n f l] performs, for each i, [f (i+n) x] for the i-th +element [x] of [l], in the order in which they appear in [l]. *) +val iteri_aux: int -> (int -> 'a -> ML unit) -> list 'a -> ML unit +let rec iteri_aux i f x = match x with + | [] -> () + | a::tl -> f i a; iteri_aux (i+1) f tl + +(** [iteri f l] performs, for each [i], [f i x] for the i-th +element [x] of [l], in the order in which they appear in [l]. Named as +in: OCaml *) +val iteri: (int -> 'a -> ML unit) -> list 'a -> ML unit +let iteri f x = iteri_aux 0 f x + +(** [map f l] applies [f] to each element of [l] and returns the list +of results, in the order of the original elements in [l]. (Hides +[List.Tot.map] which requires, at type-checking time, [f] to be a pure +total function.) Named as in: OCaml, Coq, F# *) +val map: ('a -> ML 'b) -> list 'a -> ML (list 'b) +let rec map f x = match x with + | [] -> [] + | a::tl -> f a::map f tl + +(** [mapT f l] applies [f] to each element of [l] and returns the list +of results, in the order of the original elements in [l]. Requires, at +type-checking time, [f] to be a pure total function. *) +val mapT: ('a -> Tot 'b) -> list 'a -> Tot (list 'b) +let mapT = FStar.List.Tot.map + +(** [mapi_init f n l] applies, for each [k], [f (n+k)] to the [k]-th +element of [l] and returns the list of results, in the order of the +original elements in [l]. 
(Hides [List.Tot.mapi_init] which requires, +at type-checking time, [f] to be a pure total function.) *) +val mapi_init: (int -> 'a -> ML 'b) -> list 'a -> int -> ML (list 'b) +let rec mapi_init f l i = match l with + | [] -> [] + | hd::tl -> (f i hd)::(mapi_init f tl (i+1)) + +(** [mapi f l] applies, for each [k], [f k] to the [k]-th element of +[l] and returns the list of results, in the order of the original +elements in [l]. (Hides [List.Tot.mapi] which requires, at +type-checking time, [f] to be a pure total function.) Named as in: +OCaml *) +val mapi: (int -> 'a -> ML 'b) -> list 'a -> ML (list 'b) +let mapi f l = mapi_init f l 0 + +(** [concatMap f l] applies [f] to each element of [l] and returns the +concatenation of the results, in the order of the original elements of +[l]. This is equivalent to [flatten (map f l)]. (Hides +[List.Tot.concatMap], which requires, at type-checking time, [f] to be +a pure total function.) *) + +val concatMap: ('a -> ML (list 'b)) -> list 'a -> ML (list 'b) +let rec concatMap f = function + | [] -> [] + | a::tl -> + let fa = f a in + let ftl = concatMap f tl in + fa @ ftl + +(** [map2 f l1 l2] computes [f x1 x2] for each element x1 of [l1] and +the element [x2] of [l2] at the same position, and returns the list of +such results, in the order of the original elements in [l1]. Raises an +exception if [l1] and [l2] have different lengths. Named as in: OCaml +*) +val map2: ('a -> 'b -> ML 'c) -> list 'a -> list 'b -> ML (list 'c) +let rec map2 f l1 l2 = match l1, l2 with + | [], [] -> [] + | hd1::tl1, hd2::tl2 -> (f hd1 hd2)::(map2 f tl1 tl2) + | _, _ -> failwith "The lists do not have the same length" + +(** [map3 f l1 l2 l3] computes [f x1 x2 x3] for each element x1 of +[l1] and the element [x2] of [l2] and the element [x3] of [l3] at the +same position, and returns the list of such results, in the order of +the original elements in [l1]. Raises an exception if [l1], [l2] and +[l3] have different lengths. Named as in: OCaml *) +val map3: ('a -> 'b -> 'c -> ML 'd) -> list 'a -> list 'b -> list 'c -> ML (list 'd) +let rec map3 f l1 l2 l3 = match l1, l2, l3 with + | [], [], [] -> [] + | hd1::tl1, hd2::tl2, hd3::tl3 -> (f hd1 hd2 hd3)::(map3 f tl1 tl2 tl3) + | _, _, _ -> failwith "The lists do not have the same length" + +(** [fold_left f x [y1; y2; ...; yn]] computes (f (... (f x y1) y2) +... yn). (Hides [List.Tot.fold_left], which requires, at type-checking +time, [f] to be a pure total function.) Named as in: OCaml, Coq *) +val fold_left: ('a -> 'b -> ML 'a) -> 'a -> list 'b -> ML 'a +let rec fold_left f x y = match y with + | [] -> x + | hd::tl -> fold_left f (f x hd) tl + +(** [fold_left2 f x [y1; y2; ...; yn] [z1; z2; ...; zn]] computes (f +(... (f x y1 z1) y2 z2 ... yn zn). Raises an exception if [y1; y2; +...] and [z1; z2; ...] have different lengths. (Thus, hides +[List.Tot.fold_left2] which requires such a condition at type-checking +time.) Named as in: OCaml *) +val fold_left2: ('s -> 'a -> 'b -> ML 's) -> 's -> list 'a -> list 'b -> ML 's +let rec fold_left2 f a l1 l2 = match l1, l2 with + | [], [] -> a + | hd1::tl1, hd2::tl2 -> fold_left2 f (f a hd1 hd2) tl1 tl2 + | _, _ -> failwith "The lists do not have the same length" + +(** [fold_right f [x1; x2; ...; xn] y] computes (f x1 (f x2 (... (f xn +y)) ... )). (Hides [List.Tot.fold_right], which requires, at +type-checking time, [f] to be a pure total function.) 
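For example, [fold_right (fun x acc -> x :: acc) [1; 2; 3] []] rebuilds the +list [[1; 2; 3]].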
Named as in: +OCaml, Coq *) +val fold_right: ('a -> 'b -> ML 'b) -> list 'a -> 'b -> ML 'b +let rec fold_right f l x = match l with + | [] -> x + | hd::tl -> f hd (fold_right f tl x) + +(** List searching **) + +(** [filter f l] returns [l] with all elements [x] such that [f x] +does not hold removed. (Hides [List.Tot.filter] which requires, at +type-checking time, [f] to be a pure total function.) Named as in: +OCaml, Coq *) +val filter: ('a -> ML bool) -> list 'a -> ML (list 'a) +let rec filter f = function + | [] -> [] + | hd::tl -> if f hd then hd::(filter f tl) else filter f tl + +(** [for_all f l] returns [true] if, and only if, for all elements [x] +appearing in [l], [f x] holds. (Hides [List.Tot.for_all], which +requires, at type-checking time, [f] to be a pure total function.) +Named as in: OCaml. Similar to: List.forallb in Coq *) +val for_all: ('a -> ML bool) -> list 'a -> ML bool +let rec for_all f l = match l with + | [] -> true + | hd::tl -> if f hd then for_all f tl else false + +(** [forall2 f l1 l2] returns [true] if, and only if, for all elements +[x1] appearing in [l1] and the element [x2] appearing in [l2] at the +same position, [f x1 x2] holds. Raises an exception if [l1] and [l2] +have different lengths. Similar to: List.for_all2 in OCaml. Similar +to: List.Forall2 in Coq (which is propositional) *) +val forall2: ('a -> 'b -> ML bool) -> list 'a -> list 'b -> ML bool +let rec forall2 f l1 l2 = match l1,l2 with + | [], [] -> true + | hd1::tl1, hd2::tl2 -> if f hd1 hd2 then forall2 f tl1 tl2 else false + | _, _ -> failwith "The lists do not have the same length" + +(** [collect f l] applies [f] to each element of [l] and returns the +concatenation of the results, in the order of the original elements of +[l]. It is equivalent to [flatten (map f l)]. (Hides +[List.Tot.collect] which requires, at type-checking time, [f] to be a +pure total function.) TODO: what is the difference with [concatMap]? +*) +val collect: ('a -> ML (list 'b)) -> list 'a -> ML (list 'b) +let rec collect f l = match l with + | [] -> [] + | hd::tl -> append (f hd) (collect f tl) + +(** [tryFind f l] returns [Some x] for some element [x] appearing in +[l] such that [f x] holds, or [None] only if no such [x] +exists. (Hides [List.Tot.tryFind], which requires, at type-checking +time, [f] to be a pure total function.) *) +val tryFind: ('a -> ML bool) -> list 'a -> ML (option 'a) +let rec tryFind p l = match l with + | [] -> None + | hd::tl -> if p hd then Some hd else tryFind p tl + +(** [tryPick f l] returns [Some y] for some element [x] appearing in [l] +such that [f x = Some y] for some y, or [None] only if [f x = None] +for all elements [x] of [l]. (Hides [List.Tot.tryPick], which +requires, at type-checking time, [f] to be a pure total function.) *) +val tryPick: ('a -> ML (option 'b)) -> list 'a -> ML (option 'b) +let rec tryPick f l = match l with + | [] -> None + | hd::tl -> + match f hd with + | Some x -> Some x + | None -> tryPick f tl + +(** [choose f l] returns the list of [y] for all elements [x] +appearing in [l] such that [f x = Some y] for some [y]. (Hides +[List.Tot.choose] which requires, at type-checking time, [f] to be a +pure total function.) *) +val choose: ('a -> ML (option 'b)) -> list 'a -> ML (list 'b) +let rec choose f l = match l with + | [] -> [] + | hd::tl -> + match f hd with + | Some x -> x::(choose f tl) + | None -> choose f tl + +(** [partition f l] returns the pair of lists [(l1, l2)] where all +elements [x] of [l] are in [l1] if [f x] holds, and in [l2] +otherwise. 
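For example, [partition (fun x -> x > 2) [1; 3; 2; 4]] returns +[([3; 4], [1; 2])].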
Both [l1] and [l2] retain the original order of [l]. (Hides +[List.Tot.partition], which requires, at type-checking time, [f] to be +a pure total function.) *) +val partition: ('a -> ML bool) -> list 'a -> ML (list 'a & list 'a) +let rec partition f = function + | [] -> [], [] + | hd::tl -> + let l1, l2 = partition f tl in + if f hd + then hd::l1, l2 + else l1, hd::l2 + +(** List of tuples **) + +(** [zip] takes two lists [x1, ..., xn] and [y1, ..., yn] and returns +the list of pairs [(x1, y1), ..., (xn, yn)]. Raises an exception if +the two lists have different lengths. Named as in: Haskell *) +val zip: list 'a -> list 'b -> ML (list ('a & 'b)) +let rec zip l1 l2 = match l1,l2 with + | [], [] -> [] + | hd1::tl1, hd2::tl2 -> (hd1,hd2)::(zip tl1 tl2) + | _, _ -> failwith "The lists do not have the same length" + +(** Sorting (implemented as quicksort) **) + +(** [sortWith compare l] returns the list [l'] containing the elements +of [l] sorted along the comparison function [compare], in such a way +that if [compare x y > 0], then [x] appears before [y] in [l']. (Hides +[List.Tot.sortWith], which requires, at type-checking time, [compare] +to be a pure total function.) *) +val sortWith: ('a -> 'a -> ML int) -> list 'a -> ML (list 'a) +let rec sortWith f = function + | [] -> [] + | pivot::tl -> + let hi, lo = partition (fun x -> f pivot x > 0) tl in + sortWith f lo@(pivot::sortWith f hi) + +(** [splitAt n l] returns the pair of lists [(l1, l2)] such that [l1] +contains the first [n] elements of [l] and [l2] contains the +rest. Raises an exception if [l] has fewer than [n] elements. *) +val splitAt: nat -> list 'a -> ML (list 'a & list 'a) +let rec splitAt n l = + if n = 0 then [], l + else + match l with + | [] -> failwith "splitAt index is more that list length" + | hd::tl -> + let l1, l2 = splitAt (n - 1) tl in + hd::l1, l2 + +(** [filter_map f l] returns the list of [y] for all elements [x] +appearing in [l] such that [f x = Some y] for some [y]. (Implemented +here as a tail-recursive version of [choose] *) +let filter_map (f:'a -> ML (option 'b)) (l:list 'a) : ML (list 'b) = + let rec filter_map_acc (acc:list 'b) (l:list 'a) : ML (list 'b) = + match l with + | [] -> + rev acc + | hd :: tl -> + match f hd with + | Some hd -> + filter_map_acc (hd :: acc) tl + | None -> + filter_map_acc acc tl + in + filter_map_acc [] l + +(** [index f l] returns the position index in list [l] of the first +element [x] in [l] such that [f x] holds. Raises an exception if no +such [x] exists. TODO: rename this function (it hides List.Tot.index +which has a completely different semantics.) *) +val index: ('a -> ML bool) -> list 'a -> ML int +let index f l = + let rec index l i : ML int = + match l with + | [] -> + failwith "List.index: not found" + | hd :: tl -> + if f hd then + i + else + index tl (i + 1) + in + index l 0 diff --git a/stage0/ulib/FStar.MRef.fst b/stage0/ulib/FStar.MRef.fst new file mode 100644 index 00000000000..4c5d8c3ed6b --- /dev/null +++ b/stage0/ulib/FStar.MRef.fst @@ -0,0 +1,37 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.MRef +open FStar.Heap +open FStar.ST + +open FStar.Preorder + +private let p_pred (#a:Type) (#b:preorder a) (r:mref a b) (p:(a -> Type)) + = fun h -> h `contains` r /\ p (sel h r) + +let token #_ #_ r p = witnessed (p_pred r p) + +let witness_token #_ #_ m p = + gst_recall (contains_pred m); + gst_witness (p_pred m p) + +let recall_token #_ #_ m p = gst_recall (p_pred m p) + +let lemma_functoriality #_ #_ r p q = lemma_functoriality (p_pred r p) (p_pred r q) + +let recall p = gst_recall p + +let witness p = gst_witness p diff --git a/stage0/ulib/FStar.MRef.fsti b/stage0/ulib/FStar.MRef.fsti new file mode 100644 index 00000000000..d31f2746bf3 --- /dev/null +++ b/stage0/ulib/FStar.MRef.fsti @@ -0,0 +1,50 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.MRef +open FStar.Heap +open FStar.ST + +open FStar.Preorder + +let stable = FStar.Preorder.stable + +val token (#a:Type) (#b:preorder a) (r:mref a b) (p:(a -> Type){stable p b}) : Type0 + +val witness_token: #a:Type -> #b:preorder a -> m:mref a b -> p:(a -> Type){stable p b} + -> ST unit (requires (fun h0 -> p (sel h0 m))) + (ensures (fun h0 _ h1 -> h0==h1 /\ token m p)) + +val recall_token: #a:Type -> #b:preorder a -> m:mref a b -> p:(a -> Type){stable p b} + -> ST unit (requires (fun _ -> token m p)) + (ensures (fun h0 _ h1 -> h0==h1 /\ p (sel h1 m))) + +let spred (#a:Type) (rel:preorder a) = p:(a -> Type){Preorder.stable p rel} + +val lemma_functoriality (#a:Type) (#rel:preorder a) (r:mref a rel) (p q:spred rel) + : Lemma (requires (token r p /\ (forall x. p x ==> q x))) + (ensures (token r q)) + +(* KM : These don't have much to do here... *) + +val recall: p:(heap -> Type){ST.stable p} -> + ST unit + (requires (fun _ -> witnessed p)) + (ensures (fun h0 _ h1 -> h0 == h1 /\ p h1)) + +val witness: p:(heap -> Type){ST.stable p} -> + ST unit + (requires (fun h0 -> p h0)) + (ensures (fun h0 _ h1 -> h0==h1 /\ witnessed p)) diff --git a/stage0/ulib/FStar.Map.fst b/stage0/ulib/FStar.Map.fst new file mode 100644 index 00000000000..540ea614292 --- /dev/null +++ b/stage0/ulib/FStar.Map.fst @@ -0,0 +1,102 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +(** + * Implementation of partial maps with extensional equality + *) +module FStar.Map +open FStar.Set +open FStar.FunctionalExtensionality +module S = FStar.Set +module F = FStar.FunctionalExtensionality + + +(* The main "trick" in the representation of the type `t` + * is to use a domain-restricted function type `key ^-> value` + * from the FStar.FunctionalExtensionality library. + * These restricted function types enjoy extensional equality, + * which is necessary if Map.t is to also enjoy extensional equality. + *) +noeq +type t (key:eqtype) (value:Type) = { + mappings: key ^-> value; + domain: set key +} + +let sel #key #value m k = m.mappings k + +(* Since mappings are restricted functions, + assignments to that field must use `F.on` + to restrict the domain of the functional maps *) +let upd #key #value m k v = { + mappings = F.on key (fun x -> if x = k then v else m.mappings x); + domain = S.union m.domain (singleton k) +} + +(* idem *) +let const #key #value v = { + mappings = F.on key (fun _ -> v); + domain = complement empty +} + +let domain #key #value m = m.domain + +let contains #key #value m k = mem k m.domain + +(* Again, use F.on to build a domain-restricted function *) +let concat #key #value m1 m2 = { + mappings = F.on key (fun x -> if mem x m2.domain then m2.mappings x else m1.mappings x); + domain = union m1.domain m2.domain +} + +let map_val #_ #_ f #key m = { + mappings = F.on key (fun x -> f (m.mappings x)); + domain = m.domain +} + +let restrict #key #value s m = { + mappings = m.mappings; + domain = intersect s m.domain +} + +let map_literal #k #v f = { + mappings = F.on k f; + domain = complement empty; +} + +let lemma_SelUpd1 #key #value m k v = () +let lemma_SelUpd2 #key #value m k1 k2 v = () +let lemma_SelConst #key #value v k = () +let lemma_SelRestrict #key #value m ks k = () +let lemma_SelConcat1 #key #value m1 m2 k = () +let lemma_SelConcat2 #key #value m1 m2 k = () +let lemma_SelMapVal #val1 #val2 f #key m k = () +let lemma_InDomUpd1 #key #value m k1 k2 v = () +let lemma_InDomUpd2 #key #value m k1 k2 v = () +let lemma_InDomConstMap #key #value v k = () +let lemma_InDomConcat #key #value m1 m2 k = () +let lemma_InMapVal #val1 #val2 f #key m k = () +let lemma_InDomRestrict #key #value m ks k = () +let lemma_ContainsDom #key #value m k = () +let lemma_UpdDomain #key #value m k v = () +let lemma_map_literal #key #value f = () + +let equal (#key:eqtype) (#value:Type) (m1:t key value) (m2:t key value) : Type0 = + F.feq m1.mappings m2.mappings /\ + S.equal m1.domain m2.domain + +let lemma_equal_intro #key #value m1 m2 = () +let lemma_equal_elim #key #value m1 m2 = () +let lemma_equal_refl #key #value m1 m2 = () diff --git a/stage0/ulib/FStar.Map.fsti b/stage0/ulib/FStar.Map.fsti new file mode 100644 index 00000000000..109aa14bdeb --- /dev/null +++ b/stage0/ulib/FStar.Map.fsti @@ -0,0 +1,189 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +(** + + @summary FStar.Map provides a polymorphic, partial map from keys to + values, where keys support decidable equality. + + `m:Map.t key value` is a partial map from `key` to `value` + + A distinctive feature of the library is in its model of partiality. + + A map can be seen as a pair of: + 1. a total map `key -> Tot value` + 2. a set of keys that record the domain of the map + +*) +module FStar.Map +module S = FStar.Set + +(* Map.t key value: The main type provided by this module *) +val t (key:eqtype) ([@@@strictly_positive] value:Type u#a) + : Type u#a + +(* sel m k : Look up key `k` in map `m` *) +val sel: #key:eqtype -> #value:Type -> t key value -> key -> Tot value + +(* upd m k v : A map identical to `m` except mapping `k` to `v` *) +val upd: #key:eqtype -> #value:Type -> t key value -> key -> value -> Tot (t key value) + +(* const v : A constant map mapping all keys to `v` *) +val const: #key:eqtype -> #value:Type -> value -> Tot (t key value) + +(* domain m : The set of keys on which this partial map is defined *) +val domain: #key:eqtype -> #value:Type -> t key value -> Tot (S.set key) + +(* contains m k: Decides if key `k` is in the map `m` *) +val contains: #key:eqtype -> #value:Type -> t key value -> key -> Tot bool + +(* concat m1 m2 : + A map whose domain is the union of the domains of `m1` and `m2`. + + Maps every key `k` in the domain of `m1` to `sel m1 k` + and all other keys to `sel m2 k`. +*) +val concat: #key:eqtype -> #value:Type -> t key value -> t key value -> Tot (t key value) + +(* map_val f m: + A map whose domain is the same as `m` but all values have + `f` applied to them. +*) +val map_val: #val1:Type -> #val2:Type -> f:(val1 -> val2) -> #key:eqtype -> t key val1 -> Tot (t key val2) + +(* restrict s m: + Restricts the domain of `m` to (domain m `intersect` s) +*) +val restrict: #key:eqtype -> #value:Type -> S.set key -> t key value -> Tot (t key value) + +(* const_on dom v: A defined notion, for convenience + A partial constant map on dom +*) +let const_on (#key:eqtype) (#value:Type) (dom:S.set key) (v:value) + : t key value + = restrict dom (const v) + + +(* map_literal f: A map that is extensionally equal to the function [f] *) +val map_literal (#k:eqtype) (#v:Type) (f: k -> Tot v) + : t k v + +(* disjoint_dom m1 m2: + Disjoint domains. TODO: its pattern is biased towards `m1`. Why? + *) +let disjoint_dom (#key:eqtype) (#value:Type) (m1:t key value) (m2:t key value) + = forall x.{:pattern (contains m1 x)(* ; (contains m2 x) *)} contains m1 x ==> not (contains m2 x) + +(* has_dom m dom: A relational version of the `domain m` function *) +let has_dom (#key:eqtype) (#value:Type) (m:t key value) (dom:S.set key) + = forall x. 
contains m x <==> S.mem x dom + +(* Properties about map functions *) +val lemma_SelUpd1: #key:eqtype -> #value:Type -> m:t key value -> k:key -> v:value -> + Lemma (requires True) (ensures (sel (upd m k v) k == v)) + [SMTPat (sel (upd m k v) k)] + +val lemma_SelUpd2: #key:eqtype -> #value:Type -> m:t key value -> k1:key -> k2:key -> v:value -> + Lemma (requires True) (ensures (k2=!=k1 ==> sel (upd m k2 v) k1 == sel m k1)) + [SMTPat (sel (upd m k2 v) k1)] + +val lemma_SelConst: #key:eqtype -> #value:Type -> v:value -> k:key -> + Lemma (requires True) (ensures (sel (const v) k == v)) + [SMTPat (sel (const v) k)] + +val lemma_SelRestrict: #key:eqtype -> #value:Type -> m:t key value -> ks:S.set key -> k:key -> + Lemma (requires True) (ensures (sel (restrict ks m) k == sel m k)) + [SMTPat (sel (restrict ks m) k)] + +val lemma_SelConcat1: #key:eqtype -> #value:Type -> m1:t key value -> m2:t key value -> k:key -> + Lemma (requires True) (ensures (contains m2 k ==> sel (concat m1 m2) k==sel m2 k)) + [SMTPat (sel (concat m1 m2) k)] + +val lemma_SelConcat2: #key:eqtype -> #value:Type -> m1:t key value -> m2:t key value -> k:key -> + Lemma (requires True) (ensures (not(contains m2 k) ==> sel (concat m1 m2) k==sel m1 k)) + [SMTPat (sel (concat m1 m2) k)] + +val lemma_SelMapVal: #val1:Type -> #val2:Type -> f:(val1 -> val2) -> #key:eqtype -> m:t key val1 -> k:key -> + Lemma (requires True) (ensures (sel (map_val f m) k == f (sel m k))) + [SMTPat (sel (map_val f m) k)] + +val lemma_InDomUpd1: #key:eqtype -> #value:Type -> m:t key value -> k1:key -> k2:key -> v:value -> + Lemma (requires True) (ensures (contains (upd m k1 v) k2 == (k1=k2 || contains m k2))) + [SMTPat (contains (upd m k1 v) k2)] + +val lemma_InDomUpd2: #key:eqtype -> #value:Type -> m:t key value -> k1:key -> k2:key -> v:value -> + Lemma (requires True) (ensures (k2=!=k1 ==> contains (upd m k2 v) k1 == contains m k1)) + [SMTPat (contains (upd m k2 v) k1)] + +val lemma_InDomConstMap: #key:eqtype -> #value:Type -> v:value -> k:key -> + Lemma (requires True) (ensures (contains (const v) k)) + [SMTPat (contains (const v) k)] + +val lemma_InDomConcat: #key:eqtype -> #value:Type -> m1:t key value -> m2:t key value -> k:key -> + Lemma (requires True) (ensures (contains (concat m1 m2) k==(contains m1 k || contains m2 k))) + [SMTPat (contains (concat m1 m2) k)] + +val lemma_InMapVal: #val1:Type -> #val2:Type -> f:(val1 -> val2) -> #key:eqtype -> m:t key val1 -> k:key -> + Lemma (requires True) (ensures (contains (map_val f m) k == contains m k)) + [SMTPat (contains (map_val f m) k)] + +val lemma_InDomRestrict: #key:eqtype -> #value:Type -> m:t key value -> ks:S.set key -> k:key -> + Lemma (requires True) (ensures (contains (restrict ks m) k == (S.mem k ks && contains m k))) + [SMTPat (contains (restrict ks m) k)] + +val lemma_ContainsDom: #key:eqtype -> #value:Type -> m:t key value -> k:key -> + Lemma (requires True) (ensures (contains m k = S.mem k (domain m))) + [SMTPatOr[[SMTPat (contains m k)]; [SMTPat (S.mem k (domain m))]]] + +val lemma_UpdDomain : #key:eqtype -> #value:Type -> m:t key value -> k:key -> v:value -> + Lemma (requires True) + (ensures (S.equal (domain (upd m k v)) (S.union (domain m) (S.singleton k)))) + [SMTPat (domain (upd m k v))] + +val lemma_map_literal (#k:eqtype) (#v:Type) (f: k -> Tot v) + : Lemma ((forall k.{:pattern (sel (map_literal f) k)} sel (map_literal f) k == f k) /\ + domain (map_literal f) == Set.complement Set.empty) + [SMTPat (map_literal f)] + +(*** Extensional equality ***) + +(* equal m1 m2: + Maps `m1` 
and `m2` have the same domain and + and are pointwise equal on that domain. + *) +val equal (#key:eqtype) (#value:Type) (m1:t key value) (m2:t key value) : prop + +(* lemma_equal_intro: + Introducing `equal m1 m2` by showing maps to be pointwise equal on the same domain +*) +val lemma_equal_intro: #key:eqtype -> #value:Type -> m1:t key value -> m2:t key value -> + Lemma (requires (forall k. sel m1 k == sel m2 k /\ + contains m1 k = contains m2 k)) + (ensures (equal m1 m2)) + [SMTPat (equal m1 m2)] + +(* lemma_equal_elim: + Eliminating `equal m1 m2` to provable equality of maps + Internally, this involves a use of functional extensionality +*) +val lemma_equal_elim: #key:eqtype -> #value:Type -> m1:t key value -> m2:t key value -> + Lemma (ensures (equal m1 m2 <==> m1 == m2)) + [SMTPat (equal m1 m2)] + +[@@(deprecated "Use lemma_equal_elim instead")] +val lemma_equal_refl: #key:eqtype -> #value:Type -> m1:t key value -> m2:t key value -> + Lemma (requires (m1 == m2)) + (ensures (equal m1 m2)) diff --git a/stage0/ulib/FStar.MarkovsPrinciple.fst b/stage0/ulib/FStar.MarkovsPrinciple.fst new file mode 100644 index 00000000000..fd10c91d1fa --- /dev/null +++ b/stage0/ulib/FStar.MarkovsPrinciple.fst @@ -0,0 +1,26 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.MarkovsPrinciple + +assume val markovs_principle : p:(nat -> Tot bool) -> Ghost nat + (requires (~(forall (n:nat). ~(p n)))) + (ensures (fun n -> p n)) + +(* here is a stronger variant of Markov's principle + (might be as strong as indefinite description?) *) +assume val stronger_markovs_principle : p:(nat -> GTot bool) -> Ghost nat + (requires (~(forall (n:nat). ~(p n)))) + (ensures (fun n -> p n)) diff --git a/stage0/ulib/FStar.Math.Euclid.fst b/stage0/ulib/FStar.Math.Euclid.fst new file mode 100644 index 00000000000..cdd3bae2979 --- /dev/null +++ b/stage0/ulib/FStar.Math.Euclid.fst @@ -0,0 +1,273 @@ +module FStar.Math.Euclid + +open FStar.Mul +open FStar.Math.Lemmas + +/// +/// Auxiliary lemmas +/// + +val eq_mult_left (a b:int) : Lemma (requires a = b * a) (ensures a = 0 \/ b = 1) +let eq_mult_left a b = () + +val eq_mult_one (a b:int) : Lemma + (requires a * b = 1) + (ensures (a = 1 /\ b = 1) \/ (a = -1 /\ b = -1)) +let eq_mult_one a b = () + +val opp_idempotent (a:int) : Lemma (-(-a) == a) +let opp_idempotent a = () + +val add_sub_l (a b:int) : Lemma (a - b + b = a) +let add_sub_l a b = () + +val add_sub_r (a b:int) : Lemma (a + b - b = a) +let add_sub_r a b = () + +/// +/// Divides relation +/// + +let divides_reflexive a = + Classical.exists_intro (fun q -> a = q * a) 1 + +let divides_transitive a b c = + eliminate exists q1. b == q1 * a + returns a `divides` c + with _pf. + eliminate exists q2. c == q2 * b + returns _ + with _pf2. + introduce exists q. c == q * a + with (q1 * q2) + and () + + +let divide_antisym a b = + if a <> 0 then + Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q1. 
b = q1 * a)) + (fun q1 -> + Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q2. a = q2 * b)) + (fun q2 -> + assert (b = q1 * a); + assert (a = q2 * b); + assert (b = q1 * (q2 * b)); + paren_mul_right q1 q2 b; + eq_mult_left b (q1 * q2); + eq_mult_one q1 q2)) + +let divides_0 a = + Classical.exists_intro (fun q -> 0 = q * a) 0 + +let divides_1 a = () + +let divides_minus a b = + Classical.exists_elim (a `divides` (-b)) + (Squash.get_proof (a `divides` b)) + (fun q -> Classical.exists_intro (fun q' -> -b = q' * a) (-q)) + +let divides_opp a b = + Classical.exists_elim ((-a) `divides` b) + (Squash.get_proof (a `divides` b)) + (fun q -> Classical.exists_intro (fun q' -> b = q' * (-a)) (-q)) + +let divides_plus a b d = + Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q1. a = q1 * d)) + (fun q1 -> + Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q2. b = q2 * d)) + (fun q2 -> + assert (a + b = q1 * d + q2 * d); + distributivity_add_left q1 q2 d; + Classical.exists_intro (fun q -> a + b = q * d) (q1 + q2))) + +let divides_sub a b d = + Classical.forall_intro_2 (Classical.move_requires_2 divides_minus); + divides_plus a (-b) d + +let divides_mult_right a b d = + Classical.exists_elim (d `divides` (a * b)) (Squash.get_proof (d `divides` b)) + (fun q -> + paren_mul_right a q d; + Classical.exists_intro (fun r -> a * b = r * d) (a * q)) + +/// +/// GCD +/// + +let mod_divides a b = + Classical.exists_intro (fun q -> a = q * b) (a / b) + +let divides_mod a b = + Classical.exists_elim (a % b = 0) (Squash.get_proof (b `divides` a)) + (fun q -> cancel_mul_div q b) + +let is_gcd_unique a b c d = + divide_antisym c d + +let is_gcd_reflexive a = () + +let is_gcd_symmetric a b d = () + +let is_gcd_0 a = () + +let is_gcd_1 a = () + +let is_gcd_minus a b d = + Classical.forall_intro_2 (Classical.move_requires_2 divides_minus); + opp_idempotent b + +let is_gcd_opp a b d = + Classical.forall_intro_2 (Classical.move_requires_2 divides_minus); + divides_opp d a; + divides_opp d b + +let is_gcd_plus a b q d = + add_sub_r b (q * a); + Classical.forall_intro_3 (Classical.move_requires_3 divides_plus); + Classical.forall_intro_3 (Classical.move_requires_3 divides_mult_right); + Classical.forall_intro_3 (Classical.move_requires_3 divides_sub) + +/// +/// Extended Euclidean algorithm +/// + +val is_gcd_for_euclid (a b q d:int) : Lemma + (requires is_gcd b (a - q * b) d) + (ensures is_gcd a b d) +let is_gcd_for_euclid a b q d = + add_sub_l a (q * b); + is_gcd_plus b (a - q * b) q d + +val egcd (a b u1 u2 u3 v1 v2 v3:int) : Pure (int & int & int) + (requires v3 >= 0 /\ + u1 * a + u2 * b = u3 /\ + v1 * a + v2 * b = v3 /\ + (forall d. 
is_gcd u3 v3 d ==> is_gcd a b d)) + (ensures (fun (u, v, d) -> u * a + v * b = d /\ is_gcd a b d)) + (decreases v3) + +let rec egcd a b u1 u2 u3 v1 v2 v3 = + if v3 = 0 then + begin + divides_0 u3; + (u1, u2, u3) + end + else + begin + let q = u3 / v3 in + euclidean_division_definition u3 v3; + assert (u3 - q * v3 = (q * v3 + u3 % v3) - q * v3); + assert (q * v3 - q * v3 = 0); + swap_add_plus_minus (q * v3) (u3 % v3) (q * v3); + calc (==) { + (u1 - q * v1) * a + (u2 - q * v2) * b; + == { _ by (FStar.Tactics.Canon.canon()) } + (u1 * a + u2 * b) - q * (v1 * a + v2 * b); + == { } + u3 - q * v3; + == { lemma_div_mod u3 v3 } + u3 % v3; + }; + let u1, v1 = v1, u1 - q * v1 in + let u2, v2 = v2, u2 - q * v2 in + let u3' = u3 in + let v3' = v3 in + let u3, v3 = v3, u3 - q * v3 in + (* proving the implication in the precondition *) + introduce forall d. is_gcd v3' (u3' - q * v3') d ==> is_gcd u3' v3' d with + introduce _ ==> _ with _. + is_gcd_for_euclid u3' v3' q d; + let r = egcd a b u1 u2 u3 v1 v2 v3 in + r + end + +let euclid_gcd a b = + if b >= 0 then + egcd a b 1 0 a 0 1 b + else ( + introduce forall d. is_gcd a (-b) d ==> is_gcd a b d + with introduce _ ==> _ + with _pf. + (is_gcd_minus a b d; + is_gcd_symmetric b a d); + let res = egcd a b 1 0 a 0 (-1) (-b) in + let _, _, d = res in + assert (is_gcd a b d); + res + ) + +val is_gcd_prime_aux (p:int) (a:pos{a < p}) (d:int) : Lemma + (requires is_prime p /\ d `divides` p /\ d `divides` a) + (ensures d = 1 \/ d = -1) +let is_gcd_prime_aux p a d = () + +val is_gcd_prime (p:int{is_prime p}) (a:pos{a < p}) : Lemma (is_gcd p a 1) +let is_gcd_prime p a = + Classical.forall_intro_2 (Classical.move_requires_2 divides_minus); + Classical.forall_intro (Classical.move_requires (is_gcd_prime_aux p a)); + assert (forall x. 
x `divides` p /\ x `divides` a ==> x = 1 \/ x = -1 /\ x `divides` 1) + +let bezout_prime p a = + let r, s, d = euclid_gcd p a in + assert (r * p + s * a = d); + assert (is_gcd p a d); + is_gcd_prime p a; + is_gcd_unique p a 1 d; + assert (d = 1 \/ d = -1); + assert ((-r) * p + (-s) * a == -(r * p + s * a)) by (FStar.Tactics.Canon.canon()); + if d = 1 then r, s else -r, -s + +let euclid n a b r s = + let open FStar.Math.Lemmas in + calc (==) { + b % n; + == { distributivity_add_left (r * n) (s * a) b } + (r * n * b + s * a * b) % n; + == { paren_mul_right s a b } + (r * n * b + s * (a * b)) % n; + == { modulo_distributivity (r * n * b) (s * (a * b)) n } + ((r * n * b) % n + s * (a * b) % n) % n; + == { lemma_mod_mul_distr_r s (a * b) n } + ((r * n * b) % n + s * ((a * b) % n) % n) % n; + == { assert (a * b % n = 0) } + ((r * n * b) % n + s * 0 % n) % n; + == { assert (s * 0 == 0) } + ((r * n * b) % n + 0 % n) % n; + == { modulo_lemma 0 n } + ((r * n * b) % n) % n; + == { lemma_mod_twice (r * n * b) n } + (r * n * b) % n; + == { _ by (FStar.Tactics.Canon.canon ()) } + (n * (r * b)) % n; + == { lemma_mod_mul_distr_l n (r * b) n} + n % n * (r * b) % n; + == { assert (n % n = 0) } + (0 * (r * b)) % n; + == { assert (0 * (r * b) == 0) } + 0 % n; + == { small_mod 0 n } + 0; + } + +let euclid_prime p a b = + let ra, sa, da = euclid_gcd p a in + let rb, sb, db = euclid_gcd p b in + assert (is_gcd p a da); + assert (is_gcd p b db); + assert (da `divides` p); + assert (da = 1 \/ da = -1 \/ da = p \/ da = -p); + if da = 1 then + euclid p a b ra sa + else if da = -1 then + begin + assert ((-ra) * p + (-sa) * a == -(ra * p + sa * a)) by (FStar.Tactics.Canon.canon()); + euclid p a b (-ra) (-sa) + end + else if da = p then + divides_mod a p + else + begin + opp_idempotent p; + divides_opp (-p) a; + divides_mod a p + end diff --git a/stage0/ulib/FStar.Math.Euclid.fsti b/stage0/ulib/FStar.Math.Euclid.fsti new file mode 100644 index 00000000000..f5d2ac9b965 --- /dev/null +++ b/stage0/ulib/FStar.Math.Euclid.fsti @@ -0,0 +1,127 @@ +module FStar.Math.Euclid + +open FStar.Mul + +/// +/// Divides relation +/// +/// It is reflexive, transitive, and antisymmetric up to sign. +/// When a <> 0, a `divides` b iff a % b = 0 (this is proved below) +/// + +let divides (a b:int) : prop = exists q. b = q * a + +val divides_reflexive (a:int) : Lemma (a `divides` a) [SMTPat (a `divides` a)] + +val divides_transitive (a b c:int) : Lemma + (requires a `divides` b /\ b `divides` c) + (ensures a `divides` c) + +val divide_antisym (a b:int) : Lemma + (requires a `divides` b /\ b `divides` a) + (ensures a = b \/ a = -b) + +val divides_0 (a:int) : Lemma (a `divides` 0) + +val divides_1 (a:int) : Lemma (requires a `divides` 1) (ensures a = 1 \/ a = -1) + +val divides_minus (a b:int) : Lemma + (requires a `divides` b) + (ensures a `divides` (-b)) + +val divides_opp (a b:int) : Lemma + (requires a `divides` b) + (ensures (-a) `divides` b) + +val divides_plus (a b d:int) : Lemma + (requires d `divides` a /\ d `divides` b) + (ensures d `divides` (a + b)) + +val divides_sub (a b d:int) : Lemma + (requires d `divides` a /\ d `divides` b) + (ensures d `divides` (a - b)) + +val divides_mult_right (a b d:int) : Lemma + (requires d `divides` b) + (ensures d `divides` (a * b)) + +/// +/// Greatest Common Divisor (GCD) relation +/// +/// We deviate from the standard definition in that we allow the divisor to +/// be negative. Thus, the GCD of two integers is unique up to sign. 
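+/// For instance (an illustrative example, not taken from the development):
+/// with this definition both `is_gcd 6 9 3` and `is_gcd 6 9 (-3)` hold,
+/// so a GCD is determined only up to its sign.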
+/// + +let is_gcd (a b d:int) : prop = + d `divides` a /\ + d `divides` b /\ + (forall x. (x `divides` a /\ x `divides` b) ==> x `divides` d) + +val mod_divides (a:int) (b:nonzero) : Lemma (requires a % b = 0) (ensures b `divides` a) + +val divides_mod (a:int) (b:nonzero) : Lemma (requires b `divides` a) (ensures a % b = 0) + +val is_gcd_unique (a b c d:int) : Lemma + (requires is_gcd a b c /\ is_gcd a b d) + (ensures c = d \/ c = -d) + +val is_gcd_reflexive (a:int) : Lemma (is_gcd a a a) + +val is_gcd_symmetric (a b d:int) : Lemma + (requires is_gcd a b d) + (ensures is_gcd b a d) + +val is_gcd_0 (a:int) : Lemma (is_gcd a 0 a) + +val is_gcd_1 (a:int) : Lemma (is_gcd a 1 1) + +val is_gcd_minus (a b d:int) : Lemma + (requires is_gcd a (-b) d) + (ensures is_gcd b a d) + +val is_gcd_opp (a b d:int) : Lemma + (requires is_gcd a b d) + (ensures is_gcd b a (-d)) + +val is_gcd_plus (a b q d:int) : Lemma + (requires is_gcd a b d) + (ensures is_gcd a (b + q * a) d) + +/// +/// Extended Euclidean algorithm +/// +/// Computes the GCD of two integers (a, b) together with Bézout coefficients +/// (r, s) satisfying r a + s b = gcd(a, b) +/// + +val euclid_gcd (a b:int) : Pure (int & int & int) + (requires True) + (ensures fun (r, s, d) -> r * a + s * b = d /\ is_gcd a b d) + +/// +/// A definition of primality based on the divides relation +/// + +let is_prime (p:int) = + 1 < p /\ + (forall (d:int).{:pattern (d `divides` p)} + (d `divides` p ==> (d = 1 \/ d = -1 \/ d = p \/ d = -p))) + +val bezout_prime (p:int) (a:pos{a < p}) : Pure (int & int) + (requires is_prime p) + (ensures fun (r, s) -> r * p + s * a = 1) + +/// +/// Euclid's lemma and its generalization to arbitrary integers +/// +/// - If a prime p divides a*b, then it must divide at least one of a or b +/// - If n divides a*b and a,n are coprime then n divides b +/// + +val euclid (n:pos) (a b r s:int) : Lemma + (requires (a * b) % n = 0 /\ r * n + s * a = 1) + (ensures b % n = 0) + +val euclid_prime (p:int{is_prime p}) (a b:int) : Lemma + (requires (a * b) % p = 0) + (ensures a % p = 0 \/ b % p = 0) diff --git a/stage0/ulib/FStar.Math.Fermat.fst b/stage0/ulib/FStar.Math.Fermat.fst new file mode 100644 index 00000000000..1c8140d0cff --- /dev/null +++ b/stage0/ulib/FStar.Math.Fermat.fst @@ -0,0 +1,524 @@ +module FStar.Math.Fermat + +open FStar.Mul +open FStar.Math.Lemmas +open FStar.Math.Euclid + +#set-options "--fuel 1 --ifuel 0 --z3rlimit 20" + +/// +/// Pow +/// + +val pow_zero (k:pos) : Lemma (ensures pow 0 k == 0) (decreases k) +let rec pow_zero k = + match k with + | 1 -> () + | _ -> pow_zero (k - 1) + +val pow_one (k:nat) : Lemma (pow 1 k == 1) +let rec pow_one = function + | 0 -> () + | k -> pow_one (k - 1) + +val pow_plus (a:int) (k m:nat): Lemma (pow a (k + m) == pow a k * pow a m) +let rec pow_plus a k m = + match k with + | 0 -> () + | _ -> + calc (==) { + pow a (k + m); + == { } + a * pow a ((k + m) - 1); + == { pow_plus a (k - 1) m } + a * (pow a (k - 1) * pow a m); + == { } + pow a k * pow a m; + } + +val pow_mod (p:pos) (a:int) (k:nat) : Lemma (pow a k % p == pow (a % p) k % p) +let rec pow_mod p a k = + if k = 0 then () + else + calc (==) { + pow a k % p; + == { } + a * pow a (k - 1) % p; + == { lemma_mod_mul_distr_r a (pow a (k - 1)) p } + (a * (pow a (k - 1) % p)) % p; + == { pow_mod p a (k - 1) } + (a * (pow (a % p) (k - 1) % p)) % p; + == { lemma_mod_mul_distr_r a (pow (a % p) (k - 1)) p } + a * pow (a % p) (k - 1) % p; + == { lemma_mod_mul_distr_l a (pow (a % p) (k - 1)) p } + (a % p * pow (a % p) (k - 1)) % p; + == { } + 
pow (a % p) k % p; + } + +/// +/// Binomial theorem +/// + +val binomial (n k:nat) : nat +let rec binomial n k = + match n, k with + | _, 0 -> 1 + | 0, _ -> 0 + | _, _ -> binomial (n - 1) k + binomial (n - 1) (k - 1) + +val binomial_0 (n:nat) : Lemma (binomial n 0 == 1) +let binomial_0 n = () + +val binomial_lt (n:nat) (k:nat{n < k}) : Lemma (binomial n k = 0) +let rec binomial_lt n k = + match n, k with + | _, 0 -> () + | 0, _ -> () + | _ -> binomial_lt (n - 1) k; binomial_lt (n - 1) (k - 1) + +val binomial_n (n:nat) : Lemma (binomial n n == 1) +let rec binomial_n n = + match n with + | 0 -> () + | _ -> binomial_lt n (n + 1); binomial_n (n - 1) + +val pascal (n:nat) (k:pos{k <= n}) : Lemma + (binomial n k + binomial n (k - 1) = binomial (n + 1) k) +let pascal n k = () + +val factorial: nat -> pos +let rec factorial = function + | 0 -> 1 + | n -> n * factorial (n - 1) + +let ( ! ) n = factorial n + +val binomial_factorial (m n:nat) : Lemma (binomial (n + m) n * (!n * !m) == !(n + m)) +let rec binomial_factorial m n = + match m, n with + | 0, _ -> binomial_n n + | _, 0 -> () + | _ -> + let open FStar.Math.Lemmas in + let reorder1 (a b c d:int) : Lemma (a * (b * (c * d)) == c * (a * (b * d))) = + assert (a * (b * (c * d)) == c * (a * (b * d))) by (FStar.Tactics.CanonCommSemiring.int_semiring()) + in + let reorder2 (a b c d:int) : Lemma (a * ((b * c) * d) == b * (a * (c * d))) = + assert (a * ((b * c) * d) == b * (a * (c * d))) by (FStar.Tactics.CanonCommSemiring.int_semiring()) + in + calc (==) { + binomial (n + m) n * (!n * !m); + == { pascal (n + m - 1) n } + (binomial (n + m - 1) n + binomial (n + m - 1) (n - 1)) * (!n * !m); + == { addition_is_associative n m (-1) } + (binomial (n + (m - 1)) n + binomial (n + (m - 1)) (n - 1)) * (!n * !m); + == { distributivity_add_left (binomial (n + (m - 1)) n) + (binomial (n + (m - 1)) (n - 1)) + (!n * !m) + } + binomial (n + (m - 1)) n * (!n * !m) + + binomial (n + (m - 1)) (n - 1) * (!n * !m); + == { } + binomial (n + (m - 1)) n * (!n * (m * !(m - 1))) + + binomial ((n - 1) + m) (n - 1) * ((n * !(n - 1)) * !m); + == { reorder1 (binomial (n + (m - 1)) n) (!n) m (!(m - 1)); + reorder2 (binomial ((n - 1) + m) (n - 1)) n (!(n - 1)) (!m) + } + m * (binomial (n + (m - 1)) n * (!n * !(m - 1))) + + n * (binomial ((n - 1) + m) (n - 1) * (!(n - 1) * !m)); + == { binomial_factorial (m - 1) n; binomial_factorial m (n - 1) } + m * !(n + (m - 1)) + n * !((n - 1) + m); + == { } + m * !(n + m - 1) + n * !(n + m - 1); + == { } + n * !(n + m - 1) + m * !(n + m - 1); + == { distributivity_add_left m n (!(n + m - 1)) } + (n + m) * !(n + m - 1); + == { } + !(n + m); + } + +val sum: a:nat -> b:nat{a <= b} -> f:((i:nat{a <= i /\ i <= b}) -> int) + -> Tot int (decreases (b - a)) +let rec sum a b f = + if a = b then f a else f a + sum (a + 1) b f + +val sum_extensionality (a:nat) (b:nat{a <= b}) (f g:(i:nat{a <= i /\ i <= b}) -> int) : Lemma + (requires forall (i:nat{a <= i /\ i <= b}). 
f i == g i) + (ensures sum a b f == sum a b g) + (decreases (b - a)) +let rec sum_extensionality a b f g = + if a = b then () + else sum_extensionality (a + 1) b f g + +val sum_first (a:nat) (b:nat{a < b}) (f:(i:nat{a <= i /\ i <= b}) -> int) : Lemma + (sum a b f == f a + sum (a + 1) b f) +let sum_first a b f = () + +val sum_last (a:nat) (b:nat{a < b}) (f:(i:nat{a <= i /\ i <= b}) -> int) : Lemma + (ensures sum a b f == sum a (b - 1) f + f b) + (decreases (b - a)) +let rec sum_last a b f = + if a + 1 = b then sum_first a b f + else sum_last (a + 1) b f + +val sum_const (a:nat) (b:nat{a <= b}) (k:int) : Lemma + (ensures sum a b (fun i -> k) == k * (b - a + 1)) + (decreases (b - a)) +let rec sum_const a b k = + if a = b then () + else + begin + sum_const (a + 1) b k; + sum_extensionality (a + 1) b + (fun (i:nat{a <= i /\ i <= b}) -> k) + (fun (i:nat{a + 1 <= i /\ i <= b}) -> k) + end + +val sum_scale (a:nat) (b:nat{a <= b}) (f:(i:nat{a <= i /\ i <= b}) -> int) (k:int) : Lemma + (ensures k * sum a b f == sum a b (fun i -> k * f i)) + (decreases (b - a)) +let rec sum_scale a b f k = + if a = b then () + else + begin + sum_scale (a + 1) b f k; + sum_extensionality (a + 1) b + (fun (i:nat{a <= i /\ i <= b}) -> k * f i) + (fun (i:nat{a + 1 <= i /\ i <= b}) -> k * f i) + end + +val sum_add (a:nat) (b:nat{a <= b}) (f g:(i:nat{a <= i /\ i <= b}) -> int) : Lemma + (ensures sum a b f + sum a b g == sum a b (fun i -> f i + g i)) + (decreases (b - a)) +let rec sum_add a b f g = + if a = b then () + else + begin + sum_add (a + 1) b f g; + sum_extensionality (a + 1) b + (fun (i:nat{a <= i /\ i <= b}) -> f i + g i) + (fun (i:nat{a + 1 <= i /\ i <= b}) -> f i + g i) + end + +val sum_shift (a:nat) (b:nat{a <= b}) (f:(i:nat{a <= i /\ i <= b}) -> int) : Lemma + (ensures sum a b f == sum (a + 1) (b + 1) (fun (i:nat{a + 1 <= i /\ i <= b + 1}) -> f (i - 1))) + (decreases (b - a)) +let rec sum_shift a b f = + if a = b then () + else + begin + sum_shift (a + 1) b f; + sum_extensionality (a + 2) (b + 1) + (fun (i:nat{a + 1 <= i /\ i <= b + 1}) -> f (i - 1)) + (fun (i:nat{a + 1 + 1 <= i /\ i <= b + 1}) -> f (i - 1)) + end + +val sum_mod (a:nat) (b:nat{a <= b}) (f:(i:nat{a <= i /\ i <= b}) -> int) (n:pos) : Lemma + (ensures sum a b f % n == sum a b (fun i -> f i % n) % n) + (decreases (b - a)) +let rec sum_mod a b f n = + if a = b then () + else + let g = fun (i:nat{a <= i /\ i <= b}) -> f i % n in + let f' = fun (i:nat{a + 1 <= i /\ i <= b}) -> f i % n in + calc (==) { + sum a b f % n; + == { sum_first a b f } + (f a + sum (a + 1) b f) % n; + == { lemma_mod_plus_distr_r (f a) (sum (a + 1) b f) n } + (f a + (sum (a + 1) b f) % n) % n; + == { sum_mod (a + 1) b f n; sum_extensionality (a + 1) b f' g } + (f a + sum (a + 1) b g % n) % n; + == { lemma_mod_plus_distr_r (f a) (sum (a + 1) b g) n } + (f a + sum (a + 1) b g) % n; + == { lemma_mod_plus_distr_l (f a) (sum (a + 1) b g) n } + (f a % n + sum (a + 1) b g) % n; + == { } + sum a b g % n; + } + +val binomial_theorem_aux (a b:int) (n:nat) (i:nat{1 <= i /\ i <= n - 1}) : Lemma + (a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i) + + b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1)) == + binomial n i * pow a (n - i) * pow b i) +let binomial_theorem_aux a b n i = + let open FStar.Math.Lemmas in + calc (==) { + a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i) + + b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1)); + == { } + a * (binomial (n - 1) i * pow a ((n - i) - 1) * pow b i) + + b * (binomial (n - 1) (i - 1) * 
pow a (n - i) * pow b (i - 1)); + == { _ by (FStar.Tactics.CanonCommSemiring.int_semiring()) } + binomial (n - 1) i * ((a * pow a ((n - i) - 1)) * pow b i) + + binomial (n - 1) (i - 1) * (pow a (n - i) * (b * pow b (i - 1))); + == { assert (a * pow a ((n - i) - 1) == pow a (n - i)); assert (b * pow b (i - 1) == pow b i) } + binomial (n - 1) i * (pow a (n - i) * pow b i) + + binomial (n - 1) (i - 1) * (pow a (n - i) * pow b i); + == { _ by (FStar.Tactics.CanonCommSemiring.int_semiring()) } + (binomial (n - 1) i + binomial (n - 1) (i - 1)) * (pow a (n - i) * pow b i); + == { pascal (n - 1) i } + binomial n i * (pow a (n - i) * pow b i); + == { paren_mul_right (binomial n i) (pow a (n - i)) (pow b i) } + binomial n i * pow a (n - i) * pow b i; + } + +#push-options "--fuel 2" + +val binomial_theorem (a b:int) (n:nat) : Lemma + (pow (a + b) n == sum 0 n (fun i -> binomial n i * pow a (n - i) * pow b i)) +let rec binomial_theorem a b n = + if n = 0 then () + else + if n = 1 then + (binomial_n 1; binomial_0 1) + else + let reorder (a b c d:int) : Lemma (a + b + (c + d) == a + d + (b + c)) = + assert (a + b + (c + d) == a + d + (b + c)) by (FStar.Tactics.CanonCommSemiring.int_semiring()) + in + calc (==) { + pow (a + b) n; + == { } + (a + b) * pow (a + b) (n - 1); + == { distributivity_add_left a b (pow (a + b) (n - 1)) } + a * pow (a + b) (n - 1) + b * pow (a + b) (n - 1); + == { binomial_theorem a b (n - 1) } + a * sum 0 (n - 1) (fun i -> binomial (n - 1) i * pow a (n - 1 - i) * pow b i) + + b * sum 0 (n - 1) (fun i -> binomial (n - 1) i * pow a (n - 1 - i) * pow b i); + == { sum_scale 0 (n - 1) (fun i -> binomial (n - 1) i * pow a (n - 1 - i) * pow b i) a; + sum_scale 0 (n - 1) (fun i -> binomial (n - 1) i * pow a (n - 1 - i) * pow b i) b + } + sum 0 (n - 1) (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + + sum 0 (n - 1) (fun i -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)); + == { sum_first 0 (n - 1) (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)); + sum_last 0 (n - 1) (fun i -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)); + sum_extensionality 1 (n - 1) + (fun (i:nat{1 <= i /\ i <= n - 1}) -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + (fun (i:nat{0 <= i /\ i <= n - 1}) -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)); + sum_extensionality 0 (n - 2) + (fun (i:nat{0 <= i /\ i <= n - 2}) -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + (fun (i:nat{0 <= i /\ i <= n - 1}) -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i))} + (a * (binomial (n - 0) 0 * pow a (n - 1 - 0) * pow b 0)) + sum 1 (n - 1) (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + + (sum 0 (n - 2) (fun i -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + b * (binomial (n - 1) (n - 1) * pow a (n - 1 - (n - 1)) * pow b (n - 1))); + == { binomial_0 n; binomial_n (n - 1) } + pow a n + sum 1 (n - 1) (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + + (sum 0 (n - 2) (fun i -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + pow b n); + == { sum_shift 0 (n - 2) (fun i -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)); + sum_extensionality 1 (n - 1) + (fun (i:nat{1 <= i /\ i <= n - 1}) -> (fun (i:nat{0 <= i /\ i <= n - 2}) -> b * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) (i - 1)) + (fun (i:nat{1 <= i /\ i <= n - 2 + 1}) -> b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1))) + } + pow a n + sum 1 (n - 1) (fun i -> a * (binomial (n - 1) i * pow 
a (n - 1 - i) * pow b i)) + + (sum 1 (n - 1) (fun i -> b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1))) + pow b n); + == { reorder (pow a n) + (sum 1 (n - 1) (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i))) + (sum 1 (n - 2 + 1) (fun i -> b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1)))) + (pow b n) + } + a * pow a (n - 1) + b * pow b (n - 1) + + (sum 1 (n - 1) (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + + sum 1 (n - 1) (fun i -> b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1)))); + == { sum_add 1 (n - 1) + (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i)) + (fun i -> b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1))) + } + pow a n + pow b n + + (sum 1 (n - 1) (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i) + + b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1)))); + + == { Classical.forall_intro (binomial_theorem_aux a b n); + sum_extensionality 1 (n - 1) + (fun i -> a * (binomial (n - 1) i * pow a (n - 1 - i) * pow b i) + + b * (binomial (n - 1) (i - 1) * pow a (n - 1 - (i - 1)) * pow b (i - 1))) + (fun i -> binomial n i * pow a (n - i) * pow b i) + } + pow a n + pow b n + sum 1 (n - 1) (fun i -> binomial n i * pow a (n - i) * pow b i); + == { } + pow a n + (sum 1 (n - 1) (fun i -> binomial n i * pow a (n - i) * pow b i) + pow b n); + == { binomial_0 n; binomial_n n } + binomial n 0 * pow a (n - 0) * pow b 0 + + (sum 1 (n - 1) (fun i -> binomial n i * pow a (n - i) * pow b i) + + binomial n n * pow a (n - n) * pow b n); + == { sum_first 0 n (fun i -> binomial n i * pow a (n - i) * pow b i); + sum_last 1 n (fun i -> binomial n i * pow a (n - i) * pow b i); + sum_extensionality 1 n + (fun (i:nat{0 <= i /\ i <= n}) -> binomial n i * pow a (n - i) * pow b i) + (fun (i:nat{1 <= i /\ i <= n}) -> binomial n i * pow a (n - i) * pow b i); + sum_extensionality 1 (n - 1) + (fun (i:nat{1 <= i /\ i <= n}) -> binomial n i * pow a (n - i) * pow b i) + (fun (i:nat{1 <= i /\ i <= n - 1}) -> binomial n i * pow a (n - i) * pow b i) + } + sum 0 n (fun i -> binomial n i * pow a (n - i) * pow b i); + } + +#pop-options + +val factorial_mod_prime (p:int{is_prime p}) (k:pos{k < p}) : Lemma + (requires !k % p = 0) + (ensures False) + (decreases k) +let rec factorial_mod_prime p k = + if k = 0 then () + else + begin + euclid_prime p k !(k - 1); + factorial_mod_prime p (k - 1) + end + +val binomial_prime (p:int{is_prime p}) (k:pos{k < p}) : Lemma + (binomial p k % p == 0) +let binomial_prime p k = + calc (==) { + (p * !(p -1)) % p; + == { FStar.Math.Lemmas.lemma_mod_mul_distr_l p (!(p - 1)) p } + (p % p * !(p - 1)) % p; + == { } + (0 * !(p - 1)) % p; + == { } + 0; + }; + binomial_factorial (p - k) k; + assert (binomial p k * (!k * !(p - k)) == p * !(p - 1)); + euclid_prime p (binomial p k) (!k * !(p - k)); + if (binomial p k % p <> 0) then + begin + euclid_prime p !k !(p - k); + assert (!k % p = 0 \/ !(p - k) % p = 0); + if !k % p = 0 then + factorial_mod_prime p k + else + factorial_mod_prime p (p - k) + end + +val freshman_aux (p:int{is_prime p}) (a b:int) (i:pos{i < p}): Lemma + ((binomial p i * pow a (p - i) * pow b i) % p == 0) +let freshman_aux p a b i = + calc (==) { + (binomial p i * pow a (p - i) * pow b i) % p; + == { paren_mul_right (binomial p i) (pow a (p - i)) (pow b i) } + (binomial p i * (pow a (p - i) * pow b i)) % p; + == { lemma_mod_mul_distr_l (binomial p i) (pow a (p - i) * pow b i) p } + (binomial p i % p 
* (pow a (p - i) * pow b i)) % p; + == { binomial_prime p i } + 0; + } + +val freshman (p:int{is_prime p}) (a b:int) : Lemma + (pow (a + b) p % p = (pow a p + pow b p) % p) +let freshman p a b = + let f (i:nat{0 <= i /\ i <= p}) = binomial p i * pow a (p - i) * pow b i % p in + Classical.forall_intro (freshman_aux p a b); + calc (==) { + pow (a + b) p % p; + == { binomial_theorem a b p } + sum 0 p (fun i -> binomial p i * pow a (p - i) * pow b i) % p; + == { sum_mod 0 p (fun i -> binomial p i * pow a (p - i) * pow b i) p } + sum 0 p f % p; + == { sum_first 0 p f; sum_last 1 p f } + (f 0 + sum 1 (p - 1) f + f p) % p; + == { sum_extensionality 1 (p - 1) f (fun _ -> 0) } + (f 0 + sum 1 (p - 1) (fun _ -> 0) + f p) % p; + == { sum_const 1 (p - 1) 0 } + (f 0 + f p) % p; + == { } + ((binomial p 0 * pow a p * pow b 0) % p + + (binomial p p * pow a 0 * pow b p) % p) % p; + == { binomial_0 p; binomial_n p; small_mod 1 p } + (pow a p % p + pow b p % p) % p; + == { lemma_mod_plus_distr_l (pow a p) (pow b p % p) p; + lemma_mod_plus_distr_r (pow a p) (pow b p) p } + (pow a p + pow b p) % p; + } + +val fermat_aux (p:int{is_prime p}) (a:pos{a < p}) : Lemma + (ensures pow a p % p == a % p) + (decreases a) +let rec fermat_aux p a = + if a = 1 then pow_one p + else + calc (==) { + pow a p % p; + == { } + pow ((a - 1) + 1) p % p; + == { freshman p (a - 1) 1 } + (pow (a - 1) p + pow 1 p) % p; + == { pow_one p } + (pow (a - 1) p + 1) % p; + == { lemma_mod_plus_distr_l (pow (a - 1) p) 1 p } + (pow (a - 1) p % p + 1) % p; + == { fermat_aux p (a - 1) } + ((a - 1) % p + 1) % p; + == { lemma_mod_plus_distr_l (a - 1) 1 p } + ((a - 1) + 1) % p; + == { } + a % p; + } + +let fermat p a = + if a % p = 0 then + begin + small_mod 0 p; + pow_mod p a p; + pow_zero p + end + else + calc (==) { + pow a p % p; + == { pow_mod p a p } + pow (a % p) p % p; + == { fermat_aux p (a % p) } + (a % p) % p; + == { lemma_mod_twice a p } + a % p; + } + +val mod_mult_congr_aux (p:int{is_prime p}) (a b c:int) : Lemma + (requires (a * c) % p = (b * c) % p /\ 0 <= b /\ b <= a /\ a < p /\ c % p <> 0) + (ensures a = b) +let mod_mult_congr_aux p a b c = + let open FStar.Math.Lemmas in + calc (==>) { + (a * c) % p == (b * c) % p; + ==> { mod_add_both (a * c) (b * c) (-b * c) p } + (a * c - b * c) % p == (b * c - b * c) % p; + ==> { swap_mul a c; swap_mul b c; lemma_mul_sub_distr c a b } + (c * (a - b)) % p == (b * c - b * c) % p; + ==> { small_mod 0 p; lemma_mod_mul_distr_l c (a - b) p } + (c % p * (a - b)) % p == 0; + }; + let r, s = FStar.Math.Euclid.bezout_prime p (c % p) in + FStar.Math.Euclid.euclid p (c % p) (a - b) r s; + small_mod (a - b) p + +let mod_mult_congr p a b c = + let open FStar.Math.Lemmas in + lemma_mod_mul_distr_l a c p; + lemma_mod_mul_distr_l b c p; + if a % p = b % p then () + else if b % p < a % p then mod_mult_congr_aux p (a % p) (b % p) c + else mod_mult_congr_aux p (b % p) (a % p) c + +let fermat_alt p a = + calc (==) { + (pow a (p - 1) * a) % p; + == { lemma_mod_mul_distr_r (pow a (p - 1)) a p; + lemma_mod_mul_distr_l (pow a (p - 1)) (a % p) p + } + ((pow a (p - 1) % p) * (a % p)) % p; + == { pow_mod p a (p - 1) } + ((pow (a % p) (p - 1) % p) * (a % p)) % p; + == { lemma_mod_mul_distr_l (pow (a % p) (p - 1)) (a % p) p } + (pow (a % p) (p - 1) * (a % p)) % p; + == { } + pow (a % p) p % p; + == { fermat p (a % p) } + (a % p) % p; + == { lemma_mod_twice a p } + a % p; + == { } + (1 * a) % p; + }; + small_mod 1 p; + mod_mult_congr p (pow a (p - 1)) 1 a diff --git a/stage0/ulib/FStar.Math.Fermat.fsti 
b/stage0/ulib/FStar.Math.Fermat.fsti
new file mode 100644
index 00000000000..51972bd6513
--- /dev/null
+++ b/stage0/ulib/FStar.Math.Fermat.fsti
@@ -0,0 +1,37 @@
+module FStar.Math.Fermat
+
+open FStar.Mul
+open FStar.Math.Euclid
+
+/// Fermat's Little Theorem (and Binomial Theorem)
+///
+/// Proven by induction from the Freshman's dream identity
+///
+/// pow (a + b) p % p = (pow a p + pow b p) % p
+///
+/// which follows from the Binomial Theorem
+///
+/// pow (a + b) n = sum_{i=0}^n (binomial n i * pow a (n - i) * pow b i)
+///
+/// which in turn can be proved by induction from Pascal's identity
+///
+/// binomial n k + binomial n (k - 1) = binomial (n + 1) k
+///
+/// See
+/// https://github.com/coqtail/coqtail/blob/master/src/Hierarchy/Commutative_ring_binomial.v
+/// https://github.com/coq-contribs/rsa/blob/master/Binomials.v
+///
+
+#set-options "--fuel 0 --ifuel 0"
+
+let rec pow (a:int) (k:nat) : int =
+  if k = 0 then 1
+  else a * pow a (k - 1)
+
+val fermat (p:int{is_prime p}) (a:int) : Lemma (pow a p % p == a % p)
+
+val mod_mult_congr (p:int{is_prime p}) (a b c:int) : Lemma
+  (requires (a * c) % p = (b * c) % p /\ c % p <> 0)
+  (ensures a % p = b % p)
+
+val fermat_alt (p:int{is_prime p}) (a:int{a % p <> 0}) : Lemma (pow a (p - 1) % p == 1)
diff --git a/stage0/ulib/FStar.Math.Lemmas.fst b/stage0/ulib/FStar.Math.Lemmas.fst
new file mode 100644
index 00000000000..7f3cbbc03f5
--- /dev/null
+++ b/stage0/ulib/FStar.Math.Lemmas.fst
@@ -0,0 +1,711 @@
+(*
+   Copyright 2008-2018 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*) +module FStar.Math.Lemmas + +open FStar.Mul +open FStar.Math.Lib + +#set-options "--fuel 0 --ifuel 0" + +(* Lemma: definition of Euclidean division *) +let euclidean_div_axiom a b = () + +let lemma_eucl_div_bound a b q = () + +let lemma_mult_le_left a b c = () + +let lemma_mult_le_right a b c = () + +let lemma_mult_lt_left a b c = () + +let lemma_mult_lt_right a b c = () + +let lemma_mult_lt_sqr (n:nat) (m:nat) (k:nat{n < k && m < k}) + : Lemma (n * m < k * k) = + calc (<=) { + n * m; + <= { lemma_mult_le_left n m (k - 1) } + n * (k - 1); + <= { lemma_mult_le_right (k - 1) n (k - 1) } + (k - 1) * (k - 1); + <= {} + k*k - 1; + } + +(* Lemma: multiplication on integers is commutative *) +let swap_mul a b = () + +let lemma_cancel_mul a b n = () + +(* Lemma: multiplication is right distributive over addition *) +let distributivity_add_left a b c = () + +(* Lemma: multiplication is left distributive over addition *) +let distributivity_add_right a b c = + calc (==) { + a * (b + c); + == {} + (b + c) * a; + == { distributivity_add_left b c a } + b * a + c * a; + == {} + a * b + a * c; + } + +(* Lemma: multiplication is associative, hence parenthesizing is meaningless *) +(* GM: This is really just an identity since the LHS is associated to the left *) +let paren_mul_left a b c = () + +(* Lemma: multiplication is associative, hence parenthesizing is meaningless *) +let paren_mul_right a b c = () + +(* Lemma: addition is associative, hence parenthesizing is meaningless *) +let paren_add_left a b c = () + +(* Lemma: addition is associative, hence parenthesizing is meaningless *) +let paren_add_right a b c = () + +let addition_is_associative a b c = () + +let subtraction_is_distributive a b c = () + +let swap_add_plus_minus a b c = () + +(* Lemma: minus applies to the whole term *) +let neg_mul_left a b = () + +(* Lemma: minus applies to the whole term *) +let neg_mul_right a b = () + +let swap_neg_mul a b = + neg_mul_left a b; + neg_mul_right a b + +(* Lemma: multiplication is left distributive over subtraction *) +let distributivity_sub_left a b c = + calc (==) { + (a - b) * c; + == {} + (a + (-b)) * c; + == { distributivity_add_left a (-b) c } + a * c + (-b) * c; + == { neg_mul_left b c } + a * c - b * c; + } + +(* Lemma: multiplication is right distributive over subtraction *) +let distributivity_sub_right a b c = + calc (==) { + a * (b - c); + == {} + a * (b + (-c)); + == { distributivity_add_right a b (-c) } + a * b + a * (-c); + == { neg_mul_right a c } + a * b - a * c; + } + +(* Lemma: multiplication precedence on addition *) +let mul_binds_tighter a b c = () + +let lemma_abs_mul a b = () + +let lemma_abs_bound a b = () + +let mul_ineq1 a b c d = + if a = 0 || c = 0 then () + else begin + lemma_abs_bound a b; + lemma_abs_bound c d; + lemma_abs_mul a c; + lemma_mult_lt_left (abs a) (abs c) d; + lemma_mult_lt_right d (abs a) b; + lemma_abs_bound (a * c) (b * d); + () + end + +(* Zero is neutral for addition *) +let add_zero_left_is_same (n : int) : Lemma(0 + n = n) = () +let add_zero_right_is_same (n : int) : Lemma(n + 0 = n) = () + +(* One is neutral for multiplication *) +let mul_one_left_is_same (n : int) : Lemma(1 * n = n) = () +let mul_one_right_is_same (n : int) : Lemma(n * 1 = n) = () + +(* Multiplying by zero gives zero *) +let mul_zero_left_is_zero (n : int) : Lemma(0 * n = 0) = () +let mul_zero_right_is_zero (n : int) : Lemma(n * 0 = 0) = () + +let nat_times_nat_is_nat a b = () + +let pos_times_pos_is_pos a b = () + +let nat_over_pos_is_nat a b = () + +let 
nat_plus_nat_equal_zero_lemma a b = () + +let int_times_int_equal_zero_lemma a b = () + +#push-options "--fuel 1" +let pow2_double_sum n = () + +let pow2_double_mult n = pow2_double_sum n + +let rec pow2_lt_compat n m = + match m with + | 0 -> () + | _ -> pow2_lt_compat (n-1) (m-1) +#pop-options + +let pow2_le_compat n m = + if m < n then pow2_lt_compat n m + +#push-options "--fuel 1" +let rec pow2_plus n m = + match n with + | 0 -> () + | _ -> pow2_plus (n - 1) m +#pop-options + +(* Lemma : definition of the exponential property of pow2 *) +let pow2_minus n m = + pow2_plus (n - m) m; + slash_star_axiom (pow2 (n - m)) (pow2 m) (pow2 n) + +(* Lemma: loss of precision in euclidean division *) +let multiply_fractions a n = () + +(** Same as `small_mod` *) +let modulo_lemma a b = () + +(** Same as `lemma_div_def` in Math.Lib *) +let lemma_div_mod a p = () + +let lemma_mod_lt a p = () + +let lemma_div_lt_nat a n m = + lemma_div_mod a (pow2 m); + assert(a = pow2 m * (a / pow2 m) + a % pow2 m); + pow2_plus m (n-m); + assert(pow2 n = pow2 m * pow2 (n - m)) + +let lemma_div_lt a n m = + if a >= 0 then lemma_div_lt_nat a n m + +let bounded_multiple_is_zero (x:int) (n:pos) = () + +let small_div (a:nat) (n:pos) : Lemma (requires a < n) (ensures a / n == 0) = () + +let small_mod (a:nat) (n:pos) : Lemma (requires a < n) (ensures a % n == a) = () + +let lt_multiple_is_equal a b x n = + assert (0 * n == 0); + bounded_multiple_is_zero x n + +let lemma_mod_plus (a:int) (k:int) (n:pos) = + calc (==) { + (a+k*n)%n - a%n; + == { lemma_div_mod a n; lemma_div_mod (a+k*n) n } + ((a + k*n) - n*((a + k*n)/n)) - (a - n*(a/n)); + == {} + n*k + n*(a/n) - n*((a + k*n)/n); + == { distributivity_add_right n k (a/n); + distributivity_sub_right n (k + a/n) ((a + k*n)/n) } + n * (k + a/n - (a+k*n)/n); + }; + lt_multiple_is_equal ((a+k*n)%n) (a%n) (k + a/n - (a+k*n)/n) n; + () + +let lemma_div_plus (a:int) (k:int) (n:pos) = + calc (==) { + n * ((a+k*n)/n - a/n); + == { distributivity_sub_right n ((a+k*n)/n) (a/n) } + n * ((a+k*n)/n) - n*(a/n); + == { lemma_div_mod (a+k*n) n; lemma_div_mod a n } + (a + k*n - (a+k*n)%n) - (a - a%n); + == {} + k*n - (a+k*n)%n + a%n; + == { lemma_mod_plus a k n } + k*n; + }; + lemma_cancel_mul ((a+k*n)/n - a/n) k n + +let lemma_div_mod_plus (a:int) (k:int) (n:pos) : Lemma ((a + k * n) / n = a / n + k /\ + (a + k * n) % n = a % n) = + lemma_div_plus a k n; + lemma_mod_plus a k n + +let add_div_mod_1 a n = + lemma_mod_plus a 1 n; + lemma_div_plus a 1 n + +let sub_div_mod_1 a n = + lemma_mod_plus a (-1) n; + lemma_div_plus a (-1) n + +#push-options "--smtencoding.elim_box true --smtencoding.nl_arith_repr native" + +let cancel_mul_div (a:int) (n:nonzero) = () + +#pop-options + +let cancel_mul_mod (a:int) (n:pos) = + small_mod 0 n; + lemma_mod_plus 0 a n + +let lemma_mod_add_distr (a:int) (b:int) (n:pos) = + calc (==) { + (a + b%n) % n; + == { lemma_mod_plus (a + (b % n)) (b / n) n } + (a + b%n + n * (b/n)) % n; + == { lemma_div_mod b n } + (a + b) % n; + } + +let lemma_mod_sub_distr (a:int) (b:int) (n:pos) = + calc (==) { + (a - b%n) % n; + == { lemma_mod_plus (a - (b % n)) (-(b / n)) n } + (a - b%n + n * (-(b/n))) % n; + == { neg_mul_right n (b/n) } + (a - b%n - n * (b/n)) % n; + == { lemma_div_mod b n } + (a - b) % n; + } + +let lemma_mod_sub_0 a = () + +let lemma_mod_sub_1 a b = + calc (==) { + (-a) % b; + == { lemma_mod_plus (-a) 1 b } + ((-a) + 1*b) % b; + == {} + (b - a) % b; + == { small_mod (b-a) b } + b - a; + == { small_mod a b } + b - a%b; + } + +let lemma_mod_mul_distr_l a b n = + calc 
(==) { + (a * b) % n; + == { lemma_div_mod a n } + ((n * (a/n) + a%n) * b) % n; + == { distributivity_add_left (n * (a/n)) (a%n) b } + (n * (a/n) * b + (a%n) * b) % n; + == { paren_mul_right n (a/n) b; swap_mul ((a/n) * b) n } + ((a%n) * b + ((a/n) * b) * n) % n; + == { lemma_mod_plus ((a%n) * b) ((a/n) * b) n } + ((a%n) * b) % n; + } + +let lemma_mod_mul_distr_r (a:int) (b:int) (n:pos) = + calc (==) { + (a * b) % n; + == { swap_mul a b } + (b * a) % n; + == { lemma_mod_mul_distr_l b a n } + (b%n * a) % n; + == { swap_mul a (b%n) } + (a * (b%n)) % n; + } + +let lemma_mod_injective p a b = () + +let lemma_mul_sub_distr a b c = + distributivity_sub_right a b c + +let lemma_div_exact a p = () + +let div_exact_r (a:int) (n:pos) = lemma_div_exact a n + +let lemma_mod_spec a p = + calc (==) { + (a - a%p)/p; + == { lemma_div_mod a p } + (p*(a/p))/p; + == { cancel_mul_div (a/p) p } + a/p; + } + +let lemma_mod_spec2 a p = + calc (==) { + (a % p) + ((a - (a % p)) / p) * p; + == { lemma_mod_spec a p } + (a % p) + (a / p) * p; + == { lemma_div_mod a p } + a; + } + +let lemma_mod_plus_distr_l a b p = + let q = (a - (a % p)) / p in + lemma_mod_spec2 a p; + lemma_mod_plus (a % p + b) q p + +let lemma_mod_plus_distr_r a b p = + lemma_mod_plus_distr_l b a p + +let lemma_mod_mod a b p = + lemma_mod_lt b p; + modulo_lemma (b % p) p + +(* * Lemmas about multiplication, division and modulo. **) +(* * This part focuses on the situation where **) +(* * dividend: nat divisor: pos **) +(* * TODO: add triggers for certain lemmas. **) + +(* Lemma: Definition of euclidean division *) +let euclidean_division_definition a b = () + +(* Lemma: Propriety about modulo *) +let modulo_range_lemma a b = () + +let small_modulo_lemma_1 a b = () + +let small_modulo_lemma_2 a b = () + +let small_division_lemma_1 a b = () + +let small_division_lemma_2 (a:int) (n:pos) = lemma_div_mod a n + +(* Lemma: Multiplication by a positive integer preserves order *) +let multiplication_order_lemma a b p = () + +(* Lemma: Propriety about multiplication after division *) +let division_propriety a b = () + +(* Internal lemmas for proving the definition of division *) +let division_definition_lemma_1 a b m = + if a / b - 1 < 0 then () else begin + division_propriety a b; + multiplication_order_lemma m (a / b - 1) b + end + +let division_definition_lemma_2 a b m = + division_propriety a b; + multiplication_order_lemma (a / b + 1) m b + +(* Lemma: Definition of division *) +let division_definition a b m = + division_definition_lemma_1 a b m; + division_definition_lemma_2 a b m + +(* Lemma: (a * b) / b = a; identical to `cancel_mul_div` above *) +let multiple_division_lemma (a:int) (n:nonzero) = cancel_mul_div a n + +(* Lemma: (a * b) % b = 0 *) +let multiple_modulo_lemma (a:int) (n:pos) = cancel_mul_mod a n + +(* Lemma: Division distributivity under special condition *) +let division_addition_lemma a b n = division_definition (a + n * b) b (a / b + n) + +(* Lemma: Modulo distributivity *) +let modulo_distributivity a b c = + calc (==) { + (a + b) % c; + == { lemma_mod_plus_distr_l a b c } + ((a % c) + b) % c; + == { lemma_mod_plus_distr_r (a % c) b c } + ((a % c) + (b % c)) % c; + } + +let lemma_div_le a b d = + calc (==>) { + (a <= b) <: Type0; + ==> { lemma_div_mod a d; lemma_div_mod b d } + d * (a/d) + a%d <= d * (b/d) + b%d; + ==> {} + d * (a/d) - d * (b/d) <= b%d - a%d; + ==> {} + d * (a/d - b/d) <= b%d - a%d; + ==> { (* a%d >= 0, and b%d < d*) } + d * (a/d - b/d) < d; + ==> {} + a/d - b/d <= 0; + } + +(* Lemma: Division distributivity under 
special condition *) +let division_sub_lemma (a:int) (n:pos) (b:nat) = + neg_mul_left b n; + lemma_div_plus a (-b) n + +let lemma_mod_plus_mul_distr a b c p = + calc (==) { + ((a + b) * c) % p; + == { lemma_mod_mul_distr_l (a + b) c p } + (((a + b) % p) * c) % p; + == { lemma_mod_mul_distr_r ((a + b) % p) c p } + (((a + b) % p) * (c % p)) % p; + == { modulo_distributivity a b p } + ((((a % p) + (b % p)) % p) * (c % p)) % p; + } + +(* Lemma: Modulo distributivity under special condition *) +let modulo_addition_lemma (a:int) (n:pos) (b:int) = lemma_mod_plus a b n + +(* Lemma: Modulo distributivity under special condition *) +let lemma_mod_sub (a:int) (n:pos) (b:int) = + neg_mul_left b n; + lemma_mod_plus a (-b) n + +let mod_mult_exact (a:int) (n:pos) (q:pos) = + calc (==) { + a % n; + == { lemma_div_mod a (n * q) } + ((n * q) * (a / (n * q)) + a % (n * q)) % n; + == { (* hyp *) } + ((n * q) * (a / (n * q))) % n; + == { paren_mul_right n q (a / (n * q)); + swap_mul n (q * (a / (n * q))) } + ((q * (a / (n * q))) * n) % n; + == { multiple_modulo_lemma (q * (a / (n*q))) n } + 0; + } + +let mod_mul_div_exact (a:int) (b:pos) (n:pos) = + calc (==) { + (a / b) % n; + == { lemma_div_mod a (b * n) (* + hyp *) } + (((b*n)*(a / (b*n))) / b) % n; + == { paren_mul_right b n (a / (b*n)) } + ((b*(n*(a / (b*n)))) / b) % n; + == { cancel_mul_div (n * (a / (b * n))) b } + (n*(a / (b*n))) % n; + == { cancel_mul_mod (a / (b*n)) n } + 0; + } + +#push-options "--fuel 1" +let mod_pow2_div2 (a:int) (m:pos) : Lemma + (requires a % pow2 m == 0) + (ensures (a / 2) % pow2 (m - 1) == 0) + = + mod_mul_div_exact a 2 (pow2 (m - 1)) +#pop-options + +private val lemma_div_lt_cancel (a : int) (b : pos) (n : int) : + Lemma (requires (a < b * n)) + (ensures (a / b < n)) + +private let lemma_div_lt_cancel a b n = + (* by contradiction *) + if a / b >= n then begin + calc (>=) { + a; + >= { slash_decr_axiom a b } + (a / b) * b; + >= {} + n * b; + }; + assert False + end + +private val lemma_mod_mult_zero (a : int) (b : pos) (c : pos) : Lemma ((a % (b * c)) / b / c == 0) +private let lemma_mod_mult_zero a b c = + (* < 1 *) + lemma_mod_lt a (b * c); + lemma_div_lt_cancel (a % (b * c)) b c; + lemma_div_lt_cancel ((a % (b * c)) / b) c 1; + + (* >= 0 *) + nat_over_pos_is_nat (a % (b * c)) b; + nat_over_pos_is_nat ((a % (b * c)) / b) c; + () + +(* Lemma: Divided by a product is equivalent to being divided one by one *) +let division_multiplication_lemma (a:int) (b:pos) (c:pos) = + calc (==) { + a / b / c; + == { lemma_div_mod a (b * c) } + ((b * c) * (a / (b * c)) + a % (b * c)) / b / c; + == { paren_mul_right b c (a / (b * c)) } + (b * (c * (a / (b * c))) + a % (b * c)) / b / c; + == { lemma_div_plus (a % (b * c)) (c * (a / (b * c))) b } + (c * (a / (b * c)) + ((a % (b * c)) / b)) / c; + == { lemma_div_plus ((a % (b * c)) / b) (a / (b * c)) c } + (a / (b * c)) + (a % (b * c)) / b / c; + == { lemma_mod_mult_zero a b c } + a / (b * c); + } + +private val cancel_fraction (a:int) (b:pos) (c:pos) : Lemma ((a * c) / (b * c) == a / b) +private let cancel_fraction a b c = + calc (==) { + (a * c) / (b * c); + == { swap_mul b c } + (a * c) / (c * b); + == { division_multiplication_lemma (a * c) c b } + ((a * c) / c) / b; + == { cancel_mul_div a c } + a / b; + } + +let modulo_scale_lemma a b c = + calc (==) { + (a * b) % (b * c); + == { lemma_div_mod (a * b) (b * c) } + a * b - (b * c) * ((a * b) / (b * c)); + == { cancel_fraction a c b } + a * b - (b * c) * (a / c); + == { paren_mul_right b c (a / c) } + a * b - b * (c * (a / c)); + == { 
swap_mul b (c * (a / c)); distributivity_sub_left a (c * (a / c)) b } + (a - c * (a / c)) * b; + == { lemma_div_mod a c } + (a % c) * b; + } + +let lemma_mul_pos_pos_is_pos (x:pos) (y:pos) : Lemma (x*y > 0) = () +let lemma_mul_nat_pos_is_nat (x:nat) (y:pos) : Lemma (x*y >= 0) = () + +let modulo_division_lemma_0 (a:nat) (b:pos) (c:pos) : Lemma + (a / (b*c) <= a /\ (a - (a / (b * c)) * (b * c)) / b = a / b - ((a / (b * c)) * c)) + = slash_decr_axiom a (b*c); + calc (==) { + (a / (b*c)) * (b * c); + == { swap_mul b c } + (a / (b*c)) * (c * b); + == { paren_mul_right (a / (b*c)) c b } + ((a / (b*c)) * c) * b; + }; + cut ((a / (b*c)) * (b * c) = ((a / (b * c)) * c) * b); + lemma_div_mod a (b*c); + division_sub_lemma a b ((a / (b*c)) * c); + () + +let modulo_division_lemma a b c = + calc (==) { + (a % (b * c)) / b; + == { lemma_div_mod a (b * c) } + (a - (b * c) * (a / (b * c))) / b; + == { paren_mul_right b c ((a / (b * c))); neg_mul_right b (c * (a / (b * c))) } + (a + b * (-(c * (a / (b * c))))) / b; + == { lemma_div_plus a (-(c * (a / (b * c)))) b } + (a / b) - c * (a / (b * c)); + == { division_multiplication_lemma a b c } + (a / b) - c * ((a / b) / c); + == { lemma_div_mod (a/b) c } + (a / b) % c; + } + +let modulo_modulo_lemma (a:int) (b:pos) (c:pos) = + pos_times_pos_is_pos b c; + calc (==) { + (a % (b * c)) % b; + == { calc (==) { + a % (b * c); + == { lemma_div_mod a (b * c) } + a - (b * c) * (a / (b * c)); + == { paren_mul_right b c (a / (b * c)) } + a - b * (c * (a / (b * c))); + }} + (a - b * (c * (a / (b * c)))) % b; + == { () } + (a + (- (b * (c * (a / (b * c)))))) % b; + == { neg_mul_right b (c * (a / (b * c))) } + (a + (b * (-c * (a / (b * c))))) % b; + == { () } + (a + (-c * (a / (b * c))) * b) % b; + == { lemma_mod_plus a (-c * (a / (b * c))) b} + a % b; + } + +let pow2_multiplication_division_lemma_1 a b c = + pow2_plus (c - b) b; + paren_mul_right a (pow2 (c - b)) (pow2 b); + paren_mul_left a (pow2 (c - b)) (pow2 b); + multiple_division_lemma (a * pow2 (c - b)) (pow2 b) + +let pow2_multiplication_division_lemma_2 a b c = + pow2_plus c (b - c); + division_multiplication_lemma (a * pow2 c) (pow2 c) (pow2 (b - c)); + multiple_division_lemma a (pow2 c) + +let pow2_multiplication_modulo_lemma_1 a b c = + pow2_plus (c - b) b; + paren_mul_right a (pow2 (c - b)) (pow2 b); + paren_mul_left a (pow2 (c - b)) (pow2 b); + multiple_modulo_lemma (a * pow2 (c - b)) (pow2 b) + +let pow2_multiplication_modulo_lemma_2 a b c = + calc (==) { + (a * pow2 c) % pow2 b; + == {} + (a * pow2 c) % pow2 (c + (b-c)); + == { pow2_plus c (b-c) } + (a * pow2 c) % (pow2 c * pow2 (b-c)); + == { modulo_scale_lemma a (pow2 c) (pow2 (b-c)) } + (a % pow2 (b - c)) * pow2 c; + } + +let pow2_modulo_division_lemma_1 a b c = + pow2_plus (c - b) b; + modulo_division_lemma a (pow2 b) (pow2 (c - b)) + +let pow2_modulo_division_lemma_2 a b c = + pow2_le_compat b c; + small_division_lemma_1 (a % pow2 c) (pow2 b) + +let pow2_modulo_modulo_lemma_1 a b c = + pow2_plus (c - b) b; + modulo_modulo_lemma a (pow2 b) (pow2 (c - b)) + +let pow2_modulo_modulo_lemma_2 a b c = + pow2_le_compat b c; + small_modulo_lemma_1 (a % pow2 c) (pow2 b) + +let modulo_add p a b c = + modulo_distributivity a b p; + modulo_distributivity a c p + +let lemma_mod_twice a p = lemma_mod_mod (a % p) a p + +let modulo_sub p a b c = + modulo_add p (-a) (a + b) (a + c) + +let mod_add_both (a:int) (b:int) (x:int) (n:pos) = + calc (==) { + (a + x) % n; + == { modulo_distributivity a x n } + ((a % n) + (x % n)) % n; + == { (* hyp *) } + ((b % n) + (x % n)) % n; 
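+ (* the step above is justified by the hypothesis a % n == b % n, i.e. the requires clause of mod_add_both *)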
+ == { modulo_distributivity b x n } + (b + x) % n; + } + +let lemma_mod_plus_injective (n:pos) (a:int) (b:nat) (c:nat) = + small_mod b n; + small_mod c n; + mod_add_both (a + b) (a + c) (-a) n + +(* Another characterization of the modulo *) +let modulo_sub_lemma a b c = + calc (==) { + b; + == { modulo_lemma b c } + b % c; + == { lemma_mod_twice b c } + (b%c) % c; + == { (* hyp *) } + (b%c + (a-b)%c) % c; + == { modulo_distributivity b (a-b) c } + (b+(a-b)) % c; + == {} + a % c; + } diff --git a/stage0/ulib/FStar.Math.Lemmas.fsti b/stage0/ulib/FStar.Math.Lemmas.fsti new file mode 100644 index 00000000000..162041482a3 --- /dev/null +++ b/stage0/ulib/FStar.Math.Lemmas.fsti @@ -0,0 +1,407 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Math.Lemmas + +open FStar.Mul + +(* Lemma: definition of Euclidean division *) +val euclidean_div_axiom: a:int -> b:pos -> Lemma + (a - b * (a / b) >= 0 /\ a - b * (a / b) < b) + +val lemma_eucl_div_bound: a:int -> b:int -> q:int -> Lemma + (requires (a < q)) + (ensures (a + q * b < q * (b+1))) + +val lemma_mult_le_left: a:nat -> b:int -> c:int -> Lemma + (requires (b <= c)) + (ensures (a * b <= a * c)) + +val lemma_mult_le_right: a:nat -> b:int -> c:int -> Lemma + (requires (b <= c)) + (ensures (b * a <= c * a)) + +val lemma_mult_lt_left: a:pos -> b:int -> c:int -> Lemma + (requires (b < c)) + (ensures (a * b < a * c)) + +val lemma_mult_lt_right: a:pos -> b:int -> c:int -> Lemma + (requires (b < c)) + (ensures (b * a < c * a)) + +val lemma_mult_lt_sqr (n:nat) (m:nat) (k:nat{n < k && m < k}) + : Lemma (n * m < k * k) + +(* Lemma: multiplication on integers is commutative *) +val swap_mul: a:int -> b:int -> Lemma (a * b = b * a) + +val lemma_cancel_mul (a b : int) (n : pos) : Lemma (requires (a * n = b * n)) (ensures (a = b)) + +(* Lemma: multiplication is right distributive over addition *) +val distributivity_add_left: a:int -> b:int -> c:int -> Lemma + ((a + b) * c = a * c + b * c) + +(* Lemma: multiplication is left distributive over addition *) +val distributivity_add_right: a:int -> b:int -> c:int -> Lemma + (a * (b + c) = a * b + a * c) + +(* Lemma: multiplication is associative, hence parenthesizing is meaningless *) +(* GM: This is really just an identity since the LHS is associated to the left *) +val paren_mul_left: a:int -> b:int -> c:int -> Lemma + (a * b * c = (a * b) * c) + +(* Lemma: multiplication is associative, hence parenthesizing is meaningless *) +val paren_mul_right: a:int -> b:int -> c:int -> Lemma + (a * b * c = a * (b * c)) + +(* Lemma: addition is associative, hence parenthesizing is meaningless *) +val paren_add_left: a:int -> b:int -> c:int -> Lemma + (a + b + c = (a + b) + c) + +(* Lemma: addition is associative, hence parenthesizing is meaningless *) +val paren_add_right: a:int -> b:int -> c:int -> Lemma + (a + b + c = a + (b + c)) + +val addition_is_associative: a:int -> b:int -> c:int -> Lemma + (a + b + c = (a + b) + c /\ a + b + c = a + (b + c)) + +val 
subtraction_is_distributive: a:int -> b:int -> c:int -> Lemma + (a - b + c = (a - b) + c /\ + a - b - c = a - (b + c) /\ + a - b - c = (a - b) - c /\ + a + (-b - c) = a - b - c /\ + a - (b - c) = a - b + c) + +val swap_add_plus_minus: a:int -> b:int -> c:int -> Lemma + (a + b - c = (a - c) + b) + +(* Lemma: minus applies to the whole term *) +val neg_mul_left: a:int -> b:int -> Lemma (-(a * b) = (-a) * b) + +(* Lemma: minus applies to the whole term *) +val neg_mul_right: a:int -> b:int -> Lemma (-(a * b) = a * (-b)) + +val swap_neg_mul: a:int -> b:int -> Lemma ((-a) * b = a * (-b)) + +(* Lemma: multiplication is left distributive over subtraction *) +val distributivity_sub_left: a:int -> b:int -> c:int -> + Lemma ((a - b) * c = a * c - b * c) + +(* Lemma: multiplication is right distributive over subtraction *) +val distributivity_sub_right: a:int -> b:int -> c:int -> + Lemma ((a * (b - c) = a * b - a * c)) + +(* Lemma: multiplication precedence on addition *) +val mul_binds_tighter: a:int -> b:int -> c:int -> Lemma (a + (b * c) = a + b * c) + +val lemma_abs_mul : a:int -> b:int -> Lemma (abs a * abs b = abs (a * b)) + +val lemma_abs_bound : a:int -> b:nat -> Lemma (abs a < b <==> -b < a /\ a < b) + +(* Lemma: multiplication keeps symmetric bounds : + b > 0 && d > 0 && -b < a < b && -d < c < d ==> - b * d < a * c < b * d *) +val mul_ineq1: a:int -> b:nat -> c:int -> d:nat -> Lemma + (requires (-b < a /\ a < b /\ + -d < c /\ c < d)) + (ensures (-(b * d) < a * c /\ a * c < b * d)) + +(* Zero is neutral for addition *) +val add_zero_left_is_same (n : int) : Lemma(0 + n = n) +val add_zero_right_is_same (n : int) : Lemma(n + 0 = n) + +(* One is neutral for multiplication *) +val mul_one_left_is_same (n : int) : Lemma(1 * n = n) +val mul_one_right_is_same (n : int) : Lemma(n * 1 = n) + +(* Multiplying by zero gives zero *) +val mul_zero_left_is_zero (n : int) : Lemma(0 * n = 0) +val mul_zero_right_is_zero (n : int) : Lemma(n * 0 = 0) + +val nat_times_nat_is_nat: a:nat -> b:nat -> Lemma (a * b >= 0) + +val pos_times_pos_is_pos: a:pos -> b:pos -> Lemma (a * b > 0) + +val nat_over_pos_is_nat: a:nat -> b:pos -> Lemma (a / b >= 0) + +val nat_plus_nat_equal_zero_lemma: a:nat -> b:nat{a + b = 0} -> Lemma(a = 0 /\ b = 0) + +val int_times_int_equal_zero_lemma: a:int -> b:int{a * b = 0} -> Lemma(a = 0 \/ b = 0) + +val pow2_double_sum: n:nat -> Lemma (pow2 n + pow2 n = pow2 (n + 1)) + +val pow2_double_mult: n:nat -> Lemma (2 * pow2 n = pow2 (n + 1)) + +val pow2_lt_compat: n:nat -> m:nat -> Lemma + (requires (m < n)) + (ensures (pow2 m < pow2 n)) + (decreases m) + +val pow2_le_compat: n:nat -> m:nat -> Lemma + (requires (m <= n)) + (ensures (pow2 m <= pow2 n)) + +val pow2_plus: n:nat -> m:nat -> Lemma + (ensures (pow2 n * pow2 m = pow2 (n + m))) + (decreases n) + +(* Lemma : definition of the exponential property of pow2 *) +val pow2_minus: n:nat -> m:nat{ n >= m } -> Lemma + ((pow2 n) / (pow2 m) = pow2 (n - m)) + +(* Lemma: loss of precision in euclidean division *) +val multiply_fractions (a:int) (n:nonzero) : Lemma (n * ( a / n ) <= a) + +(** Same as `small_mod` *) +val modulo_lemma: a:nat -> b:pos -> Lemma (requires (a < b)) (ensures (a % b = a)) + +(** Same as `lemma_div_def` in Math.Lib *) +val lemma_div_mod: a:int -> p:nonzero -> Lemma (a = p * (a / p) + a % p) + +val lemma_mod_lt: a:int -> p:pos -> Lemma (0 <= a % p /\ a % p < p /\ (a >= 0 ==> a % p <= a)) + +val lemma_div_lt_nat: a:int -> n:nat -> m:nat{m <= n} -> + Lemma (requires (a < pow2 n)) + (ensures (a / pow2 m < pow2 (n-m))) + +val 
lemma_div_lt (a:int) (n:nat) (m:nat) : Lemma + (requires m <= n /\ a < pow2 n) + (ensures a / pow2 m < pow2 (n-m)) + +val bounded_multiple_is_zero (x:int) (n:pos) : Lemma + (requires -n < x * n /\ x * n < n) + (ensures x == 0) + +val small_div (a:nat) (n:pos) : Lemma (requires a < n) (ensures a / n == 0) + +val small_mod (a:nat) (n:pos) : Lemma (requires a < n) (ensures a % n == a) + +val lt_multiple_is_equal (a:nat) (b:nat) (x:int) (n:nonzero) : Lemma + (requires a < n /\ b < n /\ a == b + x * n) + (ensures a == b /\ x == 0) + +val lemma_mod_plus (a:int) (k:int) (n:pos) : Lemma ((a + k * n) % n = a % n) + +val lemma_div_plus (a:int) (k:int) (n:pos) : Lemma ((a + k * n) / n = a / n + k) + +val lemma_div_mod_plus (a:int) (k:int) (n:pos) + : Lemma ((a + k * n) / n = a / n + k /\ + (a + k * n) % n = a % n) + +val add_div_mod_1 (a:int) (n:pos) + : Lemma ((a + n) % n == a % n /\ + (a + n) / n == a / n + 1) + +val sub_div_mod_1 (a:int) (n:pos) + : Lemma ((a - n) % n == a % n /\ + (a - n) / n == a / n - 1) + +val cancel_mul_div (a:int) (n:nonzero) : Lemma ((a * n) / n == a) + +val cancel_mul_mod (a:int) (n:pos) : Lemma ((a * n) % n == 0) + +val lemma_mod_add_distr (a:int) (b:int) (n:pos) : Lemma ((a + b % n) % n = (a + b) % n) + +val lemma_mod_sub_distr (a:int) (b:int) (n:pos) : Lemma ((a - b % n) % n = (a - b) % n) + +val lemma_mod_sub_0: a:pos -> Lemma ((-1) % a = a - 1) + +val lemma_mod_sub_1: a:pos -> b:pos{a < b} -> Lemma ((-a) % b = b - (a%b)) + +val lemma_mod_mul_distr_l (a:int) (b:int) (n:pos) : Lemma + (requires True) + (ensures (a * b) % n = ((a % n) * b) % n) + + +val lemma_mod_mul_distr_r (a:int) (b:int) (n:pos) : Lemma ((a * b) % n = (a * (b % n)) % n) + +val lemma_mod_injective: p:pos -> a:nat -> b:nat -> Lemma + (requires (a < p /\ b < p /\ a % p = b % p)) + (ensures (a = b)) + +val lemma_mul_sub_distr: a:int -> b:int -> c:int -> Lemma + (a * b - a * c = a * (b - c)) + +val lemma_div_exact: a:int -> p:pos -> Lemma + (requires (a % p = 0)) + (ensures (a = p * (a / p))) + +val div_exact_r (a:int) (n:pos) : Lemma + (requires (a % n = 0)) + (ensures (a = (a / n) * n)) + +val lemma_mod_spec: a:int -> p:pos -> Lemma + (a / p = (a - (a % p)) / p) + +val lemma_mod_spec2: a:int -> p:pos -> Lemma + (let q:int = (a - (a % p)) / p in a = (a % p) + q * p) + +val lemma_mod_plus_distr_l: a:int -> b:int -> p:pos -> Lemma + ((a + b) % p = ((a % p) + b) % p) + +val lemma_mod_plus_distr_r: a:int -> b:int -> p:pos -> Lemma + ((a + b) % p = (a + (b % p)) % p) + +val lemma_mod_mod: a:int -> b:int -> p:pos -> Lemma + (requires (a = b % p)) + (ensures (a % p = b % p)) + +(* * Lemmas about multiplication, division and modulo. **) +(* * This part focuses on the situation where **) +(* * dividend: nat divisor: pos **) +(* * TODO: add triggers for certain lemmas. 
**) + +(* Lemma: Definition of Euclidean division *) +val euclidean_division_definition: a:int -> b:nonzero -> + Lemma (a = (a / b) * b + a % b) + +(* Lemma: Property of modulo *) +val modulo_range_lemma: a:int -> b:pos -> + Lemma (a % b >= 0 && a % b < b) + +val small_modulo_lemma_1: a:nat -> b:nonzero -> + Lemma (requires a < b) (ensures a % b = a) + +val small_modulo_lemma_2: a:int -> b:pos -> + Lemma (requires a % b = a) (ensures a < b) + +val small_division_lemma_1: a:nat -> b:nonzero -> + Lemma (requires a < b) (ensures a / b = 0) + +val small_division_lemma_2 (a:int) (n:pos) : Lemma + (requires a / n = 0) + (ensures 0 <= a /\ a < n) + +(* Lemma: Multiplication by a positive integer preserves order *) +val multiplication_order_lemma: a:int -> b:int -> p:pos -> + Lemma (a >= b <==> a * p >= b * p) + +(* Lemma: Property of multiplication after division *) +val division_propriety: a:int -> b:pos -> + Lemma (a - b < (a / b) * b && (a / b) * b <= a) + +(* Internal lemmas for proving the definition of division *) +val division_definition_lemma_1: a:int -> b:pos -> m:int{a - b < m * b} -> + Lemma (m > a / b - 1) + +val division_definition_lemma_2: a:int -> b:pos -> m:int{m * b <= a} -> + Lemma (m < a / b + 1) + +(* Lemma: Definition of division *) +val division_definition: a:int -> b:pos -> m:int{a - b < m * b && m * b <= a} -> + Lemma (m = a / b) + +(* Lemma: (a * b) / b = a; identical to `cancel_mul_div` above *) +val multiple_division_lemma (a:int) (n:nonzero) : Lemma ((a * n) / n = a) + +(* Lemma: (a * b) % b = 0 *) +val multiple_modulo_lemma (a:int) (n:pos) : Lemma ((a * n) % n = 0) + +(* Lemma: Division distributivity under special condition *) +val division_addition_lemma: a:int -> b:pos -> n:int -> + Lemma ( (a + n * b) / b = a / b + n ) + +(* Lemma: Modulo distributivity *) +val modulo_distributivity: a:int -> b:int -> c:pos -> Lemma ((a + b) % c == (a % c + b % c) % c) + +val lemma_div_le: a:int -> b:int -> d:pos -> + Lemma (requires (a <= b)) + (ensures (a / d <= b / d)) + +(* Lemma: Division distributivity under special condition *) +val division_sub_lemma (a:int) (n:pos) (b:nat) : Lemma ((a - b * n) / n = a / n - b) + +val lemma_mod_plus_mul_distr: a:int -> b:int -> c:int -> p:pos -> Lemma + (((a + b) * c) % p = ((((a % p) + (b % p)) % p) * (c % p)) % p) + +(* Lemma: Modulo distributivity under special condition *) +val modulo_addition_lemma (a:int) (n:pos) (b:int) : Lemma ((a + b * n) % n = a % n) + +(* Lemma: Modulo distributivity under special condition *) +val lemma_mod_sub (a:int) (n:pos) (b:int) : Lemma (ensures (a - b * n) % n = a % n) + +val mod_mult_exact (a:int) (n:pos) (q:pos) : Lemma + (requires (a % (n * q) == 0)) + (ensures a % n == 0) + +val mod_mul_div_exact (a:int) (b:pos) (n:pos) : Lemma + (requires (a % (b * n) == 0)) + (ensures (a / b) % n == 0) + +val mod_pow2_div2 (a:int) (m:pos) : Lemma + (requires a % pow2 m == 0) + (ensures (a / 2) % pow2 (m - 1) == 0) + +(* Lemma: Dividing by a product is equivalent to dividing by each factor in turn *) +val division_multiplication_lemma (a:int) (b:pos) (c:pos) : Lemma + (a / (b * c) = (a / b) / c) + +val modulo_scale_lemma : a:int -> b:pos -> c:pos -> Lemma ((a * b) % (b * c) == (a % c) * b) + +val lemma_mul_pos_pos_is_pos (x:pos) (y:pos) : Lemma (x*y > 0) +val lemma_mul_nat_pos_is_nat (x:nat) (y:pos) : Lemma (x*y >= 0) + +val modulo_division_lemma: a:nat -> b:pos -> c:pos -> + Lemma ((a % (b * c)) / b = (a / b) % c) + +val modulo_modulo_lemma (a:int) (b:pos) (c:pos) : Lemma + ((a % (b * c)) % b = a % b) + +val 
pow2_multiplication_division_lemma_1: a:int -> b:nat -> c:nat{c >= b} -> + Lemma ( (a * pow2 c) / pow2 b = a * pow2 (c - b)) + +val pow2_multiplication_division_lemma_2: a:int -> b:nat -> c:nat{c <= b} -> + Lemma ( (a * pow2 c) / pow2 b = a / pow2 (b - c)) + +val pow2_multiplication_modulo_lemma_1: a:int -> b:nat -> c:nat{c >= b} -> + Lemma ( (a * pow2 c) % pow2 b = 0 ) + +val pow2_multiplication_modulo_lemma_2: a:int -> b:nat -> c:nat{c <= b} -> + Lemma ( (a * pow2 c) % pow2 b = (a % pow2 (b - c)) * pow2 c ) + +val pow2_modulo_division_lemma_1: a:nat -> b:nat -> c:nat{c >= b} -> + Lemma ( (a % pow2 c) / pow2 b = (a / pow2 b) % (pow2 (c - b)) ) + +val pow2_modulo_division_lemma_2: a:int -> b:nat -> c:nat{c <= b} -> + Lemma ( (a % pow2 c) / pow2 b = 0 ) + +val pow2_modulo_modulo_lemma_1: a:int -> b:nat -> c:nat{c >= b} -> + Lemma ( (a % pow2 c) % pow2 b = a % pow2 b ) + +val pow2_modulo_modulo_lemma_2: a:int -> b:nat -> c:nat{c <= b} -> + Lemma ( (a % pow2 c) % pow2 b = a % pow2 c ) + +val modulo_add : p:pos -> a:int -> b:int -> c:int -> Lemma + (requires (b % p = c % p)) + (ensures ((a + b) % p = (a + c) % p)) + +val lemma_mod_twice : a:int -> p:pos -> Lemma ((a % p) % p == a % p) + +val modulo_sub : p:pos -> a:int -> b:int -> c:int -> Lemma + (requires ((a + b) % p = (a + c) % p)) + (ensures (b % p = c % p)) + +val mod_add_both (a:int) (b:int) (x:int) (n:pos) : Lemma + (requires a % n == b % n) + (ensures (a + x) % n == (b + x) % n) + +val lemma_mod_plus_injective (n:pos) (a:int) (b:nat) (c:nat) : Lemma + (requires b < n /\ c < n /\ (a + b) % n = (a + c) % n) + (ensures b = c) + +(* Another characterization of the modulo *) +val modulo_sub_lemma (a : int) (b : nat) (c : pos) : + Lemma + (requires (b < c /\ (a - b) % c = 0)) + (ensures (b = a % c)) \ No newline at end of file diff --git a/stage0/ulib/FStar.Math.Lib.fst b/stage0/ulib/FStar.Math.Lib.fst new file mode 100644 index 00000000000..2b9408631a6 --- /dev/null +++ b/stage0/ulib/FStar.Math.Lib.fst @@ -0,0 +1,146 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Math.Lib + +open FStar.Mul + +(* Lemma: defining property of the division operator *) +val lemma_div_def: a:nat -> b:pos -> Lemma (a = b * (a/b) + a % b) +let lemma_div_def a b = () + +private let mul_lemma (a:nat) (b:nat) (c:nat) : Lemma (requires (a <= b)) + (ensures (c * a <= c * b)) + = () + +private let mul_lemma' (a:nat) (b:nat) (c:pos) : Lemma (requires (c * a <= c * b)) + (ensures (a <= b)) + = () + +private let mul_div_lemma (a:nat) (b:pos) : Lemma (b * (a / b) <= a) = () + +val slash_decr_axiom: a:nat -> b:pos -> Lemma (a / b <= a) +let slash_decr_axiom a b = + mul_lemma 1 b a; + mul_div_lemma a b; + mul_lemma' (a / b) a b + +private let lemma_mul_minus_distr_l (a:int) (b:int) (c:int) : Lemma (a * (b - c) = a * b - a * c) + = () + +(* Axiom: definition of the "b divides c" relation *) +#reset-options "--z3rlimit 30" +val slash_star_axiom: a:nat -> b:pos -> c:nat -> Lemma + (requires (a * b = c)) + (ensures (a = c / b)) +let slash_star_axiom a b c = + lemma_div_def c b; + lemma_mul_minus_distr_l b a (c/b) + +#reset-options +val log_2: x:pos -> Tot nat +let rec log_2 x = + if x >= 2 then 1 + log_2 (x / 2) else 0 + +(* Function: power of x *) +val powx : x:int -> n:nat -> Tot int +let rec powx x n = + match n with + | 0 -> 1 + | n -> x * powx x (n - 1) + +(* Function: absolute value *) +val abs: x:int -> Tot (y:int{ (x >= 0 ==> y = x) /\ (x < 0 ==> y = -x) }) +let abs x = if x >= 0 then x else -x + +(* Function: maximum value *) +val max: x:int -> y:int -> Tot (z:int{ (x >= y ==> z = x) /\ (x < y ==> z = y) }) +let max x y = if x >= y then x else y + +(* Function: minimum value *) +val min: x:int -> y:int -> Tot (z:int{ (x >= y ==> z = y) /\ (x < y ==> z = x) }) +let min x y = if x >= y then y else x + +(* Function: standard Euclidean division; the remainder is always non-negative *) +val div: a:int -> b:pos -> Tot (c:int{(a < 0 ==> c < 0) /\ (a >= 0 ==> c >= 0)}) +let div a b = + if a < 0 then + begin + slash_decr_axiom (-a) b; + if a % b = 0 then - (-a / b) + else - (-a / b) - 1 + end + else a / b + +(* Function: equivalent of the '/' operator in C, hence the remainder can be negative *) +val div_non_eucl: a:int -> b:pos -> + Tot (q:int{ ( a >= 0 ==> q = a / b ) /\ ( a < 0 ==> q = -((-a)/b) ) }) +let div_non_eucl a b = + if a < 0 then 0 - ((0 - a) / b) + else a / b + +(* The equivalent of the C << operator *) +val shift_left: v:int -> i:nat -> Tot (res:int{res = v * (pow2 i)}) +let shift_left v i = + v * (pow2 i) + +(* The OCaml asr operator *) +val arithmetic_shift_right: v:int -> i:nat -> Tot (res:int{ res = div v (pow2 i) }) +let arithmetic_shift_right v i = + div v (pow2 i) + +(* Case of C cast functions ? *) +(* Implemented by "mod" in OCaml *) +val signed_modulo: v:int -> p:pos -> Tot (res:int{ res = v - ((div_non_eucl v p) * p) }) +let signed_modulo v p = + if v >= 0 then v % p + else 0 - ( (0-v) % p) + +val op_Plus_Percent : a:int -> p:pos -> + Tot (res:int{ (a >= 0 ==> res = a % p) /\ (a < 0 ==> res = -((-a) % p)) }) +let op_Plus_Percent a p = signed_modulo a p + +(** Useful lemmas for future proofs **) + +(* Lemmas about x^n *) +val powx_lemma1: a:int -> Lemma (powx a 1 = a) +let powx_lemma1 a = () + +val powx_lemma2: x:int -> n:nat -> m:nat -> Lemma + (powx x n * powx x m = powx x (n + m)) +let rec powx_lemma2 x n m = + let ass (x y z : int) : Lemma ((x*y)*z == x*(y*z)) = () in + match n with + | 0 -> () + | _ -> powx_lemma2 x (n-1) m; ass x (powx x (n-1)) (powx x m) + +(* Lemma: absolute value of product is the product of the absolute values *) +val abs_mul_lemma: a:int -> b:int -> Lemma (abs (a * b) = abs a * abs b) +let abs_mul_lemma a b = () + +(* Lemma: the absolute value of signed_modulo v p is bounded by p *) +val signed_modulo_property: v:int -> p:pos -> Lemma (abs (signed_modulo v p ) < p) +let signed_modulo_property v p = () + +(* Lemma: the output of non-Euclidean division is no larger in absolute value than its input *) +val div_non_eucl_decr_lemma: a:int -> b:pos -> Lemma (abs (div_non_eucl a b) <= abs a) +let div_non_eucl_decr_lemma a b = + slash_decr_axiom (abs a) b + +(* Lemma: non-Euclidean division by a divisor larger than the dividend's absolute value yields 0 *) +val div_non_eucl_bigger_denom_lemma: a:int -> b:pos -> Lemma + (requires (b > abs a)) + (ensures (div_non_eucl a b = 0)) +let div_non_eucl_bigger_denom_lemma a b = () diff --git a/stage0/ulib/FStar.Matrix.fst b/stage0/ulib/FStar.Matrix.fst new file mode 100644 index 00000000000..c44a524868f --- /dev/null +++ b/stage0/ulib/FStar.Matrix.fst @@ -0,0 +1,1174 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. Rozanov +*) + +(* + In this module we provide basic definitions for working with matrices via + seqs, and define the transpose transform together with theorems asserting + that the matrix fold of a matrix equals the fold of its transpose. +*) + + +module FStar.Matrix + +module CE = FStar.Algebra.CommMonoid.Equiv +module CF = FStar.Algebra.CommMonoid.Fold +module SP = FStar.Seq.Permutation +module SB = FStar.Seq.Base +module SProp = FStar.Seq.Properties +module ML = FStar.Math.Lemmas + +open FStar.IntegerIntervals +open FStar.Mul +open FStar.Seq.Equiv + +(* + A little glossary that might help reading this file + We don't list common terms like associativity and reflexivity. 
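+ For intuition about the terms below: if s = [a; b; c] then un_snoc s = ([a; b], c), liat s = [a; b], snoc ([a; b], c) rebuilds s, and foldm_snoc cm s computes c `cm.mult` (b `cm.mult` (a `cm.mult` cm.unit)), i.e. the fold starts from the last element.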
+ + lhs, rhs left hand side, right hand side + liat subsequence of all elements except the last (tail read backwards) + snoc construction of sequence from a pair (liat, last) (cons read backwards) + un_snoc decomposition of sequence into a pair (liat, last) + foldm sum or product of all elements in a sequence using given CommMonoid + foldm_snoc recursively defined sum/product of a sequence, starting from the last element + congruence respect of equivalence ( = ) by a binary operation ( * ), a=b ==> a*x = b*x + unit identity element (xu=x, ux=x) (not to be confused with invertible elements) +*) + +type matrix c m n = z:SB.seq c { SB.length z = m*n } + +let seq_of_matrix #c #m #n mx = mx + +let ijth #c #m #n mx i j = SB.index mx (get_ij m n i j) + +let ijth_lemma #c #m #n mx i j + : Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j)) = () + +let matrix_of_seq #c m n s = s + +let foldm #c #eq #m #n cm mx = SP.foldm_snoc cm mx + +let matrix_fold_equals_fold_of_seq #c #eq #m #n cm mx + : Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)] + = eq.reflexivity (foldm cm mx) + +let matrix_fold_internal #c #eq #m #n (cm:CE.cm c eq) (mx: matrix c m n) + : Lemma (ensures foldm cm mx == SP.foldm_snoc cm mx) = () + +(* A flattened matrix (seq) constructed from generator function + Notice how the domains of both indices are strictly controlled. *) +let init #c (#m #n: pos) (generator: matrix_generator c m n) + : matrix_of generator = + let mn = m * n in + let generator_ij ij = generator (get_i m n ij) (get_j m n ij) in + let flat_indices = indices_seq mn in + let result = SProp.map_seq generator_ij flat_indices in + SProp.map_seq_len generator_ij flat_indices; + assert (SB.length result == SB.length flat_indices); + let aux (i: under m) (j: under n) + : Lemma (SB.index (SProp.map_seq generator_ij flat_indices) (get_ij m n i j) == generator i j) + = consistency_of_i_j m n i j; + consistency_of_ij m n (get_ij m n i j); + assert (generator_ij (get_ij m n i j) == generator i j); + SProp.map_seq_index generator_ij flat_indices (get_ij m n i j) in + let aux1 (ij: under mn) + : Lemma (SB.index (SProp.map_seq generator_ij flat_indices) ij == generator_ij ij) + = SProp.map_seq_index generator_ij flat_indices ij in + FStar.Classical.forall_intro aux1; + FStar.Classical.forall_intro_2 aux; + result + +private let matrix_seq #c #m #n (gen: matrix_generator c m n) : (t:SB.seq c{ (SB.length t = (m*n)) /\ + (forall (i: under m) (j: under n). SB.index t (get_ij m n i j) == gen i j) /\ + (forall(ij: under (m*n)). 
SB.index t ij == gen (get_i m n ij) (get_j m n ij)) +}) = init gen + +(* This auxiliary lemma establishes the decomposition of the seq-matrix + into the concatenation of its first (m-1) rows and its last row (thus snoc) *) +let matrix_append_snoc_lemma #c (#m #n: pos) (generator: matrix_generator c m n) + : Lemma (matrix_seq generator == (SB.slice (matrix_seq generator) 0 ((m-1)*n)) + `SB.append` + (SB.slice (matrix_seq generator) ((m-1)*n) (m*n))) + = SB.lemma_eq_elim (matrix_seq generator) + (SB.append (SB.slice (matrix_seq generator) 0 ((m-1)*n)) + (SB.slice (matrix_seq generator) ((m-1)*n) (m*n))) + +let matrix_seq_decomposition_lemma #c (#m:greater_than 1) (#n: pos) (generator: matrix_generator c m n) + : Lemma ((matrix_seq generator) == + SB.append (matrix_seq #c #(m-1) #n generator) + (SB.slice (matrix_seq generator) ((m-1)*n) (m*n))) + = SB.lemma_eq_elim (matrix_seq generator) + ((matrix_seq #c #(m-1) #n generator) `SB.append` + (SB.slice (matrix_seq generator) ((m-1)*n) (m*n))) + +(* This auxiliary lemma establishes the equality of the fold of the entire matrix + to the op of folds of (the submatrix of the first (m-1) rows) and (the last row). *) +let matrix_fold_snoc_lemma #c #eq + (#m: not_less_than 2) + (#n: pos) + (cm: CE.cm c eq) + (generator: matrix_generator c m n) + : Lemma (assert ((m-1)*n < m*n); + SP.foldm_snoc cm (matrix_seq generator) `eq.eq` + cm.mult (SP.foldm_snoc cm (matrix_seq #c #(m-1) #n generator)) + (SP.foldm_snoc cm (SB.slice (matrix_seq #c #m #n generator) ((m-1)*n) (m*n)))) + = SB.lemma_eq_elim (matrix_seq generator) + ((matrix_seq #c #(m-1) #n generator) `SB.append` + (SB.slice (matrix_seq generator) ((m-1)*n) (m*n))); + SP.foldm_snoc_append cm (matrix_seq #c #(m-1) #n generator) + (SB.slice (matrix_seq generator) ((m-1)*n) (m*n)) + +(* + There are many auxiliary lemmas like this that are extracted because + lemma_eq_elim invocations often impact verification speed more than + one might expect they would. 
+*) +let matrix_submatrix_lemma #c (#m: not_less_than 2) (#n: pos) + (generator: matrix_generator c m n) + : Lemma ((matrix_seq generator) == (matrix_seq (fun (i:under(m-1)) (j:under n) -> generator i j) + `SB.append` SB.init n (generator (m-1)))) + = SB.lemma_eq_elim (matrix_seq (fun (i:under (m-1)) (j:under n) -> generator i j)) + (matrix_seq #c #(m-1) #n generator); + SB.lemma_eq_elim (SB.slice (matrix_seq generator) ((m-1)*n) (m*n)) + (SB.init n (generator (m-1))); + matrix_seq_decomposition_lemma generator + +let matrix_seq_of_one_row_matrix #c #m #n (generator : matrix_generator c m n) + : Lemma (requires m==1) + (ensures matrix_seq generator == (SB.init n (generator 0))) = + SB.lemma_eq_elim (matrix_seq generator) (SB.init n (generator 0)) + +let one_row_matrix_fold_aux #c #eq #m #n (cm:CE.cm c eq) (generator : matrix_generator c m n) : Lemma + (requires m=1) + (ensures foldm cm (init generator) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) /\ + SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))) = + let lhs_seq = matrix_seq generator in + let rhs_seq = SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))) in + let lhs = SP.foldm_snoc cm (matrix_seq generator) in + let rhs = SP.foldm_snoc cm rhs_seq in + SP.foldm_snoc_singleton cm (SP.foldm_snoc cm (SB.init n (generator 0))); + SB.lemma_eq_elim (SB.create 1 (SP.foldm_snoc cm (SB.init n (generator 0)))) + (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))); + matrix_seq_of_one_row_matrix generator; + eq.symmetry rhs lhs + + +let fold_of_subgen_aux #c #eq (#m:pos{m>1}) #n (cm: CE.cm c eq) (gen: matrix_generator c m n) (subgen: matrix_generator c (m-1) n) : Lemma + (requires subgen == (fun (i: under (m-1)) (j: under n) -> gen i j)) + (ensures forall (i: under (m-1)). 
SP.foldm_snoc cm (SB.init n (subgen i)) == + SP.foldm_snoc cm (SB.init n (gen i))) = + let aux_pat (i: under (m-1)) : Lemma (SP.foldm_snoc cm (SB.init n (subgen i)) + == SP.foldm_snoc cm (SB.init n (gen i))) = + SB.lemma_eq_elim (SB.init n (subgen i)) (SB.init n (gen i)) in + Classical.forall_intro aux_pat + +let arithm_aux (m: pos{m>1}) (n: pos) : Lemma ((m-1)*n < m*n) = () + +let terminal_case_aux #c #eq (#p:pos{p=1}) #n (cm:CE.cm c eq) (generator: matrix_generator c p n) (m: pos{m<=p}) : Lemma + (ensures SP.foldm_snoc cm (SB.slice (seq_of_matrix (init generator)) 0 (m*n)) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i))))) + = one_row_matrix_fold_aux cm generator + +#push-options "--ifuel 0 --fuel 1 --z3rlimit 10" +let terminal_case_two_aux #c #eq (#p:pos) #n (cm:CE.cm c eq) (generator: matrix_generator c p n) (m: pos{m=1}) : Lemma + (ensures SP.foldm_snoc cm (SB.slice (seq_of_matrix (init generator)) 0 (m*n)) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i))))) + = + SP.foldm_snoc_singleton cm (SP.foldm_snoc cm (SB.init n (generator 0))); + assert (SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i)))) `eq.eq` + SP.foldm_snoc cm (SB.init n (generator 0))); + let line = SB.init n (generator 0) in + let slice = SB.slice (matrix_seq generator) 0 n in + let aux (ij: under n) : Lemma (SB.index slice ij == SB.index line ij) = + Math.Lemmas.small_div ij n; + Math.Lemmas.small_mod ij n + in Classical.forall_intro aux; + SB.lemma_eq_elim line slice; + eq.symmetry (SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i))))) + (SP.foldm_snoc cm line) +#pop-options + +let liat_equals_init #c (m:pos) (gen: under m -> c) + : Lemma (fst (SProp.un_snoc (SB.init m gen)) == SB.init (m-1) gen) = + SB.lemma_eq_elim (fst (SProp.un_snoc (SB.init m gen))) (SB.init (m-1) gen) + +let math_aux (m n: pos) (j: under n) : Lemma (j+((m-1)*n) < m*n) = () + +let math_aux_2 (m n: pos) (j: under n) : Lemma (get_j m n (j+(m-1)*n) == j) + = + Math.Lemmas.modulo_addition_lemma j n (m-1); + Math.Lemmas.small_mod j n + +let math_aux_3 (m n: pos) (j: under n) : Lemma (get_i m n (j+(m-1)*n) == (m-1)) + = + Math.Lemmas.division_addition_lemma j n (m-1); + Math.Lemmas.small_div j n + +let math_aux_4 (m n: pos) (j: under n) : Lemma ((j+((m-1)*n)) - ((m-1)*n) == j) = () + +let seq_eq_from_member_eq #c (n: pos) (p q: (z:SB.seq c{SB.length z=n})) + (proof: (i: under n) -> Lemma (SB.index p i == SB.index q i)) + : Lemma (p == q) = + Classical.forall_intro proof; + SB.lemma_eq_elim p q + +let math_wut_lemma (x: pos) : Lemma (requires x>1) (ensures x-1 > 0) = () + +(* This proof used to be very unstable, so I rewrote it with as much precision + and control over lambdas as possible. 
+ + I also left intact some trivial auxiliaries and the quake option + in order to catch regressions the moment they happen instead of several + releases later -- Alex *) +#push-options "--ifuel 0 --fuel 0 --z3rlimit 15" +#restart-solver +let rec matrix_fold_equals_double_fold #c #eq (#p:pos) #n (cm:CE.cm c eq) + (generator: matrix_generator c p n) (m: pos{m<=p}) + : Lemma (ensures SP.foldm_snoc cm (SB.slice (seq_of_matrix (init generator)) 0 (m*n)) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i))))) + (decreases m) = + if p=1 then terminal_case_aux cm generator m + else if m=1 then terminal_case_two_aux cm generator m + else + let lhs_seq = (SB.slice (matrix_seq generator) 0 (m*n)) in + let rhs_seq_gen = fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i)) in + let rhs_seq_subgen = fun (i: under (m-1)) -> SP.foldm_snoc cm (SB.init n (generator i)) in + let rhs_seq = SB.init m rhs_seq_gen in + let lhs = SP.foldm_snoc cm lhs_seq in + let rhs = SP.foldm_snoc cm rhs_seq in + let matrix = lhs_seq in + let submatrix = SB.slice (matrix_seq generator) 0 ((m-1)*n) in + let last_row = SB.slice (matrix_seq generator) ((m-1)*n) (m*n) in + SB.lemma_len_slice (matrix_seq generator) ((m-1)*n) (m*n); + assert (SB.length last_row = n); + SB.lemma_eq_elim matrix (SB.append submatrix last_row); + SP.foldm_snoc_append cm submatrix last_row; + matrix_fold_equals_double_fold #c #eq #p #n cm generator (m-1); + SB.lemma_eq_elim (SB.init (m-1) rhs_seq_gen) + (SB.init (m-1) rhs_seq_subgen); + let aux (j: under n) : Lemma (SB.index last_row j == generator (m-1) j) = + SB.lemma_index_app2 submatrix last_row (j+((m-1)*n)); + math_aux_2 m n j; + math_aux_3 m n j; + math_aux_4 m n j; + () in Classical.forall_intro aux; + let rhs_liat, rhs_last = SProp.un_snoc rhs_seq in + let rhs_last_seq = SB.init n (generator (m-1)) in + liat_equals_init m rhs_seq_gen; + SP.foldm_snoc_decomposition cm rhs_seq; + let aux_2 (j: under n) : Lemma (SB.index last_row j == SB.index rhs_last_seq j) = () in + seq_eq_from_member_eq n last_row rhs_last_seq aux_2; + SB.lemma_eq_elim rhs_liat (SB.init (m-1) rhs_seq_gen); + cm.commutativity (SP.foldm_snoc cm submatrix) (SP.foldm_snoc cm last_row); + eq.transitivity lhs (SP.foldm_snoc cm submatrix `cm.mult` SP.foldm_snoc cm last_row) + (SP.foldm_snoc cm last_row `cm.mult` SP.foldm_snoc cm submatrix); + eq.reflexivity (SP.foldm_snoc cm last_row); + cm.congruence (SP.foldm_snoc cm last_row) (SP.foldm_snoc cm submatrix) + (SP.foldm_snoc cm last_row) (SP.foldm_snoc cm (SB.init (m-1) rhs_seq_subgen)); + eq.transitivity lhs (SP.foldm_snoc cm last_row `cm.mult` SP.foldm_snoc cm submatrix) rhs +#pop-options + +let matrix_fold_equals_fold_of_seq_folds #c #eq #m #n cm generator : Lemma + (ensures foldm cm (init generator) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) /\ + SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))) = + matrix_fold_equals_double_fold cm generator m; + assert ((SB.slice (seq_of_matrix (init generator)) 0 (m*n)) == seq_of_matrix (init generator)); + SB.lemma_eq_elim (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) + (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i)))); + assert ((SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) == + (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i))))); +() + +(* This 
auxiliary lemma shows that the fold of the last line of a matrix + is equal to the corresponding fold of the generator function *) +let matrix_last_line_equals_gen_fold #c #eq + (#m #n: pos) + (cm: CE.cm c eq) + (generator: matrix_generator c m n) + : Lemma (SP.foldm_snoc cm (SB.slice (matrix_seq generator) ((m-1)*n) (m*n)) + `eq.eq` CF.fold cm 0 (n-1) (generator (m-1))) = + let slice = SB.slice #c in + let foldm_snoc = SP.foldm_snoc #c #eq in + assert (matrix_seq generator == seq_of_matrix (init generator)); + let init = SB.init #c in + let lemma_eq_elim = SB.lemma_eq_elim #c in + lemma_eq_elim (slice (matrix_seq generator) ((m-1)*n) (m*n)) + (init n (generator (m-1))); + let g : ifrom_ito 0 (n-1) -> c = generator (m-1) in + CF.fold_equals_seq_foldm cm 0 (n-1) g; + let gen = CF.init_func_from_expr g 0 (n-1) in + eq.reflexivity (foldm_snoc cm (init (closed_interval_size 0 (n-1)) gen)); + lemma_eq_elim (slice (matrix_seq generator) ((m-1)*n) (m*n)) + (init (closed_interval_size 0 (n-1)) gen); + eq.symmetry (CF.fold cm 0 (n-1) (generator (m-1))) + (foldm_snoc cm (init (closed_interval_size 0 (n-1)) gen)); + eq.transitivity (foldm_snoc cm (slice (matrix_seq generator) ((m-1)*n) (m*n))) + (foldm_snoc cm (init (closed_interval_size 0 (n-1)) gen)) + (CF.fold cm 0 (n-1) (generator (m-1))) + +(* This lemma proves that a matrix fold is the same thing as double-fold of + its generator function against full indices ranges *) +#push-options "--ifuel 0 --fuel 0" +let rec matrix_fold_aux #c #eq // lemma needed for precise generator domain control + (#gen_m #gen_n: pos) // full generator domain + (cm: CE.cm c eq) + (m: ifrom_ito 1 gen_m) (n: ifrom_ito 1 gen_n) //subdomain + (generator: matrix_generator c gen_m gen_n) + : Lemma (ensures SP.foldm_snoc cm (matrix_seq #c #m #n generator) `eq.eq` + CF.fold cm 0 (m-1) (fun (i: under m) -> CF.fold cm 0 (n-1) (generator i))) + (decreases m) = + Classical.forall_intro_2 (ijth_lemma (init generator)); + let slice = SB.slice #c in + let foldm_snoc = SP.foldm_snoc #c #eq in + let lemma_eq_elim = SB.lemma_eq_elim #c in + if m = 1 then begin + matrix_fold_equals_fold_of_seq cm (init generator); + matrix_last_line_equals_gen_fold #c #eq #m #n cm generator; + CF.fold_singleton_lemma cm 0 (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)); + assert (CF.fold cm 0 (m-1) (fun (i: under m) -> CF.fold cm 0 (n-1) (generator i)) + == CF.fold cm 0 (n-1) (generator 0)) + end else begin + Classical.forall_intro_3 (Classical.move_requires_3 eq.transitivity); + matrix_fold_aux cm (m-1) n generator; + let outer_func (i: under m) = CF.fold cm 0 (n-1) (generator i) in + let outer_func_on_subdomain (i: under (m-1)) = CF.fold cm 0 (n-1) (generator i) in + CF.fold_equality cm 0 (m-2) outer_func_on_subdomain outer_func; + CF.fold_snoc_decomposition cm 0 (m-1) outer_func; + matrix_fold_snoc_lemma #c #eq #m #n cm generator; + matrix_last_line_equals_gen_fold #c #eq #m #n cm generator; + cm.congruence (foldm_snoc cm (matrix_seq #c #(m-1) #n generator)) + (foldm_snoc cm (slice (matrix_seq #c #m #n generator) ((m-1)*n) (m*n))) + (CF.fold cm 0 (m-2) outer_func) + (CF.fold cm 0 (n-1) (generator (m-1))) + end +#pop-options + +(* This lemma establishes that the fold of a matrix is equal to + nested Algebra.CommMonoid.Fold.fold over the matrix generator *) +let matrix_fold_equals_func_double_fold #c #eq #m #n cm generator + : Lemma (foldm cm (init generator) `eq.eq` + CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i))) + = matrix_fold_aux cm m n generator + +(* This function 
provides the transposed matrix generator, with indices swapped + Notice how the forall property of the result function is happily proved + automatically by z3 :) *) +let transposed_matrix_gen #c #m #n (generator: matrix_generator c m n) + : (f: matrix_generator c n m { forall i j. f j i == generator i j }) + = fun j i -> generator i j + +(* This lemma shows that the transposed matrix is + a permutation of the original one *) +let matrix_transpose_is_permutation #c #m #n generator + : Lemma (SP.is_permutation (seq_of_matrix (init generator)) + (seq_of_matrix (init (transposed_matrix_gen generator))) + (transpose_ji m n)) = + let matrix_transposed_eq_lemma #c (#m #n: pos) + (gen: matrix_generator c m n) + (ij: under (m*n)) + : Lemma (SB.index (seq_of_matrix (init gen)) ij == + SB.index (seq_of_matrix (init (transposed_matrix_gen gen))) (transpose_ji m n ij)) + = + ijth_lemma (init gen) (get_i m n ij) (get_j m n ij); + ijth_lemma (init (transposed_matrix_gen gen)) + (get_i n m (transpose_ji m n ij)) + (get_j n m (transpose_ji m n ij)); + () in + let transpose_inequality_lemma (m n: pos) (ij: under (m*n)) (kl: under (n*m)) + : Lemma (requires kl <> ij) (ensures transpose_ji m n ij <> transpose_ji m n kl) = + dual_indices m n ij; + dual_indices m n kl in + Classical.forall_intro (matrix_transposed_eq_lemma generator); + Classical.forall_intro_2 (Classical.move_requires_2 + (transpose_inequality_lemma m n)); + SP.reveal_is_permutation (seq_of_matrix (init generator)) + (seq_of_matrix (init (transposed_matrix_gen generator))) + (transpose_ji m n) + +(* Fold over matrix equals fold over transposed matrix *) +let matrix_fold_equals_fold_of_transpose #c #eq #m #n + (cm: CE.cm c eq) + (gen: matrix_generator c m n) + : Lemma (foldm cm (init gen) `eq.eq` + foldm cm (init (transposed_matrix_gen gen))) = + let matrix_seq #c #m #n (g: matrix_generator c m n) = (seq_of_matrix (init g)) in + let matrix_mn = matrix_seq gen in + let matrix_nm = matrix_seq (transposed_matrix_gen gen) in + matrix_transpose_is_permutation gen; + SP.foldm_snoc_perm cm (matrix_seq gen) + (matrix_seq (transposed_matrix_gen gen)) + (transpose_ji m n); + matrix_fold_equals_fold_of_seq cm (init gen); + matrix_fold_equals_fold_of_seq cm (init (transposed_matrix_gen gen)); + eq.symmetry (foldm cm (init (transposed_matrix_gen gen))) + (SP.foldm_snoc cm (matrix_seq (transposed_matrix_gen gen))); + eq.transitivity (foldm cm (init gen)) (SP.foldm_snoc cm (matrix_seq gen)) + (SP.foldm_snoc cm (matrix_seq (transposed_matrix_gen gen))); + eq.transitivity (foldm cm (init gen)) (SP.foldm_snoc cm (matrix_seq (transposed_matrix_gen gen))) + (foldm cm (init (transposed_matrix_gen gen))) + +let matrix_eq_fun #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n) = + eq_of_seq eq (seq_of_matrix ma) (seq_of_matrix mb) + +(* + Matrix equivalence, defined as element-wise equivalence of its underlying + flattened sequence, is constructed trivially from the element equivalence + and the lemmas defined above. 
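+ Concretely, two matrices are equivalent exactly when all their corresponding entries are equivalent; both directions of this fact are spelled out below by matrix_equiv_ijth and matrix_equiv_from_element_eq.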
+*) +let matrix_equiv #c (eq: CE.equiv c) (m n: pos) : CE.equiv (matrix c m n) = + CE.EQ (matrix_eq_fun eq) + (fun m -> eq_of_seq_reflexivity eq (seq_of_matrix m)) + (fun ma mb -> eq_of_seq_symmetry eq (seq_of_matrix ma) (seq_of_matrix mb)) + (fun ma mb mc -> eq_of_seq_transitivity eq (seq_of_matrix ma) (seq_of_matrix mb) (seq_of_matrix mc)) + +(* Equivalence of matrices means equivalence of all corresponding elements *) +let matrix_equiv_ijth #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n) (i: under m) (j: under n) + : Lemma (requires (matrix_equiv eq m n).eq ma mb) (ensures ijth ma i j `eq.eq` ijth mb i j) = + eq_of_seq_element_equality eq (seq_of_matrix ma) (seq_of_matrix mb) + +(* Equivalence of all corresponding elements means equivalence of matrices *) +let matrix_equiv_from_element_eq #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n) + : Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j)) + (ensures matrix_eq_fun eq ma mb) = + assert (SB.length (seq_of_matrix ma) = SB.length (seq_of_matrix mb)); + let s1 = seq_of_matrix ma in + let s2 = seq_of_matrix mb in + assert (forall (ij: under (m*n)). SB.index s1 ij == ijth ma (get_i m n ij) (get_j m n ij)); + assert (forall (ij: under (m*n)). SB.index s2 ij == ijth mb (get_i m n ij) (get_j m n ij)); + assert (forall (ij: under (m*n)). SB.index s1 ij `eq.eq` SB.index s2 ij); + eq_of_seq_from_element_equality eq (seq_of_matrix ma) (seq_of_matrix mb) + +(* We construct addition CommMonoid from the following definitions *) +let matrix_add_is_associative #c #eq #m #n (add: CE.cm c eq) (ma mb mc: matrix c m n) + : Lemma (matrix_add add (matrix_add add ma mb) mc `(matrix_equiv eq m n).eq` + matrix_add add ma (matrix_add add mb mc)) = + matrix_equiv_from_proof eq + (matrix_add add (matrix_add add ma mb) mc) + (matrix_add add ma (matrix_add add mb mc)) + (fun i j -> add.associativity (ijth ma i j) (ijth mb i j) (ijth mc i j)) + +let matrix_add_is_commutative #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) + : Lemma (matrix_add add ma mb `(matrix_equiv eq m n).eq` matrix_add add mb ma) = + matrix_equiv_from_proof eq (matrix_add add ma mb) (matrix_add add mb ma) + (fun i j -> add.commutativity (ijth ma i j) (ijth mb i j)) + +let matrix_add_congruence #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb mc md: matrix c m n) + : Lemma (requires matrix_eq_fun eq ma mc /\ matrix_eq_fun eq mb md) + (ensures matrix_add add ma mb `matrix_eq_fun eq` matrix_add add mc md) = + matrix_equiv_from_proof eq (matrix_add add ma mb) (matrix_add add mc md) + (fun i j -> matrix_equiv_ijth eq ma mc i j; + matrix_equiv_ijth eq mb md i j; + add.congruence (ijth ma i j) (ijth mb i j) + (ijth mc i j) (ijth md i j)) + +let matrix_add_zero #c #eq (add: CE.cm c eq) (m n: pos) + : (z: matrix c m n { forall (i: under m) (j: under n). 
ijth z i j == add.unit }) + = matrix_of_seq m n (SB.create (m*n) add.unit) + +let matrix_add_identity #c #eq (add: CE.cm c eq) (#m #n: pos) (mx: matrix c m n) + : Lemma (matrix_add add (matrix_add_zero add m n) mx `matrix_eq_fun eq` mx) = + matrix_equiv_from_proof eq (matrix_add add (matrix_add_zero add m n) mx) mx + (fun i j -> add.identity (ijth mx i j)) + +let matrix_add_comm_monoid #c #eq (add: CE.cm c eq) (m n: pos) + : CE.cm (matrix c m n) (matrix_equiv eq m n) + = CE.CM (matrix_add_zero add m n) + (matrix_add add) + (matrix_add_identity add) + (matrix_add_is_associative add) + (matrix_add_is_commutative add) + (matrix_add_congruence add) + +(* equivalence of addressing styles *) +let matrix_row_col_lemma #c #m #n (mx: matrix c m n) (i: under m) (j: under n) + : Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i) = () + +(* + See how lemma_eq_elim is defined, note the SMTPat there. + Invoking this is often more efficient in big proofs than invoking + lemma_eq_elim directly. +*) +let seq_of_products_lemma #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s}) + (r: SB.seq c{SB.equal r (SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i))}) + : Lemma (seq_of_products mul s t == r) = () + +let dot_lemma #c #eq add mul s t + : Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t)) = () + +let matrix_mul_gen #c #eq #m #n #p (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n p) + (i: under m) (k: under p) + = dot add mul (row mx i) (col my k) + +let matrix_mul #c #eq #m #n #p (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p) + = init (matrix_mul_gen add mul mx my) + +(* the following lemmas improve verification performance. *) +(* Sometimes this fact gets lost and needs an explicit proof *) +let seq_last_index #c (s: SB.seq c{SB.length s > 0}) + : Lemma (SProp.last s == SB.index s (SB.length s - 1)) = () + +(* It often takes assert_norm to obtain the fact that, + (fold s == last s `op` fold (slice s 0 (length s - 1))). + Invoking this lemma instead offers a more stable option. *) +let seq_fold_decomposition #c #eq (cm: CE.cm c eq) (s: SB.seq c{SB.length s > 0}) + : Lemma (SP.foldm_snoc cm s == cm.mult (SProp.last s) (SP.foldm_snoc cm (fst (SProp.un_snoc s)))) = () + + +(* Using common notation for algebraic operations instead of `mul` / `add` infix + simplifies the code and makes it more compact. 
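+ In the distributivity proofs below this amounts to locally rebinding (+), ( * ) and (=) to add.mult, mul.mult and eq.eq.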
*) +let rec foldm_snoc_distributivity_left #c #eq (mul add: CE.cm c eq) (a: c) (s: SB.seq c) + : Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul) + (ensures mul.mult a (SP.foldm_snoc add s) `eq.eq` + SP.foldm_snoc add (const_op_seq mul a s)) + (decreases SB.length s) = + if SB.length s > 0 then + let ((+), ( * ), (=)) = add.mult, mul.mult, eq.eq in + let sum s = SP.foldm_snoc add s in + let liat, last = SProp.un_snoc s in + let rhs_liat, rhs_last = SProp.un_snoc (const_op_seq mul a s) in + foldm_snoc_distributivity_left mul add a liat; + SB.lemma_eq_elim rhs_liat (const_op_seq mul a liat); + eq.reflexivity rhs_last; + add.congruence rhs_last (a*sum liat) rhs_last (sum rhs_liat); + eq.transitivity (a*sum s) (rhs_last + a*sum liat) (rhs_last + sum rhs_liat) + +let rec foldm_snoc_distributivity_right #c #eq (mul add: CE.cm c eq) (s: SB.seq c) (a: c) + : Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul) + (ensures mul.mult (SP.foldm_snoc add s) a `eq.eq` + SP.foldm_snoc add (seq_op_const mul s a)) + (decreases SB.length s) = + if SB.length s > 0 then + let ((+), ( * ), (=)) = add.mult, mul.mult, eq.eq in + let sum s = SP.foldm_snoc add s in + let liat, last = SProp.un_snoc s in + let rhs_liat, rhs_last = SProp.un_snoc (seq_op_const mul s a) in + foldm_snoc_distributivity_right mul add liat a; + SB.lemma_eq_elim rhs_liat (seq_op_const mul liat a); + eq.reflexivity rhs_last; + add.congruence rhs_last (sum liat*a) rhs_last (sum rhs_liat); + eq.transitivity (sum s*a) (rhs_last + sum liat*a) (rhs_last + sum rhs_liat) + +let foldm_snoc_distributivity_right_eq #c #eq (mul add: CE.cm c eq) (s: SB.seq c) (a: c) (r: SB.seq c) + : Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul /\ + SB.equal r (seq_op_const mul s a)) + (ensures mul.mult (SP.foldm_snoc add s) a `eq.eq` + SP.foldm_snoc add r) + = foldm_snoc_distributivity_right mul add s a + +let foldm_snoc_distributivity_left_eq #c #eq (mul add: CE.cm c eq) (a: c) + (s: SB.seq c) + (r: SB.seq c{SB.equal r (const_op_seq mul a s)}) + : Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul) + (ensures (mul.mult a(SP.foldm_snoc add s)) `eq.eq` + SP.foldm_snoc add r) + = foldm_snoc_distributivity_left mul add a s + +let matrix_mul_ijth #c #eq #m #n #k (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n k) i h + : Lemma (ijth (matrix_mul add mul mx my) i h == dot add mul (row mx i) (col my h)) = () + +let matrix_mul_ijth_as_sum #c #eq #m #n #p (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n p) i k + : Lemma (ijth (matrix_mul add mul mx my) i k == + SP.foldm_snoc add (SB.init n (fun (j: under n) -> mul.mult (ijth mx i j) (ijth my j k)))) = + let r = SB.init n (fun (j: under n) -> mul.mult (ijth mx i j) (ijth my j k)) in + assert (ijth (matrix_mul add mul mx my) i k == + SP.foldm_snoc add (seq_of_products mul (row mx i) (col my k))); + seq_of_products_lemma mul (row mx i) (col my k) r + +let matrix_mul_ijth_eq_sum_of_seq #c #eq #m #n #p (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p) + (r: SB.seq c{r `SB.equal` seq_of_products mul (row mx i) (col my k)}) + : Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add r) = () + + +let double_foldm_snoc_transpose_lemma #c #eq (#m #n: pos) (cm: CE.cm c eq) (f: under m -> under n -> c) + : Lemma (SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under 
n) -> f i j)))) `eq.eq` + SP.foldm_snoc cm (SB.init n (fun (j: under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j))))) = + Classical.forall_intro_2 (Classical.move_requires_2 eq.symmetry); + let gen : matrix_generator c m n = f in + let mx = init gen in + let mx_seq = matrix_seq gen in + matrix_fold_equals_fold_of_seq_folds cm gen; + let aux (i: under m) : Lemma (SB.init n (gen i) == SB.init n (fun (j: under n) -> f i j)) + = SB.lemma_eq_elim (SB.init n (gen i))(SB.init n (fun (j: under n) -> f i j)) + in Classical.forall_intro aux; + SB.lemma_eq_elim (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (gen i)))) + (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))); + SB.lemma_eq_elim (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))) + (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))); + matrix_transpose_is_permutation gen; + matrix_fold_equals_fold_of_transpose cm gen; + let trans_gen = transposed_matrix_gen gen in + let mx_trans = init trans_gen in + let mx_trans_seq = matrix_seq trans_gen in + matrix_fold_equals_fold_of_seq_folds cm trans_gen; + assert (foldm cm mx_trans `eq.eq` + SP.foldm_snoc cm (SB.init n (fun j -> SP.foldm_snoc cm (SB.init m (trans_gen j))))); + let aux_tr_lemma (j: under n) + : Lemma ((SB.init m (trans_gen j)) == (SB.init m (fun (i: under m) -> f i j))) + = SB.lemma_eq_elim (SB.init m (trans_gen j)) (SB.init m (fun (i: under m) -> f i j)) + in Classical.forall_intro aux_tr_lemma; + SB.lemma_eq_elim (SB.init n (fun j -> SP.foldm_snoc cm (SB.init m (trans_gen j)))) + (SB.init n (fun (j:under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j)))); + assert (foldm cm mx_trans `eq.eq` + SP.foldm_snoc cm (SB.init n (fun (j:under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j))))); + eq.transitivity (SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j))))) + (foldm cm mx) + (foldm cm mx_trans); + eq.transitivity (SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j))))) + (foldm cm mx_trans) + (SP.foldm_snoc cm (SB.init n (fun (j:under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j))))) + +let matrix_mul_ijth_eq_sum_of_seq_for_init #c #eq #m #n #p (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n p) i k + (f: under n -> c { SB.init n f `SB.equal` seq_of_products mul (row mx i) (col my k)}) + : Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add (SB.init n f)) = () + +let double_foldm_snoc_of_equal_generators #c #eq (#m #n: pos) + (cm: CE.cm c eq) + (f g: under m -> under n -> c) + : Lemma (requires (forall (i: under m) (j: under n). 
f i j `eq.eq` g i j)) + (ensures SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))) + `eq.eq` SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> g i j))))) = + let aux i : Lemma (SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)) `eq.eq` + SP.foldm_snoc cm (SB.init n (fun (j: under n) -> g i j))) + = SP.foldm_snoc_of_equal_inits cm (fun j -> f i j) (fun j -> g i j) in + Classical.forall_intro aux; + SP.foldm_snoc_of_equal_inits cm (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j))) + (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> g i j))) + +#push-options "--z3rlimit 15 --ifuel 0 --fuel 0" +let matrix_mul_is_associative #c #eq #m #n #p #q (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx: matrix c m n) (my: matrix c n p) (mz: matrix c p q) + : Lemma (matrix_eq_fun eq ((matrix_mul add mul mx my) `matrix_mul add mul` mz) + (matrix_mul add mul mx (matrix_mul add mul my mz))) = + let rhs = mx `matrix_mul add mul` (my `matrix_mul add mul` mz) in + let lhs = (mx `matrix_mul add mul` my) `matrix_mul add mul` mz in + let mxy = matrix_mul add mul mx my in + let myz = matrix_mul add mul my mz in + let ((+), ( * ), (=)) = add.mult, mul.mult, eq.eq in + let aux i l : squash (ijth lhs i l = ijth rhs i l) = + let sum_j (f: under n -> c) = SP.foldm_snoc add (SB.init n f) in + let sum_k (f: under p -> c) = SP.foldm_snoc add (SB.init p f) in + let xy_products_init k j = ijth mx i j * ijth my j k in + let xy_cell_as_sum k = sum_j (xy_products_init k) in + let xy_cell_lemma k : Lemma (ijth mxy i k == xy_cell_as_sum k) = + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx my i k (xy_products_init k) + in Classical.forall_intro xy_cell_lemma; + let xy_z_products_init k = xy_cell_as_sum k * ijth mz k l in + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mxy mz i l xy_z_products_init; + let full_init_kj k j = (ijth mx i j * ijth my j k) * ijth mz k l in + let full_init_jk j k = (ijth mx i j * ijth my j k) * ijth mz k l in + let full_init_rh j k = ijth mx i j * (ijth my j k * ijth mz k l) in + let sum_jk (f: (under n -> under p -> c)) = sum_j (fun j -> sum_k (fun k -> f j k)) in + let sum_kj (f: (under p -> under n -> c)) = sum_k (fun k -> sum_j (fun j -> f k j)) in + let xy_z_distr k : Lemma (((xy_cell_as_sum k) * (ijth mz k l)) = sum_j (full_init_kj k)) + = foldm_snoc_distributivity_right_eq mul add (SB.init n (xy_products_init k)) (ijth mz k l) + (SB.init n (full_init_kj k)) + in Classical.forall_intro xy_z_distr; + SP.foldm_snoc_of_equal_inits add xy_z_products_init + (fun k -> sum_j (full_init_kj k)); + double_foldm_snoc_transpose_lemma add full_init_kj; + eq.transitivity (ijth lhs i l) (sum_kj full_init_kj) + (sum_jk full_init_jk); + let aux_rh j k : Lemma (full_init_jk j k = full_init_rh j k) + = mul.associativity (ijth mx i j) (ijth my j k) (ijth mz k l) + in Classical.forall_intro_2 aux_rh; + double_foldm_snoc_of_equal_generators add full_init_jk full_init_rh; + eq.transitivity (ijth lhs i l) (sum_jk full_init_jk) (sum_jk full_init_rh); + + // now expand the right hand side, fully dual to the first part of the lemma. 
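+    // So far we have shown, up to eq.eq, that
+    //   ijth lhs i l = sum_kj full_init_kj   (expand (XY) and distribute over Z)
+    //                = sum_jk full_init_jk   (double_foldm_snoc_transpose_lemma)
+    //                = sum_jk full_init_rh   (mul.associativity on each summand).
+    // The dual expansion below ends with ijth rhs i l related to sum_jk full_init_rh,
+    // and the two chains are glued together by eq.symmetry and eq.transitivity.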
+ let yz_products_init j k = ijth my j k * ijth mz k l in + let yz_cell_as_sum j = sum_k (yz_products_init j) in + let x_yz_products_init j = ijth mx i j * yz_cell_as_sum j in + let yz_cell_lemma j : Lemma (ijth myz j l == sum_k (yz_products_init j)) = + matrix_mul_ijth_eq_sum_of_seq_for_init add mul my mz j l (yz_products_init j); + () in Classical.forall_intro yz_cell_lemma; + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx myz i l x_yz_products_init; + let x_yz_distr j : Lemma (ijth mx i j * yz_cell_as_sum j = sum_k (full_init_rh j)) + = foldm_snoc_distributivity_left_eq mul add (ijth mx i j) (SB.init p (yz_products_init j)) + (SB.init p (full_init_rh j)) + in Classical.forall_intro x_yz_distr; + SP.foldm_snoc_of_equal_inits add x_yz_products_init (fun j -> sum_k (full_init_rh j)); + eq.symmetry (ijth rhs i l) (sum_jk full_init_rh); + eq.transitivity (ijth lhs i l) (sum_jk full_init_rh) (ijth rhs i l); + () in matrix_equiv_from_proof eq lhs rhs aux +#pop-options + +let matrix_mul_unit_row_lemma #c #eq m (add mul: CE.cm c eq) (i: under m) + : Lemma ((row (matrix_mul_unit add mul m) i + == (SB.create i add.unit) `SB.append` + ((SB.create 1 mul.unit) `SB.append` (SB.create (m-i-1) add.unit))) /\ + (row (matrix_mul_unit add mul m) i + == ((SB.create i add.unit) `SB.append` (SB.create 1 mul.unit)) `SB.append` + (SB.create (m-i-1) add.unit))) = + SB.lemma_eq_elim ((SB.create i add.unit `SB.append` SB.create 1 mul.unit) + `SB.append` (SB.create (m-i-1) add.unit)) + (row (matrix_mul_unit add mul m) i); + SB.lemma_eq_elim ((SB.create i add.unit) `SB.append` + (SB.create 1 mul.unit `SB.append` SB.create (m-i-1) add.unit)) + (row (matrix_mul_unit add mul m) i) + +let matrix_mul_unit_col_lemma #c #eq m (add mul: CE.cm c eq) (i: under m) + : Lemma ((col (matrix_mul_unit add mul m) i + == (SB.create i add.unit) `SB.append` + ((SB.create 1 mul.unit) `SB.append` (SB.create (m-i-1) add.unit))) /\ + (col (matrix_mul_unit add mul m) i == + ((SB.create i add.unit) `SB.append` (SB.create 1 mul.unit)) `SB.append` + (SB.create (m-i-1) add.unit))) = + SB.lemma_eq_elim ((SB.create i add.unit `SB.append` SB.create 1 mul.unit) + `SB.append` (SB.create (m-i-1) add.unit)) + (col (matrix_mul_unit add mul m) i); + SB.lemma_eq_elim ((SB.create i add.unit) `SB.append` + (SB.create 1 mul.unit `SB.append` SB.create (m-i-1) add.unit)) + (col (matrix_mul_unit add mul m) i) + +let seq_of_products_zeroes_lemma #c #eq #m (mul: CE.cm c eq) + (z: c{is_absorber z mul}) + (s: SB.seq c{SB.length s == m}) + : Lemma (ensures (eq_of_seq eq (seq_of_products mul (SB.create m z) s) (SB.create m z))) + = eq_of_seq_from_element_equality eq (seq_of_products mul (SB.create m z) s) (SB.create m z) + +let rec foldm_snoc_zero_lemma #c #eq (add: CE.cm c eq) (zeroes: SB.seq c) + : Lemma (requires (forall (i: under (SB.length zeroes)). 
SB.index zeroes i `eq.eq` add.unit)) + (ensures eq.eq (SP.foldm_snoc add zeroes) add.unit) + (decreases SB.length zeroes) = + if (SB.length zeroes < 1) then begin + assert_norm (SP.foldm_snoc add zeroes == add.unit); + eq.reflexivity add.unit + end else + let liat, last = SProp.un_snoc zeroes in + foldm_snoc_zero_lemma add liat; + add.congruence last (SP.foldm_snoc add liat) add.unit add.unit; + add.identity add.unit; + SP.foldm_snoc_decomposition add zeroes; + eq.transitivity (SP.foldm_snoc add zeroes) + (add.mult add.unit add.unit) + add.unit + + +let matrix_mul_unit_ijth #c #eq (add mul: CE.cm c eq) m (i j: under m) + : Lemma (ijth (matrix_mul_unit add mul m) i j == (if i=j then mul.unit else add.unit))=() + +let last_equals_index #c (s: SB.seq c{SB.length s > 0}) + : Lemma ((snd (SProp.un_snoc s)) == SB.index s (SB.length s - 1)) = () + + + +let matrix_right_mul_identity_aux_0 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:nat{k=0}) + : Lemma (ensures SP.foldm_snoc add (SB.init k (fun (k: under m) + -> ijth mx i k `mul.mult` + ijth (matrix_mul_unit add mul m) k j)) + `eq.eq` add.unit) + = eq.reflexivity add.unit + +let rec matrix_right_mul_identity_aux_1 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:nat{k<=j}) + : Lemma (ensures SP.foldm_snoc add (SB.init k (fun (k: under m) + -> ijth mx i k `mul.mult` + ijth (matrix_mul_unit add mul m) k j)) + `eq.eq` add.unit) + (decreases k) + = if k = 0 then matrix_right_mul_identity_aux_0 add mul mx i j k + else + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul mx unit in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in + let full = SB.init k gen in + let liat,last = SProp.un_snoc full in + matrix_right_mul_identity_aux_1 add mul mx i j (k-1); + liat_equals_init k gen; + eq.reflexivity (SP.foldm_snoc add liat); + mul.congruence last (SP.foldm_snoc add liat) add.unit (SP.foldm_snoc add liat); + eq.transitivity (last * SP.foldm_snoc add liat) + (add.unit * SP.foldm_snoc add liat) + (add.unit); + + eq.reflexivity (SP.foldm_snoc add (SB.init (k-1) gen)); + matrix_mul_unit_ijth add mul m (k-1) j; // This one reduces the rlimits needs to default + add.congruence last (SP.foldm_snoc add liat) add.unit add.unit; + add.identity add.unit; + SP.foldm_snoc_decomposition add full; + eq.transitivity (SP.foldm_snoc add full) + (add.mult add.unit add.unit) + add.unit + +let matrix_right_mul_identity_aux_2 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:nat{k=j+1}) + : Lemma (ensures SP.foldm_snoc add (SB.init k (fun (k: under m) + -> ijth mx i k `mul.mult` + ijth (matrix_mul_unit add mul m) k j)) + `eq.eq` ijth mx i j) = + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul mx unit in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in + let full = SB.init k gen in + let liat,last = SProp.un_snoc full in + matrix_right_mul_identity_aux_1 add mul mx i j j; + liat_equals_init k gen; + mul.identity (ijth mx i j); + eq.reflexivity last; + add.congruence last (SP.foldm_snoc add liat) last add.unit; + matrix_mul_unit_ijth add mul m (k-1) j; // This one reduces the rlimits needs to default + add.identity last; + add.commutativity last add.unit; + mul.commutativity (ijth mx i j) mul.unit; + 
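+    // The remaining steps chain, up to eq.eq,
+    //   SP.foldm_snoc add full = last + add.unit = last = mul.unit * ijth mx i j = ijth mx i j,
+    // using foldm_snoc_decomposition to split off the last summand and eq.transitivity
+    // to glue the equalities together.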
eq.transitivity (add.mult last add.unit) (add.mult add.unit last) last; + SP.foldm_snoc_decomposition add full; + eq.transitivity (SP.foldm_snoc add full) (add.mult last add.unit) last; + eq.transitivity last (mul.unit * ijth mx i j) (ijth mx i j); + eq.transitivity (SP.foldm_snoc add full) last (ijth mx i j) + +let rec matrix_right_mul_identity_aux_3 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:under (m+1){k>j+1}) + : Lemma (ensures SP.foldm_snoc add (SB.init k + (fun (k: under m) -> ijth mx i k `mul.mult` ijth (matrix_mul_unit add mul m) k j)) + `eq.eq` ijth mx i j) + (decreases k) = + if (k-1) > j+1 then matrix_right_mul_identity_aux_3 add mul mx i j (k-1) + else matrix_right_mul_identity_aux_2 add mul mx i j (k-1); + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul mx unit in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in + let subgen (i: under (k)) = gen i in + let full = SB.init k gen in + SP.foldm_snoc_decomposition add full; + liat_equals_init k gen; + let liat,last = SProp.un_snoc full in + SB.lemma_eq_elim liat (SB.init (k-1) gen); + add.identity add.unit; + mul.commutativity (ijth mx i (k-1)) add.unit; + eq.reflexivity (SP.foldm_snoc add (SB.init (k-1) gen)); + matrix_mul_unit_ijth add mul m (k-1) j; // This one reduces the rlimits needs to default + add.congruence last (SP.foldm_snoc add (SB.init (k-1) gen)) + add.unit (SP.foldm_snoc add (SB.init (k-1) gen)); + add.identity (SP.foldm_snoc add (SB.init (k-1) gen)); + eq.transitivity (SP.foldm_snoc add full) + (add.mult add.unit (SP.foldm_snoc add (SB.init (k-1) gen))) + (SP.foldm_snoc add (SB.init (k-1) gen)); + eq.transitivity (SP.foldm_snoc add full) + (SP.foldm_snoc add (SB.init (k-1) gen)) + (ijth mx i j) + +let matrix_right_identity_aux #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:under (m+1)) + : Lemma (ensures SP.foldm_snoc add (SB.init k + (fun (k: under m) -> ijth mx i k `mul.mult` ijth (matrix_mul_unit add mul m) k j)) + `eq.eq` + (if k>j then ijth mx i j else add.unit)) + (decreases k) = + if k=0 then matrix_right_mul_identity_aux_0 add mul mx i j k + else if k <= j then matrix_right_mul_identity_aux_1 add mul mx i j k + else if k = j+1 then matrix_right_mul_identity_aux_2 add mul mx i j k + else matrix_right_mul_identity_aux_3 add mul mx i j k + +let matrix_left_mul_identity_aux_0 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:nat{k=0}) + : Lemma (ensures SP.foldm_snoc add (SB.init k + (fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j)) + `eq.eq` add.unit) = eq.reflexivity add.unit + +#restart-solver +let rec matrix_left_mul_identity_aux_1 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:nat{k<=i /\ k>0}) + : Lemma (ensures SP.foldm_snoc add (SB.init k + (fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j)) + `eq.eq` add.unit) = + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul mx unit in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let gen (k: under m) = ijth unit i k * ijth mx k j in + let full = SB.init k gen in + let liat,last = SProp.un_snoc full in + if k=1 then matrix_left_mul_identity_aux_0 add mul mx i j (k-1) + else matrix_left_mul_identity_aux_1 add mul 
mx i j (k-1); + liat_equals_init k gen; + eq.reflexivity (SP.foldm_snoc add liat); + SP.foldm_snoc_decomposition add full; + mul.congruence last (SP.foldm_snoc add liat) add.unit (SP.foldm_snoc add liat); + eq.transitivity (last * SP.foldm_snoc add liat) + (add.unit * SP.foldm_snoc add liat) + (add.unit); + add.congruence last (SP.foldm_snoc add liat) add.unit add.unit; + add.identity add.unit; + eq.transitivity (SP.foldm_snoc add full) + (add.mult add.unit add.unit) + add.unit + +#push-options "--z3rlimit 20" +let matrix_left_mul_identity_aux_2 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:nat{k=i+1}) + : Lemma (ensures SP.foldm_snoc add (SB.init k + (fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j)) + `eq.eq` ijth mx i j) = + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul mx unit in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let gen (k: under m) = ijth unit i k * ijth mx k j in + let full = SB.init k gen in + let liat,last = SProp.un_snoc full in + assert (k-1 <= i /\ k-1 >= 0); + if (k-1)=0 then matrix_left_mul_identity_aux_0 add mul mx i j (k-1) + else matrix_left_mul_identity_aux_1 add mul mx i j (k-1); + matrix_mul_unit_ijth add mul m i (k-1); // This one reduces the rlimits needs to default + SP.foldm_snoc_decomposition add full; + liat_equals_init k gen; + mul.identity (ijth mx i j); + eq.reflexivity last; + add.congruence last (SP.foldm_snoc add liat) last add.unit; + add.identity last; + add.commutativity last add.unit; + mul.commutativity (ijth mx i j) mul.unit; + eq.transitivity (add.mult last add.unit) (add.mult add.unit last) last; + eq.transitivity (SP.foldm_snoc add full) (add.mult last add.unit) last; + eq.transitivity last (mul.unit * ijth mx i j) (ijth mx i j); + eq.transitivity (SP.foldm_snoc add full) last (ijth mx i j) + +let rec matrix_left_mul_identity_aux_3 #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:under(m+1){k>i+1}) + : Lemma (ensures SP.foldm_snoc add (SB.init k + (fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j)) + `eq.eq` ijth mx i j) = + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul mx unit in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let gen (k: under m) = ijth unit i k * ijth mx k j in + let full = SB.init k gen in + if (k-1 = i+1) then matrix_left_mul_identity_aux_2 add mul mx i j (k-1) + else matrix_left_mul_identity_aux_3 add mul mx i j (k-1); + matrix_mul_unit_ijth add mul m i (k-1); // This one reduces the rlimits needs to default + SP.foldm_snoc_decomposition add full; + liat_equals_init k gen; + let liat,last = SProp.un_snoc full in + SB.lemma_eq_elim liat (SB.init (k-1) gen); + add.identity add.unit; + mul.commutativity (ijth mx i (k-1)) add.unit; + eq.reflexivity (SP.foldm_snoc add (SB.init (k-1) gen)); + add.congruence last (SP.foldm_snoc add (SB.init (k-1) gen)) + add.unit (SP.foldm_snoc add (SB.init (k-1) gen)); + add.identity (SP.foldm_snoc add (SB.init (k-1) gen)); + eq.transitivity (SP.foldm_snoc add full) + (add.mult add.unit (SP.foldm_snoc add (SB.init (k-1) gen))) + (SP.foldm_snoc add (SB.init (k-1) gen)); + eq.transitivity (SP.foldm_snoc add full) + (SP.foldm_snoc add (SB.init (k-1) gen)) + (ijth mx i j) + +let matrix_left_identity_aux #c #eq #m + (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + (i j: under m) (k:under (m+1)) + 
: Lemma (ensures SP.foldm_snoc add (SB.init k + (fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j)) + `eq.eq` (if k>i then ijth mx i j else add.unit)) + (decreases k) = + if k=0 then matrix_left_mul_identity_aux_0 add mul mx i j k + else if k <= i then matrix_left_mul_identity_aux_1 add mul mx i j k + else if k = i+1 then matrix_left_mul_identity_aux_2 add mul mx i j k + else matrix_left_mul_identity_aux_3 add mul mx i j k + +let matrix_mul_right_identity #c #eq #m (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + : Lemma (matrix_mul add mul mx (matrix_mul_unit add mul m) `matrix_eq_fun eq` mx) = + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul mx unit in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let aux (i j: under m) : Lemma (ijth mxu i j $=$ ijth mx i j) = + let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx unit i j gen; + let seq = SB.init m gen in + matrix_right_identity_aux add mul mx i j m + in Classical.forall_intro_2 aux; + matrix_equiv_from_element_eq eq mxu mx + +let matrix_mul_left_identity #c #eq #m (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + : Lemma (matrix_mul add mul (matrix_mul_unit add mul m) mx `matrix_eq_fun eq` mx) = + let unit = matrix_mul_unit add mul m in + let mxu = matrix_mul add mul unit mx in + let ( * ) = mul.mult in + let ( $=$ ) = eq.eq in + let aux (i j: under m) : squash (ijth mxu i j $=$ ijth mx i j) = + let gen (k: under m) = ijth unit i k * ijth mx k j in + matrix_mul_ijth_eq_sum_of_seq_for_init add mul unit mx i j gen; + let seq = SB.init m gen in + matrix_left_identity_aux add mul mx i j m + in + matrix_equiv_from_proof eq mxu mx aux + +let matrix_mul_identity #c #eq #m (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + : Lemma (matrix_mul add mul mx (matrix_mul_unit add mul m) `matrix_eq_fun eq` mx /\ + matrix_mul add mul (matrix_mul_unit add mul m) mx `matrix_eq_fun eq` mx) = + matrix_mul_left_identity add mul mx; + matrix_mul_right_identity add mul mx + +let dot_of_equal_sequences #c #eq (add mul: CE.cm c eq) m + (p q r s: (z:SB.seq c{SB.length z == m})) + : Lemma (requires eq_of_seq eq p r /\ eq_of_seq eq q s) + (ensures eq.eq (dot add mul p q) (dot add mul r s)) = + eq_of_seq_element_equality eq p r; + eq_of_seq_element_equality eq q s; + let aux (i: under (SB.length p)) : Lemma (SB.index (seq_of_products mul p q) i `eq.eq` + SB.index (seq_of_products mul r s) i) + = mul.congruence (SB.index p i) (SB.index q i) (SB.index r i) (SB.index s i) + in Classical.forall_intro aux; + eq_of_seq_from_element_equality eq (seq_of_products mul p q) (seq_of_products mul r s); + SP.foldm_snoc_equality add (seq_of_products mul p q) (seq_of_products mul r s) + +let matrix_mul_congruence #c #eq #m #n #p (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n p) + (mz: matrix c m n) (mw: matrix c n p) + : Lemma (requires matrix_eq_fun eq mx mz /\ matrix_eq_fun eq my mw) + (ensures matrix_eq_fun eq (matrix_mul add mul mx my) (matrix_mul add mul mz mw)) = + let aux (i: under m) (k: under p) : Lemma (ijth (matrix_mul add mul mx my) i k + `eq.eq` ijth (matrix_mul add mul mz mw) i k) = + let init_xy (j: under n) = mul.mult (ijth mx i j) (ijth my j k) in + let init_zw (j: under n) = mul.mult (ijth mz i j) (ijth mw j k) in + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx my i k init_xy; + matrix_mul_ijth_eq_sum_of_seq_for_init 
add mul mz mw i k init_zw; + let sp_xy = SB.init n init_xy in + let sp_zw = SB.init n init_zw in + let all_eq (j: under n) : Lemma (init_xy j `eq.eq` init_zw j) = + matrix_equiv_ijth eq mx mz i j; + matrix_equiv_ijth eq my mw j k; + mul.congruence (ijth mx i j) (ijth my j k) (ijth mz i j) (ijth mw j k) + in Classical.forall_intro all_eq; + eq_of_seq_from_element_equality eq sp_xy sp_zw; + SP.foldm_snoc_equality add sp_xy sp_zw + in matrix_equiv_from_proof eq (matrix_mul add mul mx my) (matrix_mul add mul mz mw) aux + +#push-options "--z3rlimit 30 --ifuel 0 --fuel 0" +let matrix_mul_is_left_distributive #c #eq #m #n #p (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx: matrix c m n) (my mz: matrix c n p) + : Lemma (matrix_mul add mul mx (matrix_add add my mz) `matrix_eq_fun eq` + matrix_add add (matrix_mul add mul mx my) (matrix_mul add mul mx mz)) = + let myz = matrix_add add my mz in + let mxy = matrix_mul add mul mx my in + let mxz = matrix_mul add mul mx mz in + let lhs = matrix_mul add mul mx myz in + let rhs = matrix_add add mxy mxz in + let sum_j (f: under n -> c) = SP.foldm_snoc add (SB.init n f) in + let sum_k (f: under p -> c) = SP.foldm_snoc add (SB.init p f) in + let aux i k : Lemma (ijth lhs i k `eq.eq` ijth rhs i k) = + let init_lhs j = mul.mult (ijth mx i j) (ijth myz j k) in + let init_xy j = mul.mult (ijth mx i j) (ijth my j k) in + let init_xz j = mul.mult (ijth mx i j) (ijth mz j k) in + let init_rhs j = mul.mult (ijth mx i j) (ijth my j k) `add.mult` + mul.mult (ijth mx i j) (ijth mz j k) in + Classical.forall_intro eq.reflexivity; + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx myz i k init_lhs; + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx my i k init_xy; + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx mz i k init_xz; + SP.foldm_snoc_split_seq add (SB.init n init_xy) + (SB.init n init_xz) + (SB.init n init_rhs) + (fun j -> ()); + eq.symmetry (ijth rhs i k) (sum_j init_rhs); + SP.foldm_snoc_of_equal_inits add init_lhs init_rhs; + eq.transitivity (ijth lhs i k) + (sum_j init_rhs) + (ijth rhs i k) + in matrix_equiv_from_proof eq lhs rhs aux +#pop-options + +let matrix_mul_is_right_distributive #c #eq #m #n #p (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx my: matrix c m n) (mz: matrix c n p) + : Lemma (matrix_mul add mul (matrix_add add mx my) mz `matrix_eq_fun eq` + matrix_add add (matrix_mul add mul mx mz) (matrix_mul add mul my mz)) = + let mxy = matrix_add add mx my in + let mxz = matrix_mul add mul mx mz in + let myz = matrix_mul add mul my mz in + let lhs = matrix_mul add mul mxy mz in + let rhs = matrix_add add mxz myz in + let sum_j (f: under n -> c) = SP.foldm_snoc add (SB.init n f) in + let sum_k (f: under p -> c) = SP.foldm_snoc add (SB.init p f) in + let aux i k : Lemma (ijth lhs i k `eq.eq` + ijth rhs i k) = + let init_lhs j = mul.mult (ijth mxy i j) (ijth mz j k) in + let init_xz j = mul.mult (ijth mx i j) (ijth mz j k) in + let init_yz j = mul.mult (ijth my i j) (ijth mz j k) in + let init_rhs j = mul.mult (ijth mx i j) (ijth mz j k) `add.mult` + mul.mult (ijth my i j) (ijth mz j k) in + Classical.forall_intro eq.reflexivity; + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mxy mz i k init_lhs; + matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx mz i k init_xz; + matrix_mul_ijth_eq_sum_of_seq_for_init add mul my mz i k init_yz; + SP.foldm_snoc_split_seq add (SB.init n init_xz) + (SB.init n init_yz) + (SB.init n init_rhs) + (fun j -> 
()); + eq.symmetry (ijth rhs i k) (sum_j init_rhs); + SP.foldm_snoc_of_equal_inits add init_lhs init_rhs; + eq.transitivity (ijth lhs i k) + (sum_j init_rhs) + (ijth rhs i k) + in matrix_equiv_from_proof eq lhs rhs aux +#pop-options diff --git a/stage0/ulib/FStar.Matrix.fsti b/stage0/ulib/FStar.Matrix.fsti new file mode 100644 index 00000000000..b3039acdc70 --- /dev/null +++ b/stage0/ulib/FStar.Matrix.fsti @@ -0,0 +1,365 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. Rozanov +*) + +(* + In this module we provide basic definitions to work with matrices via + seqs, and define transpose transform together with theorems that assert + matrix fold equality of original and transposed matrices. +*) + + +module FStar.Matrix + +module CE = FStar.Algebra.CommMonoid.Equiv +module CF = FStar.Algebra.CommMonoid.Fold +module SP = FStar.Seq.Permutation +module SB = FStar.Seq.Base +module ML = FStar.Math.Lemmas + +open FStar.IntegerIntervals +open FStar.Mul + +(* This is similar to lambdas passed to FStar.Seq.Base.init *) +type matrix_generator c (m n: pos) = under m -> under n -> c + +(* We hide the implementation details of a matrix. *) +val matrix (c:Type u#a) (m n : pos) : Type u#a + +(* This lemma asserts the flattened index to be valid + for the flattened matrix seq *) +let flattened_index_is_under_flattened_size (m n: pos) (i: under m) (j: under n) + : Lemma ((((i*n)+j)) < m*n) = assert (i*n <= (m-1)*n) + +(* Returns the flattened index from 2D indices pair + and the two array dimensions. 
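+   For example, in a 2 by 3 matrix the element at (i,j) = (1,2) gets the
+   flattened index 1*3 + 2 = 5, the last cell of the underlying length-6 seq.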
*)
+let get_ij (m n: pos) (i:under m) (j: under n) : under (m*n)
+  = flattened_index_is_under_flattened_size m n i j; i*n + j
+
+(* The following two functions return the matrix indices from the
+   flattened index and the two dimensions *)
+let get_i (m n: pos) (ij: under (m*n)) : under m = ij / n
+let get_j (m n: pos) (ij: under (m*n)) : under n = ij % n
+
+(* A proof that getting a 2D index back from the flattened
+   index works correctly *)
+let consistency_of_i_j (m n: pos) (i: under m) (j: under n)
+  : Lemma (get_i m n (get_ij m n i j) = i /\ get_j m n (get_ij m n i j) = j) =
+  flattened_index_is_under_flattened_size m n i j; //speeds up the proof
+  ML.lemma_mod_plus j i n;
+  ML.lemma_div_plus j i n
+
+(* A proof that getting the flattened index from 2D
+   indices works correctly *)
+let consistency_of_ij (m n: pos) (ij: under (m*n))
+  : Lemma (get_ij m n (get_i m n ij) (get_j m n ij) == ij) = ()
+
+(* The transposition transform for the flattened index *)
+let transpose_ji (m n: pos) (ij: under (m*n)) : under (n*m) =
+  flattened_index_is_under_flattened_size n m (get_j m n ij) (get_i m n ij);
+  (get_j m n ij)*m + (get_i m n ij)
+
+(* Auxiliary arithmetic lemma *)
+let indices_transpose_lemma (m: pos) (i: under m) (j: nat)
+  : Lemma (((j*m+i)%m=i) && ((j*m+i)/m=j)) = ML.lemma_mod_plus i j m
+
+(* A proof of transposition transform bijectivity *)
+let ji_is_transpose_of_ij (m n: pos) (ij: under (m*n))
+  : Lemma (transpose_ji n m (transpose_ji m n ij) = ij) =
+  indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
+
+(* A proof that 2D indices are swapped with the transposition transform *)
+let dual_indices (m n: pos) (ij: under (m*n)) : Lemma (
+  (get_j n m (transpose_ji m n ij) = get_i m n ij) /\
+  (get_i n m (transpose_ji m n ij) = get_j m n ij))
+  = consistency_of_ij m n ij;
+    indices_transpose_lemma m (get_i m n ij) (get_j m n ij)
+
+(* A matrix can always be treated as a flattened seq *)
+val seq_of_matrix : (#c: Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) ->
+  (s:SB.seq c {
+    SB.length s=m*n /\
+    (forall (ij: under (m*n)). SB.index s ij == SB.index s (get_ij m n (get_i m n ij) (get_j m n ij)))
+  })
+
+(* Indexer for a matrix *)
+val ijth : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
+  (t:c{t == SB.index (seq_of_matrix mx) (get_ij m n i j)})
+
+(* Indexer for a matrix returns the correct value *)
+val ijth_lemma : (#c:Type) -> (#m:pos) -> (#n:pos) -> (mx: matrix c m n) -> (i: under m) -> (j: under n) ->
+  Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j))
+
+(* A matrix can always be constructed from an m*n-sized seq *)
+val matrix_of_seq : (#c: Type) -> (m:pos) -> (n:pos) -> (s: SB.seq c{SB.length s = m*n}) -> matrix c m n
+
+(* A type for matrices constructed via concrete generator *)
+type matrix_of #c (#m #n: pos) (gen: matrix_generator c m n) = z:matrix c m n {
+  (forall (i: under m) (j: under n). ijth z i j == gen i j) /\
+  (forall (ij: under (m*n)).
(SB.index (seq_of_matrix z) ij) == (gen (get_i m n ij) (get_j m n ij))) +} + +(* Monoid-based fold of a matrix treated as a flat seq *) +val foldm : (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) -> c + +(* foldm_snoc of the corresponding seq is equal to foldm of the matrix *) +val matrix_fold_equals_fold_of_seq : + (#c:Type) -> (#eq:CE.equiv c) -> (#m:pos) -> (#n:pos) -> (cm: CE.cm c eq) -> (mx:matrix c m n) + -> Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)] + +(* A matrix constructed from given generator *) +val init : (#c:Type) -> (#m:pos) -> (#n: pos) -> (generator: matrix_generator c m n) + -> matrix_of generator + +(* A matrix fold is equal to double foldm_snoc over init-generated seq of seqs *) +val matrix_fold_equals_fold_of_seq_folds : (#c:Type) -> (#eq: CE.equiv c) -> + (#m: pos) -> (#n: pos) -> + (cm: CE.cm c eq) -> + (generator: matrix_generator c m n) -> + Lemma (ensures foldm cm (init generator) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) + /\ SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq` + SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) + ) + +(* This auxiliary lemma shows that the fold of the last line of a matrix + is equal to the corresponding fold of the generator function *) + +(* This lemma establishes that the fold of a matrix is equal to + nested Algebra.CommMonoid.Fold.fold over the matrix generator *) +val matrix_fold_equals_func_double_fold : (#c:Type) -> (#eq: CE.equiv c) -> + (#m: pos) -> (#n: pos) -> + (cm: CE.cm c eq) -> + (generator: matrix_generator c m n) -> + Lemma (foldm cm (init generator) `eq.eq` + CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i))) + +val transposed_matrix_gen (#c:_) (#m:pos) (#n:pos) (generator: matrix_generator c m n) + : (f: matrix_generator c n m { forall i j. f j i == generator i j }) + +val matrix_transpose_is_permutation (#c:_) (#m #n: pos) + (generator: matrix_generator c m n) + : Lemma (SP.is_permutation (seq_of_matrix (init generator)) + (seq_of_matrix (init (transposed_matrix_gen generator))) + (transpose_ji m n)) + +val matrix_fold_equals_fold_of_transpose (#c:_) (#eq:_) + (#m #n: pos) + (cm: CE.cm c eq) + (gen: matrix_generator c m n) + : Lemma (foldm cm (init gen) `eq.eq` + foldm cm (init (transposed_matrix_gen gen))) + +(* The equivalence relation defined for matrices of given dimensions *) +val matrix_equiv : (#c: Type) -> + (eq: CE.equiv c) -> + (m: pos) -> (n: pos) -> + CE.equiv (matrix c m n) + +(* element-wise matrix equivalence lemma *) +val matrix_equiv_ijth (#c:_) (#m #n: pos) (eq: CE.equiv c) + (ma mb: matrix c m n) (i: under m) (j: under n) + : Lemma (requires (matrix_equiv eq m n).eq ma mb) + (ensures ijth ma i j `eq.eq` ijth mb i j) + +(* We can always establish matrix equivalence from element-wise equivalence *) +val matrix_equiv_from_element_eq (#c:_) (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n) + : Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j)) + (ensures (matrix_equiv eq m n).eq ma mb) + +(* + Notice that even though we can (and will) construct CommMonoid for matrix addition, + we still publish the operations as well since as soon as we get to multiplication, + results usually have different dimensions, so it would be convenient to have both + the CommMonoid for matrix addition and the explicit addition function. 
+ + This becomes the only way with non-square matrix multiplication, since these + would not constitute a monoid to begin with. +*) + +(* This version of the lemma is useful if we don't want to invoke + Classical.forall_intro_2 in a big proof to conserve resources *) +let matrix_equiv_from_proof #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n) + (proof: (i:under m) -> (j:under n) -> Lemma (eq.eq (ijth ma i j) (ijth mb i j))) + : Lemma ((matrix_equiv eq m n).eq ma mb) + = Classical.forall_intro_2 proof; + matrix_equiv_from_element_eq eq ma mb + +(* This one is the generator function for sum of matrices *) +let matrix_add_generator #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) + : matrix_generator c m n = fun i j -> add.mult (ijth ma i j) (ijth mb i j) + +(* This is the matrix sum operation given the addition CommMonoid *) +let matrix_add #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) + : matrix_of (matrix_add_generator add ma mb) + = init (matrix_add_generator add ma mb) + +(* Sum of matrices ijth element lemma *) +let matrix_add_ijth #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n) (i: under m) (j: under n) + : Lemma (ijth (matrix_add add ma mb) i j == add.mult (ijth ma i j) (ijth mb i j)) = () + +(* m*n-sized matrix addition CommMonoid *) +val matrix_add_comm_monoid : (#c:Type) -> + (#eq:CE.equiv c) -> + (add: CE.cm c eq) -> + (m:pos) -> (n: pos) -> + CE.cm (matrix c m n) (matrix_equiv eq m n) + + +(* Sometimes we want matrix rows and columns to be accessed as sequences *) +let col #c #m #n (mx: matrix c m n) (j: under n) = SB.init m (fun (i: under m) -> ijth mx i j) + +let row #c #m #n (mx: matrix c m n) (i: under m) = SB.init n (fun (j: under n) -> ijth mx i j) + +(* ijth-based and row/col-based element access methods are equivalent *) +val matrix_row_col_lemma (#c:_) (#m #n:pos) (mx: matrix c m n) (i: under m) (j: under n) + : Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i) + +(* This transforms a seq X={Xi} into a seq X={Xi `op` c} *) +let seq_op_const #c #eq (cm: CE.cm c eq) (s: SB.seq c) (const: c) + = SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult (SB.index s i) const) + +(* Well, technically it is the same thing as above, given cm is commutative. + We will still use prefix and postfix applications separately since + sometimes provable equality (==) rather than `eq.eq` comes in handy *) +let const_op_seq #c #eq (cm: CE.cm c eq) (const: c) (s: SB.seq c) + = SB.init (SB.length s) (fun (i: under (SB.length s)) -> cm.mult const (SB.index s i)) + + +(* We can get a sequence of products (or sums) from two sequences of equal length *) +let seq_of_products #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s}) + = SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i) + +(* As trivial as it seems to be, sometimes this lemma proves to be useful, mostly because + lemma_eq_elim invocation is surprisingly costly resources-wise. 
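+   A typical call passes an r that is already known, via SB.equal, to coincide
+   with the pointwise products, and yields the propositional equality
+   seq_of_products mul s t == r without a manual lemma_eq_elim invocation.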
*) +val seq_of_products_lemma (#c:_) (#eq:_) (mul: CE.cm c eq) + (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s}) + (r: SB.seq c { SB.equal r (SB.init (SB.length s) + (fun (i: under (SB.length s)) -> + SB.index s i `mul.mult` SB.index t i))}) + : Lemma (seq_of_products mul s t == r) + +(* The usual dot product of two sequences of equal lengths *) +let dot #c #eq (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s}) + = SP.foldm_snoc add (seq_of_products mul s t) + +val dot_lemma (#c:_) (#eq:_) (add mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c{SB.length t == SB.length s}) + : Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t)) + +(* Of course, it would be best to define the matrix product as a convolution, + but we don't have all the necessary framework for that level of generality yet. *) +val matrix_mul (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p) + : matrix c m p + +(* Both distributivity laws hold for matrices as shown below *) +let is_left_distributive #c #eq (mul add: CE.cm c eq) = + forall (x y z: c). mul.mult x (add.mult y z) `eq.eq` add.mult (mul.mult x y) (mul.mult x z) + +let is_right_distributive #c #eq (mul add: CE.cm c eq) = + forall (x y z: c). mul.mult (add.mult x y) z `eq.eq` add.mult (mul.mult x z) (mul.mult y z) + +let is_fully_distributive #c #eq (mul add: CE.cm c eq) = is_left_distributive mul add /\ is_right_distributive mul add + +(* + This definition is of course far more general than matrices, and should rather + be a part of algebra core, as it is relevant to any magma. + + In the process of development of F* abstract algebra framework, this definition + will probably take its rightful place near the most basic of grouplike structures. + + Also note that this property is defined via forall. We would probably want + to make such properties opaque to SMT in the future, to avoid verification performance + issues. +*) +let is_absorber #c #eq (z:c) (op: CE.cm c eq) = + forall (x:c). op.mult z x `eq.eq` z /\ op.mult x z `eq.eq` z + +(* + Similar lemmas to reason about matrix product elements + We're going to refactor these a bit, as some are clearly redundant. + Might want to keep internal usages to one variant of the lemma and + remove the rest. 
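+   Roughly: matrix_mul_ijth phrases the cell as a dot product of a row and a column,
+   matrix_mul_ijth_as_sum unfolds it into a foldm_snoc over an explicit init,
+   and the _eq_sum_of_seq / _for_init variants let the caller substitute any
+   SB.equal sequence or generator, which is what the implementation proofs rely on.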
+*) +val matrix_mul_ijth (#c:_) (#eq:_) (#m #n #k:pos) (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n k) (i: under m) (h: under k) + : Lemma (ijth (matrix_mul add mul mx my) i h == dot add mul (row mx i) (col my h)) + +val matrix_mul_ijth_as_sum (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p) + : Lemma (ijth (matrix_mul add mul mx my) i k == + SP.foldm_snoc add (SB.init n (fun (j: under n) -> mul.mult (ijth mx i j) (ijth my j k)))) + +val matrix_mul_ijth_eq_sum_of_seq (#c:_) (#eq:_) (#m #n #p:pos) (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p) + (r: SB.seq c{r `SB.equal` seq_of_products mul (row mx i) (col my k)}) + : Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add r) + +val matrix_mul_ijth_eq_sum_of_seq_for_init (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p) + (f: under n -> c { SB.init n f `SB.equal` seq_of_products mul (row mx i) (col my k)}) + : Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add (SB.init n f)) + + +(* Basically, we prove that (XY)Z = X(YZ) for any matrices of compatible sizes *) +val matrix_mul_is_associative (#c:_) (#eq:_) (#m #n #p #q: pos) (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx: matrix c m n) (my: matrix c n p) (mz: matrix c p q) + : Lemma ((matrix_equiv eq m q).eq ((matrix_mul add mul mx my) `matrix_mul add mul` mz) + (matrix_mul add mul mx (matrix_mul add mul my mz))) + +(* Square identity matrix of size m*m *) +let matrix_mul_unit #c #eq (add mul: CE.cm c eq) m + : matrix c m m = init (fun i j -> if i=j then mul.unit else add.unit) + +(* Matrix multiplicative identity lemmas *) +val matrix_mul_right_identity (#c:_) (#eq:_) (#m: pos) (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + : Lemma (matrix_mul add mul mx (matrix_mul_unit add mul m) `(matrix_equiv eq m m).eq` mx) + +val matrix_mul_left_identity (#c:_) (#eq:_) (#m: pos) (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + : Lemma (matrix_mul add mul (matrix_mul_unit add mul m) mx `(matrix_equiv eq m m).eq` mx) + +val matrix_mul_identity (#c:_) (#eq:_) (#m: pos) (add: CE.cm c eq) + (mul: CE.cm c eq{is_absorber add.unit mul}) + (mx: matrix c m m) + : Lemma (matrix_mul add mul mx (matrix_mul_unit add mul m) `(matrix_equiv eq m m).eq` mx /\ + matrix_mul add mul (matrix_mul_unit add mul m) mx `(matrix_equiv eq m m).eq` mx) + +(* Matrix multiplication of course also respects matrix equivalence *) +val matrix_mul_congruence (#c:_) (#eq:_) (#m #n #p:pos) (add mul: CE.cm c eq) + (mx: matrix c m n) (my: matrix c n p) + (mz: matrix c m n) (mw: matrix c n p) + : Lemma (requires (matrix_equiv eq m n).eq mx mz /\ (matrix_equiv eq n p).eq my mw) + (ensures (matrix_equiv eq m p).eq (matrix_mul add mul mx my) + (matrix_mul add mul mz mw)) + +(* Both distributivities for matrices *) +val matrix_mul_is_left_distributive (#c:_) (#eq:_) (#m #n #p:pos) (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx: matrix c m n) (my mz: matrix c n p) + : Lemma (matrix_mul add mul mx (matrix_add add my mz) `(matrix_equiv eq m p).eq` + matrix_add add (matrix_mul add mul mx my) (matrix_mul add mul mx mz)) + +val matrix_mul_is_right_distributive (#c:_) (#eq:_) (#m #n 
#p:pos) (add: CE.cm c eq) + (mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul}) + (mx my: matrix c m n) (mz: matrix c n p) + : Lemma (matrix_mul add mul (matrix_add add mx my) mz `(matrix_equiv eq m p).eq` + matrix_add add (matrix_mul add mul mx mz) (matrix_mul add mul my mz)) diff --git a/stage0/ulib/FStar.Modifies.fst b/stage0/ulib/FStar.Modifies.fst new file mode 100644 index 00000000000..106a2910789 --- /dev/null +++ b/stage0/ulib/FStar.Modifies.fst @@ -0,0 +1,459 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Modifies + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module B = FStar.Buffer +module U32 = FStar.UInt32 + +noeq +type loc_aux : Type = + | LocBuffer: + (#t: Type) -> + (b: B.buffer t) -> + loc_aux + +let loc_aux_in_addr + (l: loc_aux) + (r: HS.rid) + (n: nat) +: GTot Type0 += match l with + | LocBuffer b -> + B.frameOf b == r /\ + B.as_addr b == n + +let aloc (r: HS.rid) (n: nat) : Tot (Type u#1) = + (l: loc_aux { loc_aux_in_addr l r n } ) + +let loc_aux_includes_buffer + (#a: Type) + (s: loc_aux) + (b: B.buffer a) +: GTot Type0 += match s with + | LocBuffer #a0 b0 -> a == a0 /\ b0 `B.includes` b + +let loc_aux_includes + (s1 s2: loc_aux) +: GTot Type0 + (decreases s2) += match s2 with + | LocBuffer b -> loc_aux_includes_buffer s1 b + +let loc_aux_includes_refl + (s: loc_aux) +: Lemma + (loc_aux_includes s s) += () + +let loc_aux_includes_buffer_includes + (#a: Type) + (s: loc_aux) + (b1 b2: B.buffer a) +: Lemma + (requires (loc_aux_includes_buffer s b1 /\ b1 `B.includes` b2)) + (ensures (loc_aux_includes_buffer s b2)) += () + +let loc_aux_includes_loc_aux_includes_buffer + (#a: Type) + (s1 s2: loc_aux) + (b: B.buffer a) +: Lemma + (requires (loc_aux_includes s1 s2 /\ loc_aux_includes_buffer s2 b)) + (ensures (loc_aux_includes_buffer s1 b)) += match s2 with + | LocBuffer b2 -> loc_aux_includes_buffer_includes s1 b2 b + +let loc_aux_includes_trans + (s1 s2 s3: loc_aux) +: Lemma + (requires (loc_aux_includes s1 s2 /\ loc_aux_includes s2 s3)) + (ensures (loc_aux_includes s1 s3)) += match s3 with + | LocBuffer b -> loc_aux_includes_loc_aux_includes_buffer s1 s2 b + +(* the following is necessary because `decreases` messes up 2nd-order unification with `Classical.forall_intro_3` *) + +let loc_aux_includes_trans' + (s1 s2: loc_aux) + (s3: loc_aux) +: Lemma + ((loc_aux_includes s1 s2 /\ loc_aux_includes s2 s3) ==> loc_aux_includes s1 s3) += Classical.move_requires (loc_aux_includes_trans s1 s2) s3 + +let loc_aux_disjoint_buffer + (l: loc_aux) + (#t: Type) + (p: B.buffer t) +: GTot Type0 += match l with + | LocBuffer b -> B.disjoint b p + +let loc_aux_disjoint + (l1 l2: loc_aux) +: GTot Type0 += match l2 with + | LocBuffer b -> + loc_aux_disjoint_buffer l1 b + +let loc_aux_disjoint_sym + (l1 l2: loc_aux) +: Lemma + (ensures (loc_aux_disjoint l1 l2 <==> loc_aux_disjoint l2 l1)) += () + +let loc_aux_disjoint_buffer_includes + (l: loc_aux) + (#t: Type) + (p1: B.buffer t) + (p2: B.buffer t) +: 
Lemma + (requires (loc_aux_disjoint_buffer l p1 /\ p1 `B.includes` p2)) + (ensures (loc_aux_disjoint_buffer l p2)) += () + +let loc_aux_disjoint_loc_aux_includes_buffer + (l1 l2: loc_aux) + (#t3: Type) + (b3: B.buffer t3) +: Lemma + (requires (loc_aux_disjoint l1 l2 /\ loc_aux_includes_buffer l2 b3)) + (ensures (loc_aux_disjoint_buffer l1 b3)) += match l2 with + | LocBuffer b2 -> loc_aux_disjoint_buffer_includes l1 b2 b3 + +let loc_aux_disjoint_loc_aux_includes + (l1 l2 l3: loc_aux) +: Lemma + (requires (loc_aux_disjoint l1 l2 /\ loc_aux_includes l2 l3)) + (ensures (loc_aux_disjoint l1 l3)) += match l3 with + | LocBuffer b3 -> + loc_aux_disjoint_loc_aux_includes_buffer l1 l2 b3 + +let loc_aux_preserved (l: loc_aux) (h1 h2: HS.mem) : GTot Type0 += match l with + | LocBuffer b -> + ( + B.live h1 b + ) ==> ( + B.live h2 b /\ + B.as_seq h2 b == B.as_seq h1 b + ) + +module MG = FStar.ModifiesGen + +let cls : MG.cls aloc = MG.Cls #aloc + (fun #r #a -> loc_aux_includes) + (fun #r #a x -> ()) + (fun #r #a x1 x2 x3 -> ()) + (fun #r #a -> loc_aux_disjoint) + (fun #r #a x1 x2 -> ()) + (fun #r #a larger1 larger2 smaller1 smaller2 -> ()) + (fun #r #a -> loc_aux_preserved) + (fun #r #a x h -> ()) + (fun #r #a x h1 h2 h3 -> ()) + (fun #r #a b h1 h2 f -> + match b with + | LocBuffer b' -> + let g () : Lemma + (requires (B.live h1 b')) + (ensures (loc_aux_preserved b h1 h2)) + = f _ _ (B.content b') + in + Classical.move_requires g () + ) + +let loc = MG.loc cls + +let loc_none = MG.loc_none + +let loc_union = MG.loc_union + +let loc_union_idem = MG.loc_union_idem + +let loc_union_comm = MG.loc_union_comm + +let loc_union_assoc = MG.loc_union_assoc + +let loc_union_loc_none_l = MG.loc_union_loc_none_l + +let loc_union_loc_none_r = MG.loc_union_loc_none_r + +let loc_buffer #t b = + MG.loc_of_aloc #_ #cls #(B.frameOf b) #(B.as_addr b) (LocBuffer b) + +let loc_addresses = MG.loc_addresses + +let loc_regions = MG.loc_regions + +let loc_includes = MG.loc_includes + +let loc_includes_refl = MG.loc_includes_refl + +let loc_includes_trans = MG.loc_includes_trans + +let loc_includes_union_r = MG.loc_includes_union_r + +let loc_includes_union_l = MG.loc_includes_union_l + +let loc_includes_none = MG.loc_includes_none + +let loc_includes_buffer #t b1 b2 = + MG.loc_includes_aloc #_ #cls #(B.frameOf b1) #(B.as_addr b1) (LocBuffer b1) (LocBuffer b2) + +let loc_includes_gsub_buffer_r l #t b i len = + loc_includes_trans l (loc_buffer b) (loc_buffer (B.sub b i len)) + +let loc_includes_gsub_buffer_l #t b i1 len1 i2 len2 = () + +let loc_includes_addresses_buffer #t preserve_liveness r s p = + MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(B.as_addr p) (LocBuffer p) + +let loc_includes_region_buffer #t preserve_liveness s b = + MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(B.frameOf b) #(B.as_addr b) (LocBuffer b) + +let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls + +let loc_includes_region_region = MG.loc_includes_region_region #_ #cls + +let loc_includes_region_union_l = MG.loc_includes_region_union_l + +let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses #_ cls + +let loc_disjoint = MG.loc_disjoint + +let loc_disjoint_sym = MG.loc_disjoint_sym + +let loc_disjoint_none_r = MG.loc_disjoint_none_r + +let loc_disjoint_union_r = MG.loc_disjoint_union_r + +let loc_disjoint_includes = MG.loc_disjoint_includes + +let loc_disjoint_buffer #t1 #t2 b1 b2 = + MG.loc_disjoint_aloc_intro #_ #cls #(B.frameOf b1) #(B.as_addr b1) #(B.frameOf b2) #(B.as_addr b2) 
(LocBuffer b1) (LocBuffer b2) + +let loc_disjoint_gsub_buffer #t b i1 len1 i2 len2 = () + +let loc_disjoint_addresses = MG.loc_disjoint_addresses #_ #cls + +let loc_disjoint_buffer_addresses #t p preserve_liveness r n = + MG.loc_disjoint_aloc_addresses_intro #_ #cls #(B.frameOf p) #(B.as_addr p) (LocBuffer p) preserve_liveness r n + +let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls + +let modifies = MG.modifies + +let modifies_mreference_elim = MG.modifies_mreference_elim + +let modifies_buffer_elim #t1 b p h h' = + MG.modifies_aloc_elim #_ #cls #(B.frameOf b) #(B.as_addr b) (LocBuffer b) p h h' + +let modifies_refl = MG.modifies_refl + +let modifies_loc_includes = MG.modifies_loc_includes + +let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _ + +let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _ + +let address_liveness_insensitive_buffer #t b = + MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(B.frameOf b) #(B.as_addr b) (LocBuffer b) + +let address_liveness_insensitive_addresses = + MG.loc_includes_address_liveness_insensitive_locs_addresses cls + +let region_liveness_insensitive_buffer #t b = + MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(B.frameOf b) #(B.as_addr b) (LocBuffer b) + +let region_liveness_insensitive_addresses = + MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls + +let region_liveness_insensitive_regions = + MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls + +let region_liveness_insensitive_address_liveness_insensitive = + MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls + +let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness + +let modifies_liveness_insensitive_buffer l1 l2 h h' #t x = + MG.modifies_preserves_liveness_strong l1 l2 h h' (B.content x) (LocBuffer x) + +let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness + +let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference + +let modifies_liveness_insensitive_region_buffer l1 l2 h h' #t x = + MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(B.frameOf x) #(B.as_addr x) (LocBuffer x) + + +let modifies_trans = MG.modifies_trans + +let modifies_only_live_regions = MG.modifies_only_live_regions + +let no_upd_fresh_region = MG.no_upd_fresh_region + +let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped + +let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls + +let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro + +let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls + +let modifies_salloc_post = MG.modifies_salloc_post #_ #cls + +let modifies_free = MG.modifies_free #_ #cls + +let modifies_none_modifies = MG.modifies_none_modifies #_ #cls + +let modifies_buffer_none_modifies h1 h2 = + MG.modifies_none_intro #_ #cls h1 h2 + (fun _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + +let modifies_0_modifies h1 h2 = + B.lemma_reveal_modifies_0 h1 h2; + MG.modifies_none_intro #_ #cls h1 h2 + (fun _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + +let modifies_1_modifies #a b h1 h2 = + B.lemma_reveal_modifies_1 b h1 h2; + MG.modifies_intro (loc_buffer b) h1 h2 + (fun _ -> ()) + (fun t' pre' b' -> + MG.loc_disjoint_sym (loc_mreference b') (loc_buffer b); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(B.frameOf b) #(B.as_addr b) (LocBuffer b) true (HS.frameOf b') (Set.singleton (HS.as_addr b')) + ) + (fun t' 
pre' b' -> ()) + (fun r n -> ()) + (fun r' a' b' -> + MG.loc_disjoint_aloc_elim #_ #cls #r' #a' #(B.frameOf b) #(B.as_addr b) b' (LocBuffer b) + ) + +let modifies_2_modifies #a1 #a2 b1 b2 h1 h2 = + B.lemma_reveal_modifies_2 b1 b2 h1 h2; + MG.modifies_intro (loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2 + (fun _ -> ()) + (fun t' pre' b' -> + loc_disjoint_includes (loc_mreference b') (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_mreference b') (loc_buffer b1); + loc_disjoint_sym (loc_mreference b') (loc_buffer b1); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(B.frameOf b1) #(B.as_addr b1) (LocBuffer b1) true (HS.frameOf b') (Set.singleton (HS.as_addr b')); + loc_disjoint_includes (loc_mreference b') (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_mreference b') (loc_buffer b2); + loc_disjoint_sym (loc_mreference b') (loc_buffer b2); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(B.frameOf b2) #(B.as_addr b2) (LocBuffer b2) true (HS.frameOf b') (Set.singleton (HS.as_addr b')) + ) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + (fun r' a' b' -> + loc_disjoint_includes (MG.loc_of_aloc b') (loc_union (loc_buffer b1) (loc_buffer b2)) (MG.loc_of_aloc b') (loc_buffer b1); + MG.loc_disjoint_aloc_elim #_ #cls #r' #a' #(B.frameOf b1) #(B.as_addr b1) b' (LocBuffer b1); + loc_disjoint_includes (MG.loc_of_aloc b') (loc_union (loc_buffer b1) (loc_buffer b2)) (MG.loc_of_aloc b') (loc_buffer b2); + MG.loc_disjoint_aloc_elim #_ #cls #r' #a' #(B.frameOf b2) #(B.as_addr b2) b' (LocBuffer b2) + ) + +#set-options "--z3rlimit 20" + +let modifies_3_modifies #a1 #a2 #a3 b1 b2 b3 h1 h2 = + B.lemma_reveal_modifies_3 b1 b2 b3 h1 h2; + MG.modifies_intro (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2 + (fun _ -> ()) + (fun t' pre' b' -> + loc_disjoint_includes (loc_mreference b') (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) (loc_mreference b') (loc_buffer b1); + loc_disjoint_sym (loc_mreference b') (loc_buffer b1); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(B.frameOf b1) #(B.as_addr b1) (LocBuffer b1) true (HS.frameOf b') (Set.singleton (HS.as_addr b')); + loc_disjoint_includes (loc_mreference b') (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) (loc_mreference b') (loc_buffer b2); + loc_disjoint_sym (loc_mreference b') (loc_buffer b2); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(B.frameOf b2) #(B.as_addr b2) (LocBuffer b2) true (HS.frameOf b') (Set.singleton (HS.as_addr b')); + loc_disjoint_includes (loc_mreference b') (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) (loc_mreference b') (loc_buffer b3); + loc_disjoint_sym (loc_mreference b') (loc_buffer b3); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(B.frameOf b3) #(B.as_addr b3) (LocBuffer b3) true (HS.frameOf b') (Set.singleton (HS.as_addr b')) + ) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + (fun r' a' b' -> + loc_disjoint_includes (MG.loc_of_aloc b') (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) (MG.loc_of_aloc b') (loc_buffer b1); + MG.loc_disjoint_aloc_elim #_ #cls #r' #a' #(B.frameOf b1) #(B.as_addr b1) b' (LocBuffer b1); + loc_disjoint_includes (MG.loc_of_aloc b') (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) (MG.loc_of_aloc b') (loc_buffer b2); + MG.loc_disjoint_aloc_elim #_ #cls #r' #a' #(B.frameOf b2) #(B.as_addr b2) b' (LocBuffer b2); + loc_disjoint_includes (MG.loc_of_aloc b') (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) (MG.loc_of_aloc b') (loc_buffer b3); + 
MG.loc_disjoint_aloc_elim #_ #cls #r' #a' #(B.frameOf b3) #(B.as_addr b3) b' (LocBuffer b3) + ) + +#reset-options + +let modifies_buffer_rcreate_post_common #a r init len b h0 h1 = + MG.modifies_none_intro #_ #cls h0 h1 + (fun _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + +let mreference_live_buffer_unused_in_disjoint #t1 #pre #t2 h b1 b2 = + loc_disjoint_includes (loc_freed_mreference b1) (loc_freed_mreference (B.content b2)) (loc_freed_mreference b1) (loc_buffer b2) + +let buffer_live_mreference_unused_in_disjoint #t1 #t2 #pre h b1 b2 = + loc_disjoint_includes (loc_freed_mreference (B.content b1)) (loc_freed_mreference b2) (loc_buffer b1) (loc_freed_mreference b2) + +let does_not_contain_addr = MG.does_not_contain_addr + +let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr + +let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr + +let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr + +let free_does_not_contain_addr = MG.free_does_not_contain_addr + +let does_not_contain_addr_elim = MG.does_not_contain_addr_elim + +let modifies_only_live_addresses = MG.modifies_only_live_addresses + + +(* Type class instance *) + +let cloc_aloc = aloc + +let cloc_cls = cls + +let cloc_of_loc l = l + +let loc_of_cloc l = l + +let loc_of_cloc_of_loc l = () + +let cloc_of_loc_of_cloc l = () + +let cloc_of_loc_none _ = () + +let cloc_of_loc_union _ _ = () + +let cloc_of_loc_addresses _ _ _ = () + +let cloc_of_loc_regions _ _ = () + +let loc_includes_to_cloc l1 l2 = () + +let loc_disjoint_to_cloc l1 l2 = () + +let modifies_to_cloc l h1 h2 = () diff --git a/stage0/ulib/FStar.Modifies.fsti b/stage0/ulib/FStar.Modifies.fsti new file mode 100644 index 00000000000..f4fab8c0f4c --- /dev/null +++ b/stage0/ulib/FStar.Modifies.fsti @@ -0,0 +1,870 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Modifies + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module B = FStar.Buffer + +(*** The modifies clause *) + +val loc : Type u#1 + +val loc_none: loc + +val loc_union + (s1 s2: loc) +: GTot loc + +(** The following is useful to make Z3 cut matching loops with +modifies_trans and modifies_refl *) +val loc_union_idem + (s: loc) +: Lemma + (loc_union s s == s) + [SMTPat (loc_union s s)] + +val loc_union_comm + (s1 s2: loc) +: Lemma + (loc_union s1 s2 == loc_union s2 s1) + [SMTPat (loc_union s1 s2)] + +val loc_union_assoc + (s1 s2 s3: loc) +: Lemma + (loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3) + +val loc_union_loc_none_l + (s: loc) +: Lemma + (loc_union loc_none s == s) + [SMTPat (loc_union loc_none s)] + +val loc_union_loc_none_r + (s: loc) +: Lemma + (loc_union s loc_none == s) + [SMTPat (loc_union s loc_none)] + +val loc_buffer + (#t: Type) + (b: B.buffer t) +: GTot loc + +val loc_addresses + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: GTot loc + +val loc_regions + (preserve_liveness: bool) + (r: Set.set HS.rid) +: GTot loc + +let loc_mreference + (#a: Type) + (#p: Preorder.preorder a) + (b: HS.mreference a p) +: GTot loc += loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) + +let loc_freed_mreference + (#a: Type) + (#p: Preorder.preorder a) + (b: HS.mreference a p) +: GTot loc += loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b)) + +let loc_region_only + (preserve_liveness: bool) + (r: HS.rid) +: GTot loc += loc_regions preserve_liveness (Set.singleton r) + +let loc_all_regions_from + (preserve_liveness: bool) + (r: HS.rid) +: GTot loc += loc_regions preserve_liveness (HS.mod_set (Set.singleton r)) + + +(* Inclusion of memory locations *) + +val loc_includes + (s1 s2: loc) +: GTot Type0 + +val loc_includes_refl + (s: loc) +: Lemma + (loc_includes s s) + [SMTPat (loc_includes s s)] + +val loc_includes_trans + (s1 s2 s3: loc) +: Lemma + (requires (loc_includes s1 s2 /\ loc_includes s2 s3)) + (ensures (loc_includes s1 s3)) + +val loc_includes_union_r + (s s1 s2: loc) +: Lemma + (requires (loc_includes s s1 /\ loc_includes s s2)) + (ensures (loc_includes s (loc_union s1 s2))) + [SMTPat (loc_includes s (loc_union s1 s2))] + +val loc_includes_union_l + (s1 s2 s: loc) +: Lemma + (requires (loc_includes s1 s \/ loc_includes s2 s)) + (ensures (loc_includes (loc_union s1 s2) s)) + [SMTPat (loc_includes (loc_union s1 s2) s)] + +val loc_includes_none + (s: loc) +: Lemma + (loc_includes s loc_none) + [SMTPat (loc_includes s loc_none)] + +val loc_includes_buffer + (#t: Type) + (b1 b2: B.buffer t) +: Lemma + (requires (b1 `B.includes` b2)) + (ensures (loc_includes (loc_buffer b1) (loc_buffer b2))) + [SMTPatOr [ + [SMTPat (B.includes b1 b2)]; + [SMTPat (loc_includes(loc_buffer b1) (loc_buffer b2))] + ]] + +val loc_includes_gsub_buffer_r + (l: loc) + (#t: Type) + (b: B.buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= (B.length b) /\ loc_includes l (loc_buffer b))) + (ensures (UInt32.v i + UInt32.v len <= (B.length b) /\ loc_includes l (loc_buffer (B.sub b i len)))) + [SMTPat (loc_includes l (loc_buffer (B.sub b i len)))] + +val loc_includes_gsub_buffer_l + (#t: Type) + (b: B.buffer t) + (i1: UInt32.t) + (len1: UInt32.t) + (i2: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires (UInt32.v i1 + UInt32.v len1 <= (B.length b) /\ UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1)) + (ensures (UInt32.v i1 + UInt32.v 
len1 <= (B.length b) /\ UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1 /\ loc_includes (loc_buffer (B.sub b i1 len1)) (loc_buffer (B.sub b i2 len2)))) + [SMTPat (loc_includes (loc_buffer (B.sub b i1 len1)) (loc_buffer (B.sub b i2 len2)))] + +val loc_includes_addresses_buffer + (#t: Type) + (preserve_liveness: bool) + (r: HS.rid) + (s: Set.set nat) + (p: B.buffer t) +: Lemma + (requires (B.frameOf p == r /\ Set.mem (B.as_addr p) s)) + (ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))) + [SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))] + +val loc_includes_region_buffer + (#t: Type) + (preserve_liveness: bool) + (s: Set.set HS.rid) + (b: B.buffer t) +: Lemma + (requires (Set.mem (B.frameOf b) s)) + (ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))) + [SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))] + +val loc_includes_region_addresses + (preserve_liveness1: bool) + (preserve_liveness2: bool) + (s: Set.set HS.rid) + (r: HS.rid) + (a: Set.set nat) +: Lemma + (requires (Set.mem r s)) + (ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))) + [SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))] + +val loc_includes_region_region + (preserve_liveness1: bool) + (preserve_liveness2: bool) + (s1 s2: Set.set HS.rid) +: Lemma + (requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1)) + (ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))) + [SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))] + +val loc_includes_region_union_l + (preserve_liveness: bool) + (l: loc) + (s1 s2: Set.set HS.rid) +: Lemma + (requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1))))) + (ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))) + [SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))] + +val loc_includes_addresses_addresses + (preserve_liveness1 preserve_liveness2: bool) + (r: HS.rid) + (s1 s2: Set.set nat) +: Lemma + (requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1)) + (ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2))) + + +(* Disjointness of two memory locations *) + +val loc_disjoint + (s1 s2: loc) +: GTot Type0 + +val loc_disjoint_sym + (s1 s2: loc) +: Lemma + (requires (loc_disjoint s1 s2)) + (ensures (loc_disjoint s2 s1)) + +let loc_disjoint_sym' + (s1 s2: loc) +: Lemma + (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) + [SMTPat (loc_disjoint s1 s2)] += Classical.move_requires (loc_disjoint_sym s1) s2; + Classical.move_requires (loc_disjoint_sym s2) s1 + +val loc_disjoint_none_r + (s: loc) +: Lemma + (ensures (loc_disjoint s loc_none)) + [SMTPat (loc_disjoint s loc_none)] + +val loc_disjoint_union_r + (s s1 s2: loc) +: Lemma + (requires (loc_disjoint s s1 /\ loc_disjoint s s2)) + (ensures (loc_disjoint s (loc_union s1 s2))) + [SMTPat (loc_disjoint s (loc_union s1 s2))] + +val loc_disjoint_includes + (p1 p2 p1' p2' : loc) +: Lemma + (requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2)) + (ensures (loc_disjoint p1' p2')) + [SMTPatOr [ + [SMTPat (loc_disjoint p1 p2); SMTPat (loc_disjoint p1' p2')]; + [SMTPat (loc_includes p1 p1'); SMTPat 
(loc_includes p2 p2')]; + ]] + +val loc_disjoint_buffer + (#t1 #t2: Type) + (b1: B.buffer t1) + (b2: B.buffer t2) +: Lemma + (requires (B.disjoint b1 b2)) + (ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2))) + [SMTPatOr [ + [SMTPat (B.disjoint b1 b2)]; + [SMTPat (loc_disjoint (loc_buffer b1) (loc_buffer b2))]; + ]] + +val loc_disjoint_gsub_buffer + (#t: Type) + (b: B.buffer t) + (i1: UInt32.t) + (len1: UInt32.t) + (i2: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 + UInt32.v len1 <= (B.length b) /\ + UInt32.v i2 + UInt32.v len2 <= (B.length b) /\ ( + UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/ + UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + ))) + (ensures ( + UInt32.v i1 + UInt32.v len1 <= (B.length b) /\ + UInt32.v i2 + UInt32.v len2 <= (B.length b) /\ + loc_disjoint (loc_buffer (B.sub b i1 len1)) (loc_buffer (B.sub b i2 len2)) + )) + [SMTPat (loc_disjoint (loc_buffer (B.sub b i1 len1)) (loc_buffer (B.sub b i2 len2)))] + +val loc_disjoint_addresses + (preserve_liveness1 preserve_liveness2: bool) + (r1 r2: HS.rid) + (n1 n2: Set.set nat) +: Lemma + (requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty)) + (ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))) + [SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))] + +val loc_disjoint_buffer_addresses + (#t: Type) + (p: B.buffer t) + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: Lemma + (requires (r <> B.frameOf p \/ (~ (Set.mem (B.as_addr p) n)))) + (ensures (loc_disjoint (loc_buffer p) (loc_addresses preserve_liveness r n))) + [SMTPat (loc_disjoint (loc_buffer p) (loc_addresses preserve_liveness r n))] + +val loc_disjoint_regions + (preserve_liveness1 preserve_liveness2: bool) + (rs1 rs2: Set.set HS.rid) +: Lemma + (requires (Set.subset (Set.intersect rs1 rs2) Set.empty)) + (ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))) + [SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))] + + +(** The modifies clause proper *) + +val modifies + (s: loc) + (h1 h2: HS.mem) +: GTot Type0 + +val modifies_mreference_elim + (#t: Type) + (#pre: Preorder.preorder t) + (b: HS.mreference t pre) + (p: loc) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_mreference b) p /\ + HS.contains h b /\ + modifies p h h' + )) + (ensures ( + HS.contains h' b /\ + HS.sel h b == HS.sel h' b + )) + [SMTPatOr [ + [ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ; + [ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ]; + [ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ; + [ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ] + ] ] + +val modifies_buffer_elim + (#t1: Type) + (b: B.buffer t1) + (p: loc) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_buffer b) p /\ + B.live h b /\ + modifies p h h' + )) + (ensures ( + B.live h' b /\ ( + B.as_seq h b == B.as_seq h' b + ))) + [SMTPatOr [ + [ SMTPat (modifies p h h'); SMTPat (B.as_seq h b) ] ; + [ SMTPat (modifies p h h'); SMTPat (B.live h b) ]; + [ SMTPat (modifies p h h'); SMTPat (B.as_seq h' b) ] ; + [ SMTPat (modifies p h h'); SMTPat (B.live h' b) ] + ] ] + +val modifies_refl + (s: loc) + (h: HS.mem) +: Lemma + (modifies s h h) + [SMTPat (modifies s h h)] + +val modifies_loc_includes + (s1: loc) + (h h': HS.mem) + (s2: loc) +: Lemma + (requires (modifies s2 h h' /\ loc_includes s1 s2)) + (ensures (modifies s1 h h')) + [SMTPatOr [ + [SMTPat (modifies 
s1 h h'); SMTPat (modifies s2 h h')]; + [SMTPat (modifies s1 h h'); SMTPat (loc_includes s1 s2)]; + [SMTPat (modifies s2 h h'); SMTPat (loc_includes s1 s2)]; + ]] + +/// Some memory locations are tagged as liveness-insensitive: the +/// liveness preservation of a memory location only depends on its +/// disjointness from the liveness-sensitive memory locations of a +/// modifies clause. + + +val address_liveness_insensitive_locs: loc + +val region_liveness_insensitive_locs: loc + +val address_liveness_insensitive_buffer (#t: Type) (b: B.buffer t) : Lemma + (address_liveness_insensitive_locs `loc_includes` (loc_buffer b)) + [SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))] + +val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma + (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a)) + [SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))] + +val region_liveness_insensitive_buffer (#t: Type) (b: B.buffer t) : Lemma + (region_liveness_insensitive_locs `loc_includes` (loc_buffer b)) + [SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))] + +val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma + (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a)) + [SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))] + +val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma + (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs)) + [SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))] + +val region_liveness_insensitive_address_liveness_insensitive: + squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs) + +val modifies_liveness_insensitive_mreference + (l1 l2 : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x)) + (ensures (h' `HS.contains` x)) + (* TODO: pattern *) + +val modifies_liveness_insensitive_buffer + (l1 l2 : loc) + (h h' : HS.mem) + (#t: Type) + (x: B.buffer t) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ B.live h x)) + (ensures (B.live h' x)) + (* TODO: pattern *) + +let modifies_liveness_insensitive_mreference_weak + (l : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) +: Lemma + (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)) + (ensures (h' `HS.contains` x)) + [SMTPatOr [ + [SMTPat (h `HS.contains` x); SMTPat (modifies l h h');]; + [SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');]; + ]] += modifies_liveness_insensitive_mreference loc_none l h h' x + +let modifies_liveness_insensitive_buffer_weak + (l : loc) + (h h' : HS.mem) + (#t: Type) + (x: B.buffer t) +: Lemma + (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ B.live h x)) + (ensures (B.live h' x)) + [SMTPatOr [ + [SMTPat (B.live h x); SMTPat (modifies l h h');]; + [SMTPat (B.live h' x); SMTPat (modifies l h h');]; + ]] += modifies_liveness_insensitive_buffer loc_none l h h' x + +val modifies_liveness_insensitive_region + (l1 l2 : loc) + (h h' : 
HS.mem) + (x: HS.rid) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x)) + (ensures (HS.live_region h' x)) + (* TODO: pattern *) + +val modifies_liveness_insensitive_region_mreference + (l1 l2 : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x))) + (ensures (HS.live_region h' (HS.frameOf x))) + (* TODO: pattern *) + +val modifies_liveness_insensitive_region_buffer + (l1 l2 : loc) + (h h' : HS.mem) + (#t: Type) + (x: B.buffer t) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (B.frameOf x))) + (ensures (HS.live_region h' (B.frameOf x))) + (* TODO: pattern *) + +let modifies_liveness_insensitive_region_weak + (l2 : loc) + (h h' : HS.mem) + (x: HS.rid) +: Lemma + (requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x)) + (ensures (HS.live_region h' x)) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]; + ]] += modifies_liveness_insensitive_region loc_none l2 h h' x + +let modifies_liveness_insensitive_region_mreference_weak + (l2 : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) +: Lemma + (requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x))) + (ensures (HS.live_region h' (HS.frameOf x))) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]; + ]] += modifies_liveness_insensitive_region_mreference loc_none l2 h h' x + +let modifies_liveness_insensitive_region_buffer_weak + (l2 : loc) + (h h' : HS.mem) + (#t: Type) + (x: B.buffer t) +: Lemma + (requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (B.frameOf x))) + (ensures (HS.live_region h' (B.frameOf x))) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (B.frameOf x))]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (B.frameOf x))]; + ]] += modifies_liveness_insensitive_region_buffer loc_none l2 h h' x + + +val modifies_trans + (s12: loc) + (h1 h2: HS.mem) + (s23: loc) + (h3: HS.mem) +: Lemma + (requires (modifies s12 h1 h2 /\ modifies s23 h2 h3)) + (ensures (modifies (loc_union s12 s23) h1 h3)) + [SMTPat (modifies s12 h1 h2); SMTPat (modifies s23 h2 h3)] + +val modifies_only_live_regions + (rs: Set.set HS.rid) + (l: loc) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_regions false rs) l) h h' /\ + (forall r . 
Set.mem r rs ==> (~ (HS.live_region h r))) + )) + (ensures (modifies l h h')) + +val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma + (requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1)) + (ensures (modifies l h0 h1)) + [SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)] + +val modifies_fresh_frame_popped + (h0 h1: HS.mem) + (s: loc) + (h2 h3: HS.mem) +: Lemma + (requires ( + HS.fresh_frame h0 h1 /\ + modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\ + (HS.get_tip h2) == (HS.get_tip h1) /\ + HS.popped h2 h3 + )) + (ensures ( + modifies s h0 h3 /\ + (HS.get_tip h3) == HS.get_tip h0 + )) + [SMTPat (HS.fresh_frame h0 h1); SMTPat (HS.popped h2 h3); SMTPat (modifies s h0 h3)] + +val modifies_loc_regions_intro + (rs: Set.set HS.rid) + (h1 h2: HS.mem) +: Lemma + (requires (HS.modifies rs h1 h2)) + (ensures (modifies (loc_regions true rs) h1 h2)) + +val modifies_loc_addresses_intro + (r: HS.rid) + (a: Set.set nat) + (l: loc) + (h1 h2: HS.mem) +: Lemma + (requires ( + HS.live_region h2 r /\ + modifies (loc_union (loc_region_only false r) l) h1 h2 /\ + HS.modifies_ref r a h1 h2 + )) + (ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2)) + +val modifies_ralloc_post + (#a: Type) + (#rel: Preorder.preorder a) + (i: HS.rid) + (init: a) + (h: HS.mem) + (x: HST.mreference a rel { HST.is_eternal_region (HS.frameOf x) } ) + (h' : HS.mem) +: Lemma + (requires (HST.ralloc_post i init h x h')) + (ensures (modifies loc_none h h')) + +val modifies_salloc_post + (#a: Type) + (#rel: Preorder.preorder a) + (init: a) + (h: HS.mem) + (x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } ) + (h' : HS.mem) +: Lemma + (requires (HST.salloc_post init h x h')) + (ensures (modifies loc_none h h')) + +val modifies_free + (#a: Type) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel { HS.is_mm r } ) + (m: HS.mem { m `HS.contains` r } ) +: Lemma + (modifies (loc_freed_mreference r) m (HS.free r m)) + +val modifies_none_modifies + (h1 h2: HS.mem) +: Lemma + (requires (HST.modifies_none h1 h2)) + (ensures (modifies loc_none h1 h2)) + +val modifies_buffer_none_modifies + (h1 h2: HS.mem) +: Lemma + (requires (B.modifies_none h1 h2)) + (ensures (modifies loc_none h1 h2)) + +val modifies_0_modifies + (h1 h2: HS.mem) +: Lemma + (requires (B.modifies_0 h1 h2)) + (ensures (modifies loc_none h1 h2)) + [SMTPat (B.modifies_0 h1 h2)] + +val modifies_1_modifies + (#a: Type) + (b: B.buffer a) + (h1 h2: HS.mem) +: Lemma + (requires (B.modifies_1 b h1 h2)) + (ensures (modifies (loc_buffer b) h1 h2)) + [SMTPat (B.modifies_1 b h1 h2)] + +val modifies_2_modifies + (#a1 #a2: Type) + (b1: B.buffer a1) + (b2: B.buffer a2) + (h1 h2: HS.mem) +: Lemma + (requires (B.modifies_2 b1 b2 h1 h2)) + (ensures (modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h1 h2)) + [SMTPat (B.modifies_2 b1 b2 h1 h2)] + +val modifies_3_modifies + (#a1 #a2 #a3: Type) + (b1: B.buffer a1) + (b2: B.buffer a2) + (b3: B.buffer a3) + (h1 h2: HS.mem) +: Lemma + (requires (B.modifies_3 b1 b2 b3 h1 h2)) + (ensures (modifies (loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_buffer b3))) h1 h2)) + +val modifies_buffer_rcreate_post_common + (#a: Type) + (r: HS.rid) + (init: a) + (len: FStar.UInt32.t) + (b: B.buffer a) + (h0 h1: HS.mem) +: Lemma + (requires (B.rcreate_post_common r init len b h0 h1)) + (ensures (modifies loc_none h0 h1)) + +val mreference_live_buffer_unused_in_disjoint + (#t1: Type) + (#pre: Preorder.preorder t1) + (#t2: 
Type) + (h: HS.mem) + (b1: HS.mreference t1 pre) + (b2: B.buffer t2) +: Lemma + (requires (HS.contains h b1 /\ B.unused_in b2 h)) + (ensures (loc_disjoint (loc_freed_mreference b1) (loc_buffer b2))) + [SMTPat (HS.contains h b1); SMTPat (B.unused_in b2 h)] + +val buffer_live_mreference_unused_in_disjoint + (#t1: Type) + (#t2: Type) + (#pre: Preorder.preorder t2) + (h: HS.mem) + (b1: B.buffer t1) + (b2: HS.mreference t2 pre) +: Lemma + (requires (B.live h b1 /\ HS.unused_in b2 h)) + (ensures (loc_disjoint (loc_buffer b1) (loc_freed_mreference b2))) + [SMTPat (B.live h b1); SMTPat (HS.unused_in b2 h)] + +(** BEGIN TODO: move to FStar.Monotonic.HyperStack *) + +val does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: GTot Type0 + +val not_live_region_does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: Lemma + (requires (~ (HS.live_region h (fst ra)))) + (ensures (h `does_not_contain_addr` ra)) + +val unused_in_does_not_contain_addr + (h: HS.mem) + (#a: Type) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) +: Lemma + (requires (r `HS.unused_in` h)) + (ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r))) + +val addr_unused_in_does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: Lemma + (requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra)))) + (ensures (h `does_not_contain_addr` ra)) + +val free_does_not_contain_addr + (#a: Type0) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) + (m: HS.mem) + (x: HS.rid & nat) +: Lemma + (requires ( + HS.is_mm r /\ + m `HS.contains` r /\ + fst x == HS.frameOf r /\ + snd x == HS.as_addr r + )) + (ensures ( + HS.free r m `does_not_contain_addr` x + )) + [SMTPat (HS.free r m `does_not_contain_addr` x)] + +val does_not_contain_addr_elim + (#a: Type0) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) + (m: HS.mem) + (x: HS.rid & nat) +: Lemma + (requires ( + m `does_not_contain_addr` x /\ + HS.frameOf r == fst x /\ + HS.as_addr r == snd x + )) + (ensures (~ (m `HS.contains` r))) + +(** END TODO *) + +val modifies_only_live_addresses + (r: HS.rid) + (a: Set.set nat) + (l: loc) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_addresses false r a) l) h h' /\ + (forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x)) + )) + (ensures (modifies l h h')) + + +/// Type class instantiation for compositionality with other kinds of memory locations than regions, references or buffers (just in case). +/// No usage pattern has been found yet. 
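+(* A minimal usage sketch, not part of the original interface text: it shows
+   one way a client module could use the conversion functions declared below
+   to transport a [modifies] fact into the FStar.ModifiesGen theory. The name
+   [modifies_cloc_example] is hypothetical; [MG] is the module abbreviation
+   for FStar.ModifiesGen introduced just below, and [cloc_of_loc] and
+   [modifies_to_cloc] are the conversions declared at the end of this
+   interface.
+
+     let modifies_cloc_example (l: loc) (h1 h2: HS.mem) : Lemma
+       (requires (modifies l h1 h2))
+       (ensures (MG.modifies (cloc_of_loc l) h1 h2))
+     = modifies_to_cloc l h1 h2
+*)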
+ +module MG = FStar.ModifiesGen + +val cloc_aloc : HS.rid -> nat -> Tot (Type u#1) + +val cloc_cls: MG.cls cloc_aloc + +val cloc_of_loc (l: loc) : Tot (MG.loc cloc_cls) + +val loc_of_cloc (l: MG.loc cloc_cls) : Tot loc + +val loc_of_cloc_of_loc (l: loc) : Lemma + (loc_of_cloc (cloc_of_loc l) == l) + [SMTPat (loc_of_cloc (cloc_of_loc l))] + +val cloc_of_loc_of_cloc (l: MG.loc cloc_cls) : Lemma + (cloc_of_loc (loc_of_cloc l) == l) + [SMTPat (cloc_of_loc (loc_of_cloc l))] + +val cloc_of_loc_none : unit -> Lemma (cloc_of_loc loc_none == MG.loc_none) + +val cloc_of_loc_union (l1 l2: loc) : Lemma + (cloc_of_loc (loc_union l1 l2) == MG.loc_union (cloc_of_loc l1) (cloc_of_loc l2)) + +val cloc_of_loc_addresses + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: Lemma + (cloc_of_loc (loc_addresses preserve_liveness r n) == MG.loc_addresses preserve_liveness r n) + +val cloc_of_loc_regions + (preserve_liveness: bool) + (r: Set.set HS.rid) +: Lemma + (cloc_of_loc (loc_regions preserve_liveness r) == MG.loc_regions preserve_liveness r) + +val loc_includes_to_cloc (l1 l2: loc) : Lemma + (loc_includes l1 l2 <==> MG.loc_includes (cloc_of_loc l1) (cloc_of_loc l2)) + +val loc_disjoint_to_cloc (l1 l2: loc) : Lemma + (loc_disjoint l1 l2 <==> MG.loc_disjoint (cloc_of_loc l1) (cloc_of_loc l2)) + +val modifies_to_cloc (l: loc) (h1 h2: HS.mem) : Lemma + (modifies l h1 h2 <==> MG.modifies (cloc_of_loc l) h1 h2) diff --git a/stage0/ulib/FStar.ModifiesGen.fst b/stage0/ulib/FStar.ModifiesGen.fst new file mode 100644 index 00000000000..412343c3c23 --- /dev/null +++ b/stage0/ulib/FStar.ModifiesGen.fst @@ -0,0 +1,2232 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.ModifiesGen + +#set-options "--split_queries no --ext 'context_pruning='" +#set-options "--using_facts_from '*,-FStar.Tactics,-FStar.Reflection,-FStar.List'" + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +noeq +type aloc (#al: aloc_t) (c: cls al) = | ALoc: + (region: HS.rid) -> + (addr: nat) -> + (loc: option (al region addr)) -> + aloc c + +let aloc_domain (#al: aloc_t) (c: cls al) (regions: Ghost.erased (Set.set HS.rid)) (addrs: ((r: HS.rid { Set.mem r (Ghost.reveal regions) } ) -> GTot (GSet.set nat))) : GTot (GSet.set (aloc c)) = + GSet.comprehend (fun a -> Set.mem a.region (Ghost.reveal regions) && GSet.mem a.addr (addrs a.region)) + +module F = FStar.FunctionalExtensionality + +[@@(unifier_hint_injective)] +let i_restricted_g_t = F.restricted_g_t + +let addrs_dom regions = + (r: HS.rid { Set.mem r (Ghost.reveal regions) } ) + +let non_live_addrs_codom + (regions: Ghost.erased (Set.set HS.rid)) + (region_liveness_tags: Ghost.erased (Set.set HS.rid) { Ghost.reveal region_liveness_tags `Set.subset` Ghost.reveal regions } ) + (r:addrs_dom regions) = + (y: GSet.set nat { r `Set.mem` (Ghost.reveal region_liveness_tags) ==> GSet.subset (GSet.complement GSet.empty) y }) + +let live_addrs_codom + (regions: Ghost.erased (Set.set HS.rid)) + (region_liveness_tags: Ghost.erased (Set.set HS.rid) { Ghost.reveal region_liveness_tags `Set.subset` Ghost.reveal regions } ) + (non_live_addrs: + i_restricted_g_t + (addrs_dom regions) + (non_live_addrs_codom regions region_liveness_tags)) + (r:addrs_dom regions) = (y: GSet.set nat { GSet.subset (non_live_addrs r) y } ) + +[@@erasable] +noeq +type loc' (#al: aloc_t u#x) (c: cls al) : Type u#x = + | Loc: + (regions: Ghost.erased (Set.set HS.rid)) -> + (region_liveness_tags: Ghost.erased (Set.set HS.rid) { Ghost.reveal region_liveness_tags `Set.subset` Ghost.reveal regions } ) -> + (non_live_addrs: + i_restricted_g_t + (addrs_dom regions) + (non_live_addrs_codom regions region_liveness_tags)) -> + (live_addrs: + i_restricted_g_t + (addrs_dom regions) + (live_addrs_codom regions region_liveness_tags non_live_addrs)) -> + (aux: Ghost.erased (GSet.set (aloc c)) { + aloc_domain c regions live_addrs `GSet.subset` Ghost.reveal aux /\ + Ghost.reveal aux `GSet.subset` (aloc_domain c regions (fun _ -> GSet.complement GSet.empty)) + } ) -> + loc' c + +let loc = loc' + +let mk_non_live_addrs (#regions:_) (#region_liveness_tags:_) + (f: (x:addrs_dom regions -> GTot (non_live_addrs_codom regions region_liveness_tags x))) + : i_restricted_g_t + (addrs_dom regions) + (non_live_addrs_codom regions region_liveness_tags) = + F.on_dom_g _ f + +let mk_live_addrs (#regions:_) (#region_liveness_tags:_) + (#non_live_addrs_codom: _) + (f: (x:addrs_dom regions -> GTot (live_addrs_codom regions region_liveness_tags non_live_addrs_codom x))) + : i_restricted_g_t + (addrs_dom regions) + (live_addrs_codom regions region_liveness_tags non_live_addrs_codom) = + F.on_dom_g _ f + +let loc_none #a #c = + Loc + (Ghost.hide (Set.empty)) + (Ghost.hide (Set.empty)) + (mk_non_live_addrs (fun _ -> GSet.empty)) + (mk_live_addrs (fun _ -> GSet.empty)) + (Ghost.hide GSet.empty) + +let regions_of_loc + (#al: aloc_t) (#c: cls al) + (s: loc c) +: GTot (Set.set HS.rid) += Ghost.reveal (Loc?.regions s) + +let addrs_of_loc_liveness_not_preserved + (#al: aloc_t) (#c: cls al) + (l: loc c) + (r: HS.rid) +: GTot (GSet.set nat) += if Set.mem r (regions_of_loc l) + then Loc?.non_live_addrs l r + else GSet.empty + +let addrs_of_loc_weak + (#al: aloc_t) (#c: cls al) + (l: 
loc c) + (r: HS.rid) +: GTot (GSet.set nat) += if Set.mem r (regions_of_loc l) + then Loc?.live_addrs l r + else GSet.empty + +let addrs_of_loc_aux_pred + (#al: aloc_t) (#c: cls al) + (l: loc c) + (r: HS.rid) + (addr: nat) +: GTot bool += StrongExcludedMiddle.strong_excluded_middle (exists a . GSet.mem a (Ghost.reveal (Loc?.aux l)) /\ a.region == r /\ a.addr == addr) + +let addrs_of_loc_aux + (#al: aloc_t) (#c: cls al) + (l: loc c) + (r: HS.rid) +: GTot (y: GSet.set nat { GSet.subset (GSet.intersect y (addrs_of_loc_weak l r)) GSet.empty } ) += GSet.comprehend (addrs_of_loc_aux_pred l r) + `GSet.intersect` (GSet.complement (addrs_of_loc_weak l r)) + +let addrs_of_loc + (#al: aloc_t) (#c: cls al) + (l: loc c) + (r: HS.rid) +: GTot (GSet.set nat) += GSet.union + (addrs_of_loc_weak l r) + (addrs_of_loc_aux l r) + +let addrs_of_loc_aux_prop + (#al: aloc_t) (#c: cls al) + (l: loc c) + (r: HS.rid) +: Lemma + (GSet.subset (GSet.intersect (addrs_of_loc_aux l r) (addrs_of_loc_weak l r)) GSet.empty) + [SMTPatOr [ + [SMTPat (addrs_of_loc_aux l r)]; + [SMTPat (addrs_of_loc_weak l r)]; + [SMTPat (addrs_of_loc l r)]; + ]] += () + +let loc_union #al #c s1 s2 = + let regions1 = Ghost.reveal (Loc?.regions s1) in + let regions2 = Ghost.reveal (Loc?.regions s2) in + let regions = Set.union regions1 regions2 in + let region_liveness_tags : Ghost.erased (Set.set HS.rid) = (Ghost.hide (Set.union (Ghost.reveal (Loc?.region_liveness_tags s1)) (Ghost.reveal (Loc?.region_liveness_tags s2)))) in + let gregions = Ghost.hide regions in + let non_live_addrs = + F.on_dom_g (addrs_dom gregions) #(non_live_addrs_codom gregions region_liveness_tags) + (fun r -> + GSet.union + (if Set.mem r regions1 then Loc?.non_live_addrs s1 r else GSet.empty) + (if Set.mem r regions2 then Loc?.non_live_addrs s2 r else GSet.empty)) + in + let live_addrs = + F.on_dom_g (addrs_dom gregions) #(live_addrs_codom gregions region_liveness_tags non_live_addrs) + (fun r -> + GSet.union + (if Set.mem r regions1 then addrs_of_loc_weak s1 r else GSet.empty) + (if Set.mem r regions2 then addrs_of_loc_weak s2 r else GSet.empty)) + in + let aux = Ghost.hide + (Ghost.reveal (Loc?.aux s1) `GSet.union` Ghost.reveal (Loc?.aux s2)) + in + Loc + (Ghost.hide regions) + region_liveness_tags + non_live_addrs + live_addrs + aux + +let fun_set_equal (#t: Type) (#t': Type) + (#p:(t -> GSet.set t' -> Type)) + (f1 f2: i_restricted_g_t t (fun x -> g:GSet.set t'{p x g})) :Tot Type0 = + forall (x: t) . 
{:pattern (f1 x) \/ (f2 x) } f1 x `GSet.equal` f2 x + +let fun_set_equal_elim (#t: Type) (#t': Type) + (#p:(t -> GSet.set t' -> Type)) + (f1 f2: i_restricted_g_t t (fun x -> g:GSet.set t'{p x g})) : Lemma + (requires (fun_set_equal f1 f2)) + (ensures (f1 == f2)) +// [SMTPat (fun_set_equal f1 f2)] += assert (f1 `FunctionalExtensionality.feq_g` f2) + +let loc_equal (#al: aloc_t) (#c: cls al) (s1 s2: loc c) : GTot Type0 = + let Loc regions1 region_liveness_tags1 _ _ aux1 = s1 in + let Loc regions2 region_liveness_tags2 _ _ aux2 = s2 in + Ghost.reveal regions1 `Set.equal` Ghost.reveal regions2 /\ + Ghost.reveal region_liveness_tags1 `Set.equal` Ghost.reveal region_liveness_tags2 /\ + fun_set_equal (Loc?.non_live_addrs s1) (Loc?.non_live_addrs s2) /\ + fun_set_equal (Loc?.live_addrs s1) (Loc?.live_addrs s2) /\ + Ghost.reveal (Loc?.aux s1) `GSet.equal` Ghost.reveal (Loc?.aux s2) + +let loc_equal_elim (#al: aloc_t) (#c: cls al) (s1 s2: loc c) : Lemma + (requires (loc_equal s1 s2)) + (ensures (s1 == s2)) + [SMTPat (s1 `loc_equal` s2)] += fun_set_equal_elim (Loc?.non_live_addrs s1) (Loc?.non_live_addrs s2); + fun_set_equal_elim (Loc?.live_addrs s1) (Loc?.live_addrs s2) + + +let loc_union_idem #al #c s = + assert (loc_union s s `loc_equal` s) + +let loc_union_comm #al #c s1 s2 = + assert (loc_union s1 s2 `loc_equal` loc_union s2 s1) + +let loc_union_assoc #al #c s1 s2 s3 = + assert (loc_union s1 (loc_union s2 s3) `loc_equal` loc_union (loc_union s1 s2) s3) + +let loc_union_loc_none_l #al #c s = + assert (loc_union loc_none s `loc_equal` s) + +let loc_union_loc_none_r #al #c s = + assert (loc_union s loc_none `loc_equal` s) + +let loc_of_aloc #al #c #r #n b = + let regions = (Ghost.hide (Set.singleton r)) in + let region_liveness_tags = (Ghost.hide (Set.empty)) in + Loc + regions + region_liveness_tags + (mk_non_live_addrs (fun _ -> GSet.empty)) + (mk_live_addrs (fun _ -> GSet.empty)) + (Ghost.hide (GSet.singleton (ALoc r n (Some b)))) + +let loc_of_aloc_not_none #al #c #r #n b = () + +let loc_addresses #al #c preserve_liveness r n = + let regions = (Ghost.hide (Set.singleton r)) in + Loc + regions + (Ghost.hide Set.empty) + (mk_non_live_addrs (fun _ -> if preserve_liveness then GSet.empty else GSet.of_set n)) + (mk_live_addrs (fun _ -> GSet.of_set n)) + (Ghost.hide (aloc_domain c regions (fun _ -> GSet.of_set n))) + +let loc_regions_region_liveness_tags (preserve_liveness: bool) (r: Set.set HS.rid) : Tot (Ghost.erased (Set.set HS.rid)) = + if preserve_liveness then Ghost.hide Set.empty else Ghost.hide r + +let loc_regions #al #c preserve_liveness r = + let region_liveness_tags = loc_regions_region_liveness_tags preserve_liveness r in + let addrs (r' : HS.rid { Set.mem r' r } ) : GTot (y: GSet.set nat { r' `Set.mem` (Ghost.reveal region_liveness_tags) ==> GSet.subset (GSet.complement GSet.empty) y } ) = + GSet.complement GSet.empty + in + let live_addrs (r' : HS.rid { Set.mem r' r } ) : GTot (y: GSet.set nat { addrs r' `GSet.subset` y } ) = + addrs r' + in + Loc + (Ghost.hide r) + region_liveness_tags + (mk_non_live_addrs addrs) + (mk_live_addrs live_addrs) + (Ghost.hide (aloc_domain c (Ghost.hide r) addrs)) + +let aloc_includes (#al: aloc_t) (#c: cls al) (b0 b: aloc c) : GTot Type0 = + b0.region == b.region /\ b0.addr == b.addr /\ Some? b0.loc == Some? b.loc /\ (if Some? b0.loc && Some? 
b.loc then c.aloc_includes (Some?.v b0.loc) (Some?.v b.loc) else True) + +let loc_aux_includes_buffer + (#al: aloc_t) (#c: cls al) + (s: GSet.set (aloc c)) + (b: aloc c) +: GTot Type0 + (decreases s) += exists (b0 : aloc c) . b0 `GSet.mem` s /\ b0 `aloc_includes` b + +let loc_aux_includes + (#al: aloc_t) (#c: cls al) + (s1 s2: GSet.set (aloc c)) +: GTot Type0 + (decreases s2) += forall (b2: aloc c) . GSet.mem b2 s2 ==> loc_aux_includes_buffer s1 b2 + +let loc_aux_includes_union_l + (#al: aloc_t) (#c: cls al) + (s1 s2 s: GSet.set (aloc c)) +: Lemma + (requires (loc_aux_includes s1 s \/ loc_aux_includes s2 s)) + (ensures (loc_aux_includes (GSet.union s1 s2) s)) + (decreases s) += () + +let loc_aux_includes_refl + (#al: aloc_t) (#c: cls al) + (s: GSet.set (aloc c)) +: Lemma + (loc_aux_includes s s) += Classical.forall_intro_3 (fun r a b -> c.aloc_includes_refl #r #a b) + +let loc_aux_includes_subset + (#al: aloc_t) (#c: cls al) + (s1 s2: GSet.set (aloc c)) +: Lemma + (requires (s1 `GSet.subset` s2)) + (ensures (loc_aux_includes s2 s1)) += Classical.forall_intro_3 (fun r a b -> c.aloc_includes_refl #r #a b) + +let loc_aux_includes_subset' + (#al: aloc_t) (#c: cls al) + (s1 s2: GSet.set (aloc c)) +: Lemma + (requires (s1 `GSet.subset` s2)) + (ensures (loc_aux_includes s2 s1)) + [SMTPatOr [ + [SMTPat (s1 `GSet.subset` s2)]; + [SMTPat (loc_aux_includes s2 s1)]; + ]] += loc_aux_includes_subset s1 s2 + +let loc_aux_includes_union_l_r + (#al: aloc_t) (#c: cls al) + (s s': GSet.set (aloc c)) +: Lemma + (loc_aux_includes (GSet.union s s') s) += loc_aux_includes_refl s; + loc_aux_includes_union_l s s' s + +let loc_aux_includes_union_l_l + (#al: aloc_t) (#c: cls al) + (s s': GSet.set (aloc c)) +: Lemma + (loc_aux_includes (GSet.union s' s) s) += loc_aux_includes_refl s; + loc_aux_includes_union_l s' s s + +let loc_aux_includes_buffer_includes + (#al: aloc_t) (#c: cls al) + (s: GSet.set (aloc c)) + (b1 b2: aloc c) +: Lemma + (requires (loc_aux_includes_buffer s b1 /\ b1 `aloc_includes` b2)) + (ensures (loc_aux_includes_buffer s b2)) += Classical.forall_intro_3 (fun r a b1 -> Classical.forall_intro_2 (fun b2 b3 -> Classical.move_requires (c.aloc_includes_trans #r #a b1 b2) b3)) + +let loc_aux_includes_loc_aux_includes_buffer + (#al: aloc_t) (#c: cls al) + (s1 s2: GSet.set (aloc c)) + (b: aloc c) +: Lemma + (requires (loc_aux_includes s1 s2 /\ loc_aux_includes_buffer s2 b)) + (ensures (loc_aux_includes_buffer s1 b)) += Classical.forall_intro_3 (fun s b1 b2 -> Classical.move_requires (loc_aux_includes_buffer_includes #al #c s b1) b2) + +let loc_aux_includes_trans + (#al: aloc_t) (#c: cls al) + (s1 s2 s3: GSet.set (aloc c)) +: Lemma + (requires (loc_aux_includes s1 s2 /\ loc_aux_includes s2 s3)) + (ensures (loc_aux_includes s1 s3)) += Classical.forall_intro_3 (fun r a b1 -> Classical.forall_intro_2 (fun b2 b3 -> Classical.move_requires (c.aloc_includes_trans #r #a b1 b2) b3)) + +let addrs_of_loc_weak_loc_union + (#al: aloc_t) (#c: cls al) + (l1 l2: loc c) + (r: HS.rid) +: Lemma + (addrs_of_loc_weak (loc_union l1 l2) r == GSet.union (addrs_of_loc_weak l1 r) (addrs_of_loc_weak l2 r)) + [SMTPat (addrs_of_loc_weak (loc_union l1 l2) r)] += assert (GSet.equal (addrs_of_loc_weak (loc_union l1 l2) r) (GSet.union (addrs_of_loc_weak l1 r) (addrs_of_loc_weak l2 r))) + +let addrs_of_loc_union + (#al: aloc_t) (#c: cls al) + (l1 l2: loc c) + (r: HS.rid) +: Lemma + (addrs_of_loc (loc_union l1 l2) r == GSet.union (addrs_of_loc l1 r) (addrs_of_loc l2 r)) + [SMTPat (addrs_of_loc (loc_union l1 l2) r)] += assert (GSet.equal 
(addrs_of_loc (loc_union l1 l2) r) (GSet.union (addrs_of_loc l1 r) (addrs_of_loc l2 r))) + +unfold +let loc_includes' #al (#c: cls al) (s1 s2: loc c) = + let regions1 = Ghost.reveal (Loc?.regions s1) in + let regions2 = Ghost.reveal (Loc?.regions s2) in ( + Set.subset regions2 regions1 /\ + Set.subset (Ghost.reveal (Loc?.region_liveness_tags s2)) (Ghost.reveal (Loc?.region_liveness_tags s1)) /\ + ( + forall (r: HS.rid { Set.mem r regions2 } ) . + GSet.subset (Loc?.non_live_addrs s2 r) (Loc?.non_live_addrs s1 r) + ) /\ + ( + forall (r: HS.rid) . + GSet.subset (addrs_of_loc_weak s2 r) (addrs_of_loc_weak s1 r) + ) /\ ( + forall (r: HS.rid) . + GSet.subset (addrs_of_loc s2 r) (addrs_of_loc s1 r) + ) /\ ( + (Ghost.reveal (Loc?.aux s1)) `loc_aux_includes` (Ghost.reveal (Loc?.aux s2)) + ) + ) + +let loc_includes #al #c s1 s2 = + loc_includes' s1 s2 + +let loc_includes_refl #al #c s = + loc_aux_includes_refl (Ghost.reveal (Loc?.aux s)) + +let loc_includes_refl' + (#al: aloc_t) (#c: cls al) + (s: loc c) +: Lemma + (loc_includes s s) + [SMTPat (loc_includes s s)] += loc_includes_refl s + +let loc_includes_trans #al #c s1 s2 s3 = + loc_aux_includes_trans (Ghost.reveal (Loc?.aux s1)) (Ghost.reveal (Loc?.aux s2)) (Ghost.reveal (Loc?.aux s3)) + +let loc_includes_union_r #al #c s s1 s2 = () + +let loc_includes_union_l #al #c s1 s2 s = + let u12 = loc_union s1 s2 in + Classical.or_elim + #(loc_includes s1 s) + #(loc_includes s2 s) + #(fun _ -> loc_includes (loc_union s1 s2) s) + (fun _ -> + loc_aux_includes_union_l_r (Ghost.reveal (Loc?.aux s1)) (Ghost.reveal (Loc?.aux s2)); + assert (loc_includes (loc_union s1 s2) s1); + loc_includes_trans u12 s1 s) + (fun _ -> + loc_aux_includes_union_l_l (Ghost.reveal (Loc?.aux s2)) (Ghost.reveal (Loc?.aux s1)); + assert (loc_includes (loc_union s1 s2) s2); + loc_includes_trans u12 s2 s) + +let loc_includes_none #al #c s = () + +let loc_includes_none_elim #al #c s = + assert (s `loc_equal` loc_none) + +let loc_includes_aloc #al #c #r #n b1 b2 = () + +let loc_includes_aloc_elim #aloc #c #r1 #r2 #n1 #n2 b1 b2 = () + +let addrs_of_loc_loc_of_aloc + (#al: aloc_t) + (#c: cls al) + (#r: HS.rid) + (#a: nat) + (p: al r a) + (r': HS.rid) +: Lemma + (addrs_of_loc (loc_of_aloc #_ #c p) r' `GSet.equal` (if r = r' then GSet.singleton a else GSet.empty)) + [SMTPat (addrs_of_loc (loc_of_aloc #_ #c p) r')] += () + +let loc_includes_addresses_aloc #al #c preserve_liveness r s #a p = () + +let loc_includes_region_aloc #al #c preserve_liveness s #r #a b = () + +let loc_includes_region_addresses #al #c s preserve_liveness1 preserve_liveness2 r a = () + +let loc_includes_region_region #al #c preserve_liveness1 preserve_liveness2 s1 s2 = () + +let loc_includes_region_union_l #al #c preserve_liveness l s1 s2 = + assert ((loc_regions #_ #c preserve_liveness (Set.intersect s2 (Set.complement s1)) `loc_union` loc_regions #_ #c preserve_liveness (Set.intersect s2 s1)) `loc_equal` loc_regions preserve_liveness s2); + loc_includes_region_region #_ #c preserve_liveness preserve_liveness s1 (Set.intersect s2 s1); + loc_includes_union_l (loc_regions preserve_liveness s1) l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1))); + loc_includes_union_l (loc_regions preserve_liveness s1) l (loc_regions preserve_liveness (Set.intersect s2 s1)); + loc_includes_union_r (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1))) (loc_regions preserve_liveness (Set.intersect s2 s1)) + +let loc_includes_addresses_addresses #al c 
preserve_liveness1 preserve_liveness2 r s1 s2 = () + +(* Disjointness of two memory locations *) + +let aloc_disjoint (#al: aloc_t) (#c: cls al) (b1 b2: aloc c) : GTot Type0 = + if b1.region = b2.region && b1.addr = b2.addr + then Some? b1.loc /\ Some? b2.loc /\ c.aloc_disjoint (Some?.v b1.loc) (Some?.v b2.loc) + else True + +let aloc_disjoint_sym (#al: aloc_t) (#c: cls al) (b1 b2: aloc c) : Lemma + (aloc_disjoint b1 b2 <==> aloc_disjoint b2 b1) += Classical.forall_intro_2 (fun r a -> Classical.forall_intro_2 (fun b1 b2 -> c.aloc_disjoint_sym #r #a b1 b2)) + +let loc_aux_disjoint + (#al: aloc_t) (#c: cls al) + (l1 l2: GSet.set (aloc c)) +: GTot Type0 += forall (b1 b2: aloc c) . (GSet.mem b1 l1 /\ GSet.mem b2 l2) ==> aloc_disjoint b1 b2 + +let loc_aux_disjoint_union_l + (#al: aloc_t) (#c: cls al) + (ll1 lr1 l2: GSet.set (aloc c)) +: Lemma + (ensures (loc_aux_disjoint (GSet.union ll1 lr1) l2 <==> (loc_aux_disjoint ll1 l2 /\ loc_aux_disjoint lr1 l2))) += () + +let loc_aux_disjoint_union_r + (#al: aloc_t) (#c: cls al) + (l1 ll2 lr2: GSet.set (aloc c)) +: Lemma + (loc_aux_disjoint l1 (GSet.union ll2 lr2) <==> (loc_aux_disjoint l1 ll2 /\ loc_aux_disjoint l1 lr2)) += () + +let loc_aux_disjoint_sym + (#al: aloc_t) (#c: cls al) + (l1 l2: GSet.set (aloc c)) +: Lemma + (ensures (loc_aux_disjoint l1 l2 <==> loc_aux_disjoint l2 l1)) += Classical.forall_intro_2 (aloc_disjoint_sym #al #c) + +let regions_of_loc_loc_union + (#al: aloc_t) (#c: cls al) + (s1 s2: loc c) +: Lemma + (regions_of_loc (loc_union s1 s2) == regions_of_loc s1 `Set.union` regions_of_loc s2) + [SMTPat (regions_of_loc (loc_union s1 s2))] += assert (regions_of_loc (loc_union s1 s2) `Set.equal` (regions_of_loc s1 `Set.union` regions_of_loc s2)) + +let regions_of_loc_monotonic + (#al: aloc_t) (#c: cls al) + (s1 s2: loc c) +: Lemma + (requires (loc_includes s1 s2)) + (ensures (Set.subset (regions_of_loc s2) (regions_of_loc s1))) += () + +let loc_disjoint_region_liveness_tags (#al: aloc_t) (#c: cls al) (l1 l2: loc c) : GTot Type0 = + Set.subset (Set.intersect (Ghost.reveal (Loc?.region_liveness_tags l1)) (Ghost.reveal (Loc?.region_liveness_tags l2))) Set.empty + +let loc_disjoint_addrs (#al: aloc_t) (#c: cls al) (l1 l2: loc c) : GTot Type0 = + (forall (r: HS.rid) . 
+ GSet.subset (GSet.intersect (addrs_of_loc_weak l1 r) (addrs_of_loc l2 r)) GSet.empty /\ + GSet.subset (GSet.intersect (addrs_of_loc l1 r) (addrs_of_loc_weak l2 r)) GSet.empty + ) + +let loc_disjoint_aux (#al: aloc_t) (#c: cls al) (l1 l2: loc c) : GTot Type0 = + loc_aux_disjoint (Ghost.reveal (Loc?.aux l1)) (Ghost.reveal (Loc?.aux l2)) + +let loc_disjoint + (#al: aloc_t) (#c: cls al) + (l1 l2: loc c) +: GTot Type0 += loc_disjoint_region_liveness_tags l1 l2 /\ + loc_disjoint_addrs l1 l2 /\ + loc_disjoint_aux l1 l2 + +let loc_disjoint_sym #al #c l1 l2 = + Classical.forall_intro_2 (loc_aux_disjoint_sym #al #c) + +let loc_disjoint_sym' + (#al: aloc_t) (#c: cls al) + (s1 s2: loc c) +: Lemma + (requires (loc_disjoint s1 s2)) + (ensures (loc_disjoint s2 s1)) + [SMTPat (loc_disjoint s1 s2)] += loc_disjoint_sym s1 s2 + +let loc_disjoint_none_r #al #c s = () + +let loc_disjoint_union_r #al #c s s1 s2 = () + +let aloc_disjoint_includes (#al: aloc_t) (#c: cls al) (b1 b2 b3 : aloc c) : Lemma + (requires (aloc_disjoint b1 b2 /\ aloc_includes b2 b3)) + (ensures (aloc_disjoint b1 b3)) += if b1.region = b2.region && b1.addr = b2.addr + then begin + c.aloc_includes_refl (Some?.v b1.loc); + c.aloc_disjoint_includes (Some?.v b1.loc) (Some?.v b2.loc) (Some?.v b1.loc) (Some?.v b3.loc) + end + else () + +let loc_aux_disjoint_loc_aux_includes + (#al: aloc_t) (#c: cls al) + (l1 l2 l3: GSet.set (aloc c)) +: Lemma + (requires (loc_aux_disjoint l1 l2 /\ loc_aux_includes l2 l3)) + (ensures (loc_aux_disjoint l1 l3)) += // FIXME: WHY WHY WHY do I need this assert? + assert (forall (b1 b3: aloc c) . (GSet.mem b1 l1 /\ GSet.mem b3 l3) ==> (exists (b2: aloc c) . GSet.mem b2 l2 /\ aloc_disjoint b1 b2 /\ aloc_includes b2 b3)); + Classical.forall_intro_3 (fun b1 b2 b3 -> Classical.move_requires (aloc_disjoint_includes #al #c b1 b2) b3) + +let loc_disjoint_includes #al #c p1 p2 p1' p2' = + regions_of_loc_monotonic p1 p1'; + regions_of_loc_monotonic p2 p2'; + let l1 = Ghost.reveal (Loc?.aux p1) in + let l2 = Ghost.reveal (Loc?.aux p2) in + let l1' = Ghost.reveal (Loc?.aux p1') in + let l2' = Ghost.reveal (Loc?.aux p2') in + loc_aux_disjoint_loc_aux_includes l1 l2 l2'; + loc_aux_disjoint_sym l1 l2'; + loc_aux_disjoint_loc_aux_includes l2' l1 l1'; + loc_aux_disjoint_sym l2' l1' + +let loc_disjoint_aloc_intro #al #c #r1 #a1 #r2 #a2 b1 b2 = () + +let loc_disjoint_aloc_elim #al #c #r1 #a1 #r2 #a2 b1 b2 = + // FIXME: WHY WHY WHY this assert? + assert (aloc_disjoint (ALoc #_ #c r1 a1 (Some b1)) (ALoc #_ #c r2 a2 (Some b2))) + +#push-options "--z3rlimit 15" +let loc_disjoint_addresses_intro #al #c preserve_liveness1 preserve_liveness2 r1 r2 n1 n2 = + // FIXME: WHY WHY WHY this assert? + assert (loc_aux_disjoint (Ghost.reveal (Loc?.aux (loc_addresses #_ #c preserve_liveness1 r1 n1))) (Ghost.reveal (Loc?.aux (loc_addresses #_ #c preserve_liveness2 r2 n2)))) +#pop-options + +let loc_disjoint_addresses_elim #al #c preserve_liveness1 preserve_liveness2 r1 r2 n1 n2 = () + +let loc_disjoint_aloc_addresses_intro #al #c #r' #a' p preserve_liveness r n = () + +let loc_disjoint_aloc_addresses_elim #al #c #r' #a' p preserve_liveness r n = () + +#push-options "--z3rlimit 15" +let loc_disjoint_regions #al #c preserve_liveness1 preserve_liveness2 rs1 rs2 = + // FIXME: WHY WHY WHY this assert? 
+ assert (loc_aux_disjoint (Ghost.reveal (Loc?.aux (loc_regions #_ #c preserve_liveness1 rs1))) (Ghost.reveal (Loc?.aux (loc_regions #_ #c preserve_liveness2 rs2)))) +#pop-options + +let loc_none_in_some_region #a (c: cls a) (r: HS.rid) : GTot (loc c) = + Loc + (Ghost.hide (Set.singleton r)) + (Ghost.hide (Set.empty)) + (mk_non_live_addrs (fun _ -> GSet.empty)) + (mk_live_addrs (fun _ -> GSet.empty)) + (Ghost.hide GSet.empty) + +(** Liveness-insensitive memory locations *) + +let address_liveness_insensitive_locs #al c = + Loc + (Ghost.hide (Set.complement Set.empty)) + (Ghost.hide Set.empty) + (mk_non_live_addrs (fun _ -> GSet.empty)) + (mk_live_addrs (fun _ -> GSet.complement GSet.empty)) + (Ghost.hide (aloc_domain c (Ghost.hide (Set.complement Set.empty)) (fun _ -> GSet.complement GSet.empty))) + +let loc_includes_address_liveness_insensitive_locs_aloc #al #c #r #n a = () + +let loc_includes_address_liveness_insensitive_locs_addresses #al c r a = () + +let region_liveness_insensitive_locs #al c = + Loc + (Ghost.hide (Set.complement Set.empty)) + (Ghost.hide Set.empty) + (mk_non_live_addrs (fun _ -> GSet.complement GSet.empty)) + (mk_live_addrs (fun _ -> GSet.complement GSet.empty)) + (Ghost.hide (aloc_domain c (Ghost.hide (Set.complement Set.empty)) (fun _ -> GSet.complement GSet.empty))) + +let loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs #al c = () + +let loc_includes_region_liveness_insensitive_locs_loc_regions #al c r = () + +let loc_includes_region_liveness_insensitive_locs_loc_addresses #al c preserve_liveness r a = () + +let loc_includes_region_liveness_insensitive_locs_loc_of_aloc #al c #r #a x = () + +(** The modifies clause proper *) + +let modifies_preserves_livenesses + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) +: GTot Type0 += (forall (t: Type) (pre: Preorder.preorder t) (p: HS.mreference t pre) . + let r = HS.frameOf p in ( + HS.contains h1 p /\ + (Set.mem r (regions_of_loc s) ==> ~ (GSet.mem (HS.as_addr p) (Loc?.non_live_addrs s r))) + ) ==> ( + HS.contains h2 p + )) + +let modifies_preserves_livenesses_elim + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (p: HS.mreference t pre) +: Lemma + (requires (modifies_preserves_livenesses s h1 h2 /\ HS.contains h1 p /\ (Set.mem (HS.frameOf p) (regions_of_loc s) ==> ~ (GSet.mem (HS.as_addr p) (Loc?.non_live_addrs s (HS.frameOf p)))))) + (ensures (HS.contains h2 p)) += () + +let modifies_preserves_livenesses_intro + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) + (f: ( + (t: Type) -> + (pre: Preorder.preorder t) -> + (p: HS.mreference t pre) -> + Lemma + (requires ( + HS.contains h1 p /\ + (Set.mem (HS.frameOf p) (regions_of_loc s) ==> ~ (GSet.mem (HS.as_addr p) (Loc?.non_live_addrs s (HS.frameOf p)))) + )) + (ensures (HS.contains h2 p)) + )) +: Lemma + (modifies_preserves_livenesses s h1 h2) += let f' + (t : Type) + (pre: Preorder.preorder t) + (p : HS.mreference t pre) + : Lemma + ( + (HS.contains h1 p /\ (Set.mem (HS.frameOf p) (regions_of_loc s) ==> ~ (GSet.mem (HS.as_addr p) (Loc?.non_live_addrs s (HS.frameOf p))))) ==> + (h2 `HS.contains` p)) + = Classical.move_requires (f t pre) p + in + Classical.forall_intro_3 f' + +let modifies_preserves_mreferences + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) +: GTot Type0 += (forall (t: Type) (pre: Preorder.preorder t) (p: HS.mreference t pre) . 
+ let r = HS.frameOf p in ( + HS.contains h1 p /\ + (Set.mem r (regions_of_loc s) ==> ~ (GSet.mem (HS.as_addr p) (addrs_of_loc s r))) + ) ==> ( + HS.contains h2 p /\ + HS.sel h2 p == HS.sel h1 p + )) + +let modifies_preserves_mreferences_intro + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) + (f: ( + (t: Type) -> + (pre: Preorder.preorder t) -> + (p: HS.mreference t pre) -> + Lemma + (requires ( + HS.contains h1 p /\ + (Set.mem (HS.frameOf p) (regions_of_loc s) ==> ~ (GSet.mem (HS.as_addr p) (addrs_of_loc s (HS.frameOf p)))) + )) + (ensures (HS.contains h2 p /\ HS.sel h2 p == HS.sel h1 p)) + )) +: Lemma + (modifies_preserves_mreferences s h1 h2) += let f' + (t : Type) + (pre: Preorder.preorder t) + (p : HS.mreference t pre) + : Lemma + ( + (HS.contains h1 p /\ (Set.mem (HS.frameOf p) (regions_of_loc s) ==> ~ (GSet.mem (HS.as_addr p) (addrs_of_loc s (HS.frameOf p))))) ==> + (h2 `HS.contains` p /\ h2 `HS.sel` p == h1 `HS.sel` p)) + = Classical.move_requires (f t pre) p + in + Classical.forall_intro_3 f' + +let modifies_preserves_alocs + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) +: GTot Type0 += (forall (r: HS.rid) (a: nat) (b: al r a) . + loc_aux_disjoint (Ghost.reveal (Loc?.aux s)) (GSet.singleton (ALoc r a (Some b))) + ==> + c.aloc_preserved b h1 h2 + ) + +let modifies_preserves_alocs_intro + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) + (u: unit { modifies_preserves_mreferences s h1 h2 } ) + (f: ( + (r: HS.rid) -> + (a: nat) -> + (b: al r a) -> + Lemma + (requires ( + Set.mem r (regions_of_loc s) /\ + (~ (GSet.mem a (addrs_of_loc_weak s r))) /\ + (GSet.mem a (addrs_of_loc_aux s r) /\ loc_aux_disjoint (Ghost.reveal (Loc?.aux s)) (GSet.singleton (ALoc r a (Some b)))) + )) + (ensures (c.aloc_preserved b h1 h2)) + )) +: Lemma + (modifies_preserves_alocs s h1 h2) += let f' + (r: HS.rid) + (a: nat) + (b: al r a) + : Lemma + (requires (loc_aux_disjoint (Ghost.reveal (Loc?.aux s)) (GSet.singleton (ALoc r a (Some b))))) + (ensures (c.aloc_preserved b h1 h2)) + = if Set.mem r (regions_of_loc s) && (not (GSet.mem a (addrs_of_loc_weak s r))) + then begin + if GSet.mem a (addrs_of_loc_aux s r) + then + Classical.move_requires (f r a) b + else + c.same_mreference_aloc_preserved b h1 h2 (fun a' pre' r' -> ()) + end else if Set.mem r (regions_of_loc s) + then begin + assert (GSet.mem a (addrs_of_loc_weak s r)); + assert (GSet.mem (ALoc r a None) (Ghost.reveal (Loc?.aux s))); + assert (aloc_disjoint #_ #c (ALoc r a None) (ALoc r a (Some b))); + assert False + end + else + c.same_mreference_aloc_preserved b h1 h2 (fun a' pre' r' -> ()) + in + Classical.forall_intro_3 (fun r a b -> Classical.move_requires (f' r a) b) + +let modifies_preserves_regions + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) +: GTot Type0 += forall (r: HS.rid) . (HS.live_region h1 r /\ ~ (Set.mem r (Ghost.reveal (Loc?.region_liveness_tags s)))) ==> HS.live_region h2 r + + +let modifies_preserves_not_unused_in + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) +: GTot Type0 += (forall (r: HS.rid) (n: nat) . 
( + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r) /\ + (Set.mem r (regions_of_loc s) ==> ~ (GSet.mem n (Loc?.non_live_addrs s r))) + ) ==> ( + n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r) + )) + +let modifies_preserves_not_unused_in_intro + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) + (f: ( + (r: HS.rid) -> + (n: nat) -> + Lemma + (requires ( + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r) /\ + (Set.mem r (regions_of_loc s) ==> ~ (GSet.mem n (Loc?.non_live_addrs s r))) + )) + (ensures ( + n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r) + )) + )) +: Lemma + (modifies_preserves_not_unused_in s h1 h2) += let f' + (r: HS.rid) + (n: nat) + : Lemma + (( + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r) /\ + (Set.mem r (regions_of_loc s) ==> ~ (GSet.mem n (Loc?.non_live_addrs s r))) + ) ==> ( + n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r) + )) + = Classical.move_requires (f r) n + in + Classical.forall_intro_2 f' + +let modifies + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) +: GTot Type0 += modifies_preserves_regions s h1 h2 /\ + modifies_preserves_not_unused_in s h1 h2 /\ + modifies_preserves_mreferences s h1 h2 /\ + modifies_preserves_livenesses s h1 h2 /\ + modifies_preserves_alocs s h1 h2 + +val modifies_intro_strong + (#al: aloc_t) (#c: cls al) (l: loc c) (h h' : HS.mem) + (regions: ( + (r: HS.rid) -> + Lemma + (requires (HS.live_region h r)) + (ensures (HS.live_region h' r)) + )) + (mrefs: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires ((loc_disjoint (loc_mreference b) l) /\ HS.contains h b)) + (ensures (HS.contains h' b /\ HS.sel h' b == HS.sel h b)) + )) + (livenesses: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires (HS.contains h b)) + (ensures (HS.contains h' b)) + )) + (addr_unused_in: ( + (r: HS.rid) -> + (n: nat) -> + Lemma + (requires ( + (Set.mem r (regions_of_loc l) ==> ~ (GSet.mem n (Loc?.non_live_addrs l r))) /\ + HS.live_region h r /\ + HS.live_region h' r /\ n `Heap.addr_unused_in` (HS.get_hmap h' `Map.sel` r) + )) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` r))) + )) + (alocs: ( + (r: HS.rid) -> + (a: nat) -> + (x: al r a) -> + Lemma + (requires (loc_disjoint (loc_of_aloc x) l)) + (ensures (c.aloc_preserved x h h')) + )) +: Lemma + (modifies l h h') + +#push-options "--z3rlimit 20" +let modifies_intro_strong #al #c l h h' regions mrefs lives unused_ins alocs = + Classical.forall_intro (Classical.move_requires regions); + assert (modifies_preserves_regions l h h'); + + let aux (t:Type) (pre:Preorder.preorder t) (p:HS.mreference t pre) + :Lemma (requires (HS.contains h p /\ + (Set.mem (HS.frameOf p) (regions_of_loc l) ==> ~ (GSet.mem (HS.as_addr p) (addrs_of_loc l (HS.frameOf p)))))) + (ensures (HS.contains h' p /\ HS.sel h' p == HS.sel h p)) + = + assert_norm (Loc?.region_liveness_tags (loc_mreference #_ #c p) == Ghost.hide Set.empty); + assert (loc_disjoint_region_liveness_tags (loc_mreference p) l); + // FIXME: WHY WHY WHY is this assert necessary? 
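+    // The asserts below establish the remaining pieces of
+    // loc_disjoint (loc_mreference p) l -- disjointness of the aux sets and of
+    // the address sets -- so that the hypothesis `mrefs` can be applied.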
+ assert_spinoff (loc_aux_disjoint (Ghost.reveal (Loc?.aux (loc_mreference p))) (Ghost.reveal (Loc?.aux l))); + // FIXME: Now this one is too :) + assert (loc_disjoint_addrs (loc_mreference p) l); + assert ((loc_disjoint (loc_mreference p) l)); + mrefs t pre p + in + + modifies_preserves_mreferences_intro l h h' aux; + Classical.forall_intro_3 (fun t pre p -> Classical.move_requires (lives t pre) p); + modifies_preserves_not_unused_in_intro l h h' (fun r n -> + unused_ins r n + ); + modifies_preserves_alocs_intro l h h' () (fun r a b -> + loc_aux_disjoint_sym (Ghost.reveal (Loc?.aux l)) (Ghost.reveal (Loc?.aux (loc_of_aloc b))); + alocs r a b + ) +#pop-options + +let modifies_intro #al #c l h h' regions mrefs lives unused_ins alocs = + modifies_intro_strong l h h' + regions + mrefs + lives + (fun r n -> unused_ins r n) + alocs + +let modifies_none_intro #al #c h h' regions mrefs unused_ins = + modifies_intro_strong #_ #c loc_none h h' + (fun r -> regions r) + (fun t pre b -> mrefs t pre b) + (fun t pre b -> mrefs t pre b) + (fun r n -> unused_ins r n) + (fun r a x -> + c.same_mreference_aloc_preserved x h h' (fun t pre b -> mrefs t pre b) + ) + +let modifies_address_intro #al #c r n h h' regions mrefs unused_ins = + Classical.forall_intro (Classical.move_requires regions); + let l : loc c = loc_addresses #_ #c false r (Set.singleton n) in + modifies_preserves_mreferences_intro l h h' + (fun t pre p -> mrefs t pre p) + ; + modifies_preserves_livenesses_intro l h h' + (fun t pre p -> mrefs t pre p) + ; + modifies_preserves_not_unused_in_intro l h h' + (fun r n -> unused_ins r n) + ; + modifies_preserves_alocs_intro l h h' () + (fun r a b -> + c.same_mreference_aloc_preserved b h h' (fun t pre p -> mrefs t pre p) + ) + +let modifies_aloc_intro #al #c #r #n x h h' regions mrefs livenesses unused_ins alocs = + modifies_intro_strong #_ #c (loc_of_aloc x) h h' + (fun r -> regions r) + (fun t pre b -> mrefs t pre b) + (fun t pre b -> livenesses t pre b) + (fun r n -> unused_ins r n) + (fun r' n' z -> + if r' = r && n' = n + then begin + loc_disjoint_aloc_elim #_ #c z x; + alocs z + end else + c.same_mreference_aloc_preserved z h h' (fun t pre p -> + mrefs t pre p + ) + ) + +let modifies_live_region #al #c s h1 h2 r = () + +let modifies_mreference_elim #al #c #t #pre b p h h' = () + +let modifies_aloc_elim #al #c #r #a b p h h' = () + +let modifies_refl #al #c s h = + Classical.forall_intro_3 (fun r a b -> c.aloc_preserved_refl #r #a b h) + +let modifies_loc_includes #al #c s1 h h' s2 = + assert (modifies_preserves_mreferences s1 h h'); + Classical.forall_intro_2 (loc_aux_disjoint_sym #al #c); + Classical.forall_intro_3 (fun l1 l2 l3 -> Classical.move_requires (loc_aux_disjoint_loc_aux_includes #al #c l1 l2) l3); + assert (modifies_preserves_alocs s1 h h') + +let modifies_preserves_liveness #al #c s1 s2 h h' #t #pre r = () + +#push-options "--z3rlimit 20 --max_fuel 0 --max_ifuel 0" +let modifies_preserves_liveness_strong #al #c s1 s2 h h' #t #pre r x = + let rg = HS.frameOf r in + let ad = HS.as_addr r in + let la = loc_of_aloc #_ #c #rg #ad x in + if Set.mem rg (regions_of_loc s2) + then begin + assert (Loc?.non_live_addrs s2 rg `GSet.subset` Loc?.non_live_addrs (address_liveness_insensitive_locs c) rg); + assert (Loc?.non_live_addrs s2 rg `GSet.subset` GSet.empty); + assert (~ (GSet.mem ad (Loc?.non_live_addrs s2 rg))); + if Set.mem rg (regions_of_loc s1) + then begin + if GSet.mem ad (Loc?.non_live_addrs s1 rg) + then begin + assert (loc_disjoint_aux s1 la); + assert (GSet.subset 
(Loc?.non_live_addrs s1 rg) (Loc?.live_addrs s1 rg)); + assert (aloc_domain c (Loc?.regions s1) (Loc?.live_addrs s1) `GSet.subset` (Ghost.reveal (Loc?.aux s1))); + assert (GSet.mem (ALoc rg ad None) (Ghost.reveal (Loc?.aux s1))); + assert (GSet.mem (ALoc rg ad (Some x)) (Ghost.reveal (Loc?.aux la))); + assert (aloc_disjoint (ALoc rg ad None) (ALoc #_ #c rg ad (Some x))); + () + end else () + end else () + end else () +#pop-options + +let modifies_preserves_region_liveness #al #c l1 l2 h h' r = () + +let modifies_preserves_region_liveness_reference #al #c l1 l2 h h' #t #pre r = () + +let modifies_preserves_region_liveness_aloc #al #c l1 l2 h h' #r #n x = + if Set.mem r (Ghost.reveal (Loc?.region_liveness_tags l1)) + then begin + assert (GSet.subset (GSet.complement GSet.empty) (Loc?.non_live_addrs l1 r)); + assert (GSet.subset (Loc?.non_live_addrs l1 r) (Loc?.live_addrs l1 r)) + end else () + +let modifies_trans' + (#al: aloc_t) (#c: cls al) + (s: loc c) + (h1 h2: HS.mem) + (h3: HS.mem) +: Lemma + (requires (modifies s h1 h2 /\ modifies s h2 h3)) + (ensures (modifies s h1 h3)) += Classical.forall_intro_3 (fun r a b -> Classical.move_requires (c.aloc_preserved_trans #r #a b h1 h2) h3) + +let modifies_trans #al #c s12 h1 h2 s23 h3 = + let u = loc_union s12 s23 in + modifies_loc_includes u h1 h2 s12; + modifies_loc_includes u h2 h3 s23; + modifies_trans' u h1 h2 h3 + +let addr_unused_in_aloc_preserved + (#al: aloc_t) (#c: cls al) + (#r: HS.rid) + (#a: nat) + (b: al r a) + (h1: HS.mem) + (h2: HS.mem) + : Lemma + (requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))) + (ensures (c.aloc_preserved b h1 h2)) += c.same_mreference_aloc_preserved b h1 h2 (fun a' pre r' -> assert False) + +#push-options "--z3rlimit 10" +let modifies_only_live_regions_weak + (#al: aloc_t) (#c: cls al) + (rs: Set.set HS.rid) + (l: loc c) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_regions false rs) l) h h' /\ + loc_disjoint (loc_regions false rs) l /\ + (forall r . Set.mem r rs ==> (~ (HS.live_region h r))) + )) + (ensures (modifies l h h')) += assert (modifies_preserves_mreferences l h h'); // FIXME: WHY WHY WHY? 
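+  // For any aloc in one of the regions rs (none of which is live in h),
+  // addr_unused_in_aloc_preserved applies vacuously, so such alocs are
+  // preserved; together with the modifies hypothesis on the union, this
+  // appears to cover every aloc disjoint from l.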
+ Classical.forall_intro_3 (fun r a b -> Classical.move_requires (addr_unused_in_aloc_preserved #al #c #r #a b h) h') +#pop-options + +(* Restrict a set of locations along a set of regions *) + +let restrict_to_regions + (#al: aloc_t) (#c: cls al) + (l: loc c) + (rs: Set.set HS.rid) +: GTot (loc c) += let (Loc regions region_liveness_tags non_live_addrs live_addrs aux) = l in + let regions' = (Ghost.hide (Set.intersect (Ghost.reveal regions) rs)) in + Loc + regions' + (Ghost.hide (Set.intersect (Ghost.reveal region_liveness_tags) rs)) + (mk_non_live_addrs (fun (r: addrs_dom regions') -> (non_live_addrs r <: GSet.set nat))) + (mk_live_addrs (fun (r: addrs_dom regions') -> (live_addrs r <: GSet.set nat))) + (Ghost.hide (GSet.intersect (Ghost.reveal aux) (aloc_domain c (Ghost.hide rs) (fun r -> GSet.complement GSet.empty)))) + +let regions_of_loc_restrict_to_regions + (#al: aloc_t) (#c: cls al) + (l: loc c) + (rs: Set.set HS.rid) +: Lemma + (regions_of_loc (restrict_to_regions l rs) == Set.intersect (regions_of_loc l) rs) + [SMTPat (regions_of_loc (restrict_to_regions l rs))] += assert (Set.equal (regions_of_loc (restrict_to_regions l rs)) (Set.intersect (regions_of_loc l) rs)) + +let addrs_of_loc_weak_restrict_to_regions + (#al: aloc_t) (#c: cls al) + (l: loc c) + (rs: Set.set HS.rid) + (r: HS.rid) +: Lemma + (addrs_of_loc_weak (restrict_to_regions l rs) r == (if Set.mem r rs then addrs_of_loc_weak l r else GSet.empty)) + [SMTPat (addrs_of_loc_weak (restrict_to_regions l rs) r)] += assert (GSet.equal (addrs_of_loc_weak (restrict_to_regions l rs) r) (if Set.mem r rs then addrs_of_loc_weak l r else GSet.empty)) + +let addrs_of_loc_restrict_to_regions + (#al: aloc_t) (#c: cls al) + (l: loc c) + (rs: Set.set HS.rid) + (r: HS.rid) +: Lemma + (addrs_of_loc (restrict_to_regions l rs) r == (if Set.mem r rs then addrs_of_loc l r else GSet.empty)) + [SMTPat (addrs_of_loc (restrict_to_regions l rs) r)] += assert (GSet.equal (addrs_of_loc (restrict_to_regions l rs) r) (if Set.mem r rs then addrs_of_loc l r else GSet.empty)) + +let loc_includes_restrict_to_regions + (#al: aloc_t) (#c: cls al) + (l: loc c) + (rs: Set.set HS.rid) +: Lemma + (loc_includes l (restrict_to_regions l rs)) += Classical.forall_intro (loc_aux_includes_refl #al #c) + +let loc_includes_loc_union_restrict_to_regions + (#al: aloc_t) (#c: cls al) + (l: loc c) + (rs: Set.set HS.rid) +: Lemma + (loc_equal (loc_union (restrict_to_regions l rs) (restrict_to_regions l (Set.complement rs))) l) += () + +let loc_includes_loc_regions_restrict_to_regions + (#al: aloc_t) (#c: cls al) + (l: loc c) + (rs: Set.set HS.rid) +: Lemma + (loc_includes (loc_regions false rs) (restrict_to_regions l rs)) += Classical.forall_intro (loc_aux_includes_refl #al #c) + +let modifies_only_live_regions #al #c rs l h h' = + let s = l in + let c_rs = Set.complement rs in + let s_rs = restrict_to_regions s rs in + let s_c_rs = restrict_to_regions s c_rs in + let lrs = loc_regions false rs in + loc_includes_loc_regions_restrict_to_regions s rs; + loc_includes_union_l lrs s_c_rs s_rs; + loc_includes_refl s_c_rs; + loc_includes_union_l lrs s_c_rs s_c_rs; + loc_includes_union_r (loc_union lrs s_c_rs) s_rs s_c_rs; + loc_includes_loc_union_restrict_to_regions s rs; + loc_includes_trans (loc_union lrs s_c_rs) (loc_union s_rs s_c_rs) s; + modifies_loc_includes (loc_union lrs s_c_rs) h h' (loc_union lrs s); + loc_includes_loc_regions_restrict_to_regions s c_rs; + loc_disjoint_regions #al #c false false rs c_rs; + loc_includes_refl lrs; + loc_disjoint_includes lrs (loc_regions 
false c_rs) lrs s_c_rs; + modifies_only_live_regions_weak rs s_c_rs h h'; + loc_includes_restrict_to_regions s c_rs; + modifies_loc_includes s h h' s_c_rs + +let no_upd_fresh_region #al #c r l h0 h1 = + modifies_only_live_regions (HS.mod_set (Set.singleton r)) l h0 h1 + +let fresh_frame_modifies #al c h0 h1 = + modifies_intro_strong #_ #c loc_none h0 h1 + (fun _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + (fun r a x -> + c.same_mreference_aloc_preserved #r #a x h0 h1 (fun _ _ _ -> ())) + +let new_region_modifies #al c m0 r0 col += let (_, m1) = HS.new_eternal_region m0 r0 col in + modifies_intro_strong #_ #c loc_none m0 m1 + (fun _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + (fun r a x -> + c.same_mreference_aloc_preserved #r #a x m0 m1 (fun _ _ _ -> ())) + +#push-options "--z3rlimit 20" +let popped_modifies #al c h0 h1 = + let l = loc_region_only #_ #c false (HS.get_tip h0) in + modifies_preserves_mreferences_intro l h0 h1 (fun t pre p -> + assert_norm (Loc?.region_liveness_tags (loc_mreference #_ #c p) == Ghost.hide Set.empty); + assert (loc_disjoint_region_liveness_tags (loc_mreference p) l ); + // FIXME: WHY WHY WHY is this assert necessary? + assert (loc_aux_disjoint (Ghost.reveal (Loc?.aux (loc_mreference p))) (Ghost.reveal (Loc?.aux l))); + () + ); + modifies_preserves_alocs_intro l h0 h1 () (fun r a b -> + loc_aux_disjoint_sym (Ghost.reveal (Loc?.aux l)) (Ghost.reveal (Loc?.aux (loc_of_aloc b))); + () + ) +#pop-options + +let modifies_fresh_frame_popped #al #c h0 h1 s h2 h3 = + fresh_frame_modifies c h0 h1; + let r = loc_region_only #al #c false (HS.get_tip h2) in + let rs = HS.mod_set (Set.singleton (HS.get_tip h1)) in + let s' = loc_union (loc_regions false rs) s in + modifies_trans' s' h0 h1 h2; + assert (modifies_preserves_mreferences r h2 h3); + let f23 (r: HS.rid) (a: nat) (b: al r a) : Lemma + (requires (r <> HS.get_tip h2)) + (ensures (c.aloc_preserved b h2 h3)) + = c.same_mreference_aloc_preserved #r #a b h2 h3 (fun a' pre r' -> ()) + in + modifies_preserves_alocs_intro r h2 h3 () (fun r a b -> + f23 r a b + ); + modifies_trans' s' h0 h2 h3; + modifies_only_live_regions rs s h0 h3 + +let modifies_loc_regions_intro #al #c rs h1 h2 = + let f (r: HS.rid) (a: nat) (b: al r a) : Lemma + (requires (not (Set.mem r rs))) + (ensures (c.aloc_preserved b h1 h2)) + = c.same_mreference_aloc_preserved #r #a b h1 h2 (fun a' pre r' -> ()) + in + assert (modifies_preserves_mreferences (loc_regions #al #c true rs) h1 h2); + modifies_preserves_alocs_intro (loc_regions #_ #c true rs) h1 h2 () (fun r a b -> + f r a b + ) + +#push-options "--z3rlimit 20" +let modifies_loc_addresses_intro_weak + (#al: aloc_t) (#c: cls al) + (r: HS.rid) + (s: Set.set nat) + (l: loc c) + (h1 h2: HS.mem) +: Lemma + (requires ( + HS.live_region h2 r /\ + modifies (loc_union (loc_region_only false r) l) h1 h2 /\ + HS.modifies_ref r s h1 h2 /\ + loc_disjoint l (loc_region_only false r) + )) + (ensures (modifies (loc_union (loc_addresses true r s) l) h1 h2)) += modifies_preserves_mreferences_intro (loc_union (loc_addresses true r s) l) h1 h2 (fun r' a' b' -> + () + ); + modifies_preserves_livenesses_intro (loc_union (loc_addresses true r s) l) h1 h2 (fun r' a' b' -> + () + ); + modifies_preserves_not_unused_in_intro (loc_union (loc_addresses true r s) l) h1 h2 (fun r' n' -> + () + ); + let f (a: nat) (b: al r a) : Lemma + (requires (not (Set.mem a s))) + (ensures (c.aloc_preserved b h1 h2)) + = c.same_mreference_aloc_preserved #r #a b h1 h2 (fun a' pre r_ -> ()) + in + 
modifies_preserves_alocs_intro (loc_union (loc_addresses true r s) l) h1 h2 () (fun r' a b -> if r = r' then f a b else () + ) + +let modifies_loc_addresses_intro #al #c r s l h1 h2 = + loc_includes_loc_regions_restrict_to_regions l (Set.singleton r); + loc_includes_loc_union_restrict_to_regions l (Set.singleton r); + assert (modifies (loc_union (loc_region_only false r) (loc_union (restrict_to_regions l (Set.singleton r)) (restrict_to_regions l (Set.complement (Set.singleton r))))) h1 h2); + let l' = restrict_to_regions l (Set.complement (Set.singleton r)) in + loc_includes_refl (loc_region_only #_ #c false r) ; + loc_includes_loc_regions_restrict_to_regions l (Set.complement (Set.singleton r)); + loc_disjoint_regions #_ #c false false (Set.complement (Set.singleton r)) (Set.singleton r); + loc_disjoint_includes (loc_regions #_ #c false (Set.complement (Set.singleton r))) (loc_region_only false r) l' (loc_region_only false r); + modifies_loc_addresses_intro_weak r s l' h1 h2; + loc_includes_restrict_to_regions l (Set.complement (Set.singleton r)) +#pop-options + +let modifies_ralloc_post #al #c #a #rel i init h x h' = + let g (r: HS.rid) (a: nat) (b: al r a) : Lemma + (c.aloc_preserved b h h') + = c.same_mreference_aloc_preserved #r #a b h h' (fun a' pre r' -> ()) + in + Classical.forall_intro_3 g + +let modifies_salloc_post #al #c #a #rel init h x h' = + let g (r: HS.rid) (a: nat) (b: al r a) : Lemma + (c.aloc_preserved b h h') + = c.same_mreference_aloc_preserved #r #a b h h' (fun a' pre r' -> ()) + in + Classical.forall_intro_3 g + +let modifies_free #al #c #a #rel r m = + let g (r': HS.rid) (a: nat) (b: al r' a) : Lemma + (requires (r' <> HS.frameOf r \/ a <> HS.as_addr r)) + (ensures (c.aloc_preserved b m (HS.free r m))) + = c.same_mreference_aloc_preserved #r' #a b m (HS.free r m) (fun a' pre r' -> ()) + in + modifies_preserves_alocs_intro (loc_freed_mreference #_ #c r) m (HS.free r m) () (fun r a b -> g r a b) + +let modifies_none_modifies #al #c h1 h2 += let g (r: HS.rid) (a: nat) (b: al r a) : Lemma + (c.aloc_preserved b h1 h2) + = c.same_mreference_aloc_preserved #r #a b h1 h2 (fun a' pre r' -> ()) + in + Classical.forall_intro_3 g + +let modifies_upd #al #c #t #pre r v h = + let h' = HS.upd h r v in + modifies_intro #_ #c (loc_mreference r) h h' + (fun r -> ()) + (fun t pre b -> ()) + (fun t pre b -> ()) + (fun r n -> ()) + (fun r a b -> c.same_mreference_aloc_preserved #r #a b h h' (fun a' pre' r' -> ())) + +#push-options "--z3rlimit 15" +let addrs_of_loc_loc_union_loc_of_aloc_eq_loc_union_loc_addresses_singleton + (#al: aloc_t) (#c: cls al) (l: loc c) (#r0: HS.rid) (#a0: nat) (al0: al r0 a0) (r: HS.rid) +: Lemma + (addrs_of_loc (loc_union l (loc_of_aloc al0)) r == addrs_of_loc (loc_union l (loc_addresses true r0 (Set.singleton a0))) r) += assert (addrs_of_loc (loc_union l (loc_of_aloc al0)) r `GSet.equal` addrs_of_loc (loc_union l (loc_addresses true r0 (Set.singleton a0))) r) +#pop-options + +let addrs_of_loc_weak_loc_includes #al (#c: cls al) (l: loc c) (r0: HS.rid) (a0: nat) : Lemma + (requires (a0 `GSet.mem` addrs_of_loc_weak l r0)) + (ensures (l `loc_includes` loc_addresses true r0 (Set.singleton a0))) += () + +val modifies_strengthen' + (#al: aloc_t) (#c: cls al) (l: loc c) (#r0: HS.rid) (#a0: nat) (al0: al r0 a0) (h h' : HS.mem) + (alocs: ( + (f: ((t: Type) -> (pre: Preorder.preorder t) -> (m: HS.mreference t pre) -> Lemma + (requires (HS.frameOf m == r0 /\ HS.as_addr m == a0 /\ HS.contains h m)) + (ensures (HS.contains h' m)) + )) -> + (x: al r0 a0) -> + Lemma + 
(requires (c.aloc_disjoint x al0 /\ loc_disjoint (loc_of_aloc x) l)) + (ensures (c.aloc_preserved x h h')) + )) +: Lemma + (requires ((~ (a0 `GSet.mem` addrs_of_loc_weak l r0)) /\ modifies (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h')) + (ensures (modifies (loc_union l (loc_of_aloc al0)) h h')) + +#push-options "--z3rlimit 25 --fuel 0 --ifuel 0" +let modifies_strengthen' #al #c l #r0 #a0 al0 h h' alocs = + Classical.forall_intro (addrs_of_loc_loc_union_loc_of_aloc_eq_loc_union_loc_addresses_singleton l al0); + assert (modifies_preserves_regions (loc_union l (loc_of_aloc al0)) h h'); + assert (modifies_preserves_mreferences (loc_union l (loc_of_aloc al0)) h h'); + assert (modifies_preserves_not_unused_in (loc_union l (loc_of_aloc al0)) h h'); + assert (modifies_preserves_livenesses (loc_union l (loc_of_aloc al0)) h h'); + modifies_preserves_alocs_intro (loc_union l (loc_of_aloc al0)) h h' () (fun r a b -> + if r = r0 && a = a0 + then begin + assert (loc_aux_disjoint (Ghost.reveal (Loc?.aux (loc_union l (loc_of_aloc al0)))) (GSet.singleton (ALoc r0 a0 (Some b)))); + assert (loc_aux_disjoint (Ghost.reveal (Loc?.aux l)) (GSet.singleton (ALoc r0 a0 (Some b)))); + assert (loc_disjoint l (loc_of_aloc b)); + loc_disjoint_sym l (loc_of_aloc b); + assert (loc_aux_disjoint #_ #c (Ghost.reveal (Loc?.aux (loc_of_aloc al0))) (GSet.singleton (ALoc r0 a0 (Some b)))); + assert (loc_aux_disjoint #_ #c (GSet.singleton (ALoc r0 a0 (Some al0))) (GSet.singleton (ALoc r0 a0 (Some b)))); + assert (GSet.mem (ALoc r0 a0 (Some al0)) (GSet.singleton (ALoc #_ #c r0 a0 (Some al0)))); + assert (GSet.mem (ALoc r0 a0 (Some b)) (GSet.singleton (ALoc #_ #c r0 a0 (Some b)))); + assert (aloc_disjoint #_ #c (ALoc r0 a0 (Some al0)) (ALoc r0 a0 (Some b))); + assert (c.aloc_disjoint al0 b); + c.aloc_disjoint_sym al0 b; + alocs (fun t pre m -> ()) b + end + else begin + assert (loc_disjoint (loc_union l (loc_addresses true r0 (Set.singleton a0))) (loc_of_aloc b)) + by (let open FStar.Stubs.Tactics.V2.Builtins in + let open FStar.Tactics.SMT in + set_rlimit 64; + set_options "--z3cliopt 'smt.qi.eager_threshold=5'"; + ()) + end + ); + assert (modifies (loc_union l (loc_of_aloc al0)) h h') +#pop-options + +let modifies_strengthen #al #c l #r0 #a0 al0 h h' alocs = + if a0 `GSet.mem` addrs_of_loc_weak l r0 + then begin + addrs_of_loc_weak_loc_includes l r0 a0; + loc_includes_refl l; + loc_includes_union_r l l (loc_addresses true r0 (Set.singleton a0)); + loc_includes_union_l l (loc_of_aloc al0) l; + loc_includes_trans (loc_union l (loc_of_aloc al0)) l (loc_union l (loc_addresses true r0 (Set.singleton a0))); + modifies_loc_includes (loc_union l (loc_of_aloc al0)) h h' (loc_union l (loc_addresses true r0 (Set.singleton a0))) + end + else + modifies_strengthen' l al0 h h' alocs + + +let does_not_contain_addr (h: HS.mem) (ra: HS.rid & nat) : GTot Type0 = + HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` (fst ra)) + +let not_live_region_does_not_contain_addr h ra = () + +let unused_in_does_not_contain_addr h #a #rel r = () + +let addr_unused_in_does_not_contain_addr h ra = () + +let does_not_contain_addr_addr_unused_in h ra = () + +let free_does_not_contain_addr #a #rel r m x = () + +let does_not_contain_addr_elim #a #rel r m x = () + +let disjoint_addrs_of_loc_loc_disjoint + (#al: aloc_t) + (#c: cls al) + (l1 l2: loc c) +: Lemma + (requires ( + Set.subset (Set.intersect (Ghost.reveal (Loc?.region_liveness_tags l1)) (Ghost.reveal (Loc?.region_liveness_tags l2))) Set.empty /\ + (forall r 
. GSet.subset (GSet.intersect (addrs_of_loc l1 r) (addrs_of_loc l2 r)) GSet.empty) + )) + (ensures (loc_disjoint l1 l2)) += // FIXME: WHY WHY WHY do I need this assert? + let l1' = Ghost.reveal (Loc?.aux l1) in + let l2' = Ghost.reveal (Loc?.aux l2) in + assert (forall (b1 b2: aloc c) . (GSet.mem b1 l1' /\ GSet.mem b2 l2') ==> aloc_disjoint b1 b2) + +let loc_not_unused_in #al c h = + let f (r: HS.rid) : GTot (GSet.set nat) = + GSet.comprehend (fun a -> StrongExcludedMiddle.strong_excluded_middle (HS.live_region h r /\ ~ (h `does_not_contain_addr` (r, a)))) + in + Loc + (Ghost.hide (Set.complement Set.empty)) + (Ghost.hide Set.empty) + (mk_non_live_addrs f) + (mk_live_addrs (fun x -> f x)) + (Ghost.hide (aloc_domain c (Ghost.hide (Set.complement Set.empty)) f)) + +let loc_unused_in #al c h = + let f (r: HS.rid) : GTot (GSet.set nat) = + if not (HS.live_region h r) + then + GSet.complement GSet.empty + else + GSet.comprehend (fun a -> StrongExcludedMiddle.strong_excluded_middle (h `does_not_contain_addr` (r, a))) + in + Loc + (Ghost.hide (Set.complement Set.empty)) + (Ghost.hide (Set.complement (FStar.Map.domain (HS.get_hmap h)))) + (mk_non_live_addrs (fun x -> f x)) + (mk_live_addrs (fun x -> f x)) + (Ghost.hide (aloc_domain c (Ghost.hide (Set.complement Set.empty)) f)) + +let loc_regions_unused_in #al c h rs = () + +#push-options "--z3rlimit 20" +let loc_addresses_unused_in #al c r a h = () +#pop-options + +let loc_addresses_not_unused_in #al c r a h = () + +#push-options "--z3rlimit 50" +let loc_unused_in_not_unused_in_disjoint #al c h = + assert (Ghost.reveal (Loc?.aux (loc_unused_in c h)) `loc_aux_disjoint` Ghost.reveal (Loc?.aux (loc_not_unused_in c h))); + assert_spinoff (loc_disjoint #al #c (loc_unused_in #al c h) + (loc_not_unused_in #al c h)) +#pop-options + +#push-options "--z3cliopt 'smt.qi.eager_threshold=100'" +let not_live_region_loc_not_unused_in_disjoint #al c h0 r += let l1 = loc_region_only false r in + let l2 = loc_not_unused_in c h0 in + assert (loc_disjoint_region_liveness_tags l1 l2); + assert (loc_disjoint_addrs l1 l2); + assert (loc_disjoint_aux l1 l2) + +#push-options "--z3rlimit 16" +let modifies_address_liveness_insensitive_unused_in #al c h h' = + assert (forall r . HS.live_region h r ==> HS.live_region h' r) ; + let ln' = loc_not_unused_in c h' in + let ln = loc_not_unused_in c h in + assert (forall (r: HS.rid) . Loc?.non_live_addrs ln r `GSet.subset` Loc?.non_live_addrs ln' r); + assert (ln' `loc_includes` ln); + let lu = loc_unused_in c h in + let lu' = loc_unused_in c h' in + assert (forall (r: HS.rid) . Loc?.non_live_addrs lu' r `GSet.subset` Loc?.non_live_addrs lu r); + assert (forall (r: HS.rid) . 
Loc?.live_addrs lu' r `GSet.subset` Loc?.live_addrs lu r); + assert (lu `loc_includes` lu') +#pop-options +#pop-options + +#push-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 16" +let modifies_only_not_unused_in #al #c l h h' = + assert (modifies_preserves_regions l h h'); + assert (modifies_preserves_not_unused_in l h h'); + assert (modifies_preserves_mreferences l h h'); + assert (modifies_preserves_livenesses l h h'); + modifies_preserves_alocs_intro l h h' () (fun r a b -> + if StrongExcludedMiddle.strong_excluded_middle (h `does_not_contain_addr` (r, a)) + then c.same_mreference_aloc_preserved b h h' (fun a' pre' r' -> ()) + else () + ) +#pop-options + +#push-options "--z3rlimit 20" +let mreference_live_loc_not_unused_in #al c #t #pre h b = + Classical.move_requires (does_not_contain_addr_addr_unused_in h) (HS.frameOf b, HS.as_addr b); + assert (~ (h `does_not_contain_addr` (HS.frameOf b, HS.as_addr b))); + loc_addresses_not_unused_in c (HS.frameOf b) (Set.singleton (HS.as_addr b)) h; + loc_includes_trans (loc_not_unused_in c h) (loc_freed_mreference b) (loc_mreference b); + () +#pop-options + +#push-options "--z3cliopt 'smt.qi.eager_threshold=100'" +let mreference_unused_in_loc_unused_in #al c #t #pre h b = + Classical.move_requires (addr_unused_in_does_not_contain_addr h) (HS.frameOf b, HS.as_addr b); + loc_addresses_unused_in c (HS.frameOf b) (Set.singleton (HS.as_addr b)) h; + loc_includes_addresses_addresses c false true (HS.frameOf b) (Set.singleton (HS.as_addr b)) (Set.singleton (HS.as_addr b)); + loc_includes_trans (loc_unused_in c h) (loc_freed_mreference b) (loc_mreference b); + () +#pop-options + +(* * Compositionality *) + +noeq +type cls_union_aloc + (al: (bool -> HS.rid -> nat -> Tot (Type u#x))) + (r: HS.rid) (n: nat) : Type u#x += | ALOC_FALSE of (al false) r n + | ALOC_TRUE of (al true) r n + +let bool_of_cls_union_aloc + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (#r: HS.rid) (#n: nat) + (l: cls_union_aloc al r n) +: Tot bool = + match l with + | ALOC_FALSE _ -> false + | ALOC_TRUE _ -> true + +let aloc_of_cls_union_aloc + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (#r: HS.rid) (#n: nat) + (l: cls_union_aloc al r n) +: Tot ((al (bool_of_cls_union_aloc l)) r n) += match l with + | ALOC_FALSE x -> x + | ALOC_TRUE x -> x + +let make_cls_union_aloc + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (b: bool) + (#r: HS.rid) + (#n: nat) + (l: (al b) r n) +: Tot (cls_union_aloc al r n) += if b + then ALOC_TRUE l + else ALOC_FALSE l + +let cls_union_aloc_includes + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (#r: HS.rid) + (#a: nat) + (larger smaller: cls_union_aloc al r a) +: GTot Type0 = + bool_of_cls_union_aloc larger == bool_of_cls_union_aloc smaller /\ + (c (bool_of_cls_union_aloc larger)).aloc_includes + (aloc_of_cls_union_aloc larger) + (aloc_of_cls_union_aloc smaller) + +let cls_union_aloc_disjoint + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (#r: HS.rid) + (#a: nat) + (larger smaller: cls_union_aloc al r a) +: GTot Type0 = + bool_of_cls_union_aloc larger == bool_of_cls_union_aloc smaller /\ + (c (bool_of_cls_union_aloc larger)).aloc_disjoint + (aloc_of_cls_union_aloc larger) + (aloc_of_cls_union_aloc smaller) + +let cls_union_aloc_preserved + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (#r: HS.rid) + (#a: nat) + (x: cls_union_aloc al r a) + (h h' : HS.mem) +: GTot Type0 += (c (bool_of_cls_union_aloc x)).aloc_preserved + (aloc_of_cls_union_aloc x) 
+ h + h' + +let aloc_union = cls_union_aloc + +let cls_union #al c = Cls + #(cls_union_aloc al) + (cls_union_aloc_includes c) + (* aloc_includes_refl *) + (fun #r #a x -> + (c (bool_of_cls_union_aloc x)).aloc_includes_refl (aloc_of_cls_union_aloc x)) + (* aloc_includes_trans *) + (fun #r #a x1 x2 x3 -> + (c (bool_of_cls_union_aloc x1)).aloc_includes_trans + (aloc_of_cls_union_aloc x1) + (aloc_of_cls_union_aloc x2) + (aloc_of_cls_union_aloc x3) + ) + (cls_union_aloc_disjoint c) + (* aloc_disjoint_sym *) + (fun #r #a x1 x2 -> + if bool_of_cls_union_aloc x1 = bool_of_cls_union_aloc x2 + then + (c (bool_of_cls_union_aloc x1)).aloc_disjoint_sym + (aloc_of_cls_union_aloc x1) + (aloc_of_cls_union_aloc x2) + else () + ) + (* aloc_disjoint_includes *) + (fun #r #a larger1 larger2 smaller1 smaller2 -> + (c (bool_of_cls_union_aloc larger1)).aloc_disjoint_includes + (aloc_of_cls_union_aloc larger1) + (aloc_of_cls_union_aloc larger2) + (aloc_of_cls_union_aloc smaller1) + (aloc_of_cls_union_aloc smaller2) + ) + (cls_union_aloc_preserved c) + (* aloc_preserved_refl *) + (fun #r #a x h -> + (c (bool_of_cls_union_aloc x)).aloc_preserved_refl + (aloc_of_cls_union_aloc x) + h + ) + (* aloc_preserved_trans *) + (fun #r #a x h1 h2 h3 -> + (c (bool_of_cls_union_aloc x)).aloc_preserved_trans + (aloc_of_cls_union_aloc x) + h1 + h2 + h3 + ) + (* same_mreference_aloc_preserved *) + (fun #r #a b h1 h2 f -> + (c (bool_of_cls_union_aloc b)).same_mreference_aloc_preserved + (aloc_of_cls_union_aloc b) + h1 + h2 + f + ) + +let union_aux_of_aux_left_pred + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (s: GSet.set (aloc (c b))) + (x: aloc (cls_union c)) +: GTot bool += let ALoc region addr loc = x in + match loc with + | None -> GSet.mem (ALoc region addr None) s + | Some loc -> + b = bool_of_cls_union_aloc #al #region #addr loc && + GSet.mem (ALoc region addr (Some (aloc_of_cls_union_aloc #al #region #addr loc))) s + +let union_aux_of_aux_left + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (s: GSet.set (aloc (c b))) +: Tot (GSet.set (aloc (cls_union c))) += GSet.comprehend (union_aux_of_aux_left_pred c b s) + +let union_loc_of_loc #al c b l = + let (Loc regions region_liveness_tags non_live_addrs live_addrs aux) = l in + let aux' : GSet.set (aloc #(cls_union_aloc al) (cls_union c)) = + union_aux_of_aux_left c b (Ghost.reveal aux) + `GSet.union` + (aloc_domain (cls_union c) regions live_addrs) + in + Loc + #(cls_union_aloc al) + #(cls_union c) + regions + region_liveness_tags + non_live_addrs + live_addrs + (Ghost.hide aux') + +let union_aux_of_aux_left_inv_pred + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (#c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (s: GSet.set (aloc (cls_union c))) + (x: aloc (c b)) +: GTot bool += let ALoc region addr loc = x in + match loc with + | None -> GSet.mem (ALoc region addr None) s + | Some loc -> + GSet.mem (ALoc region addr (Some (make_cls_union_aloc b loc))) s + +let union_aux_of_aux_left_inv + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (#c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (s: GSet.set (aloc (cls_union c))) +: Tot (GSet.set (aloc (c b))) += GSet.comprehend (union_aux_of_aux_left_inv_pred b s) + +let mem_union_aux_of_aux_left_intro + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (x: aloc (c b)) + (aux: GSet.set (aloc (c b))) +: Lemma + (GSet.mem x aux <==> GSet.mem (ALoc x.region x.addr (if None? 
x.loc then None else Some (make_cls_union_aloc b (Some?.v x.loc)))) (union_aux_of_aux_left c b aux)) + [SMTPat (GSet.mem x aux)] += () + +let mem_union_aux_of_aux_left_elim + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (x: aloc (cls_union c)) + (aux: GSet.set (aloc (c b))) +: Lemma + (GSet.mem x (union_aux_of_aux_left c b aux) <==> (if None? x.loc then GSet.mem (ALoc x.region x.addr None) aux else (bool_of_cls_union_aloc (Some?.v x.loc) == b /\ GSet.mem (ALoc x.region x.addr (Some (aloc_of_cls_union_aloc (Some?.v x.loc)))) aux))) + [SMTPat (GSet.mem x (union_aux_of_aux_left #al c b aux))] += () + +let addrs_of_loc_union_loc_of_loc + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (l: loc (c b)) + (r: HS.rid) +: Lemma + (addrs_of_loc (union_loc_of_loc c b l) r `GSet.equal` addrs_of_loc l r) + [SMTPat (addrs_of_loc (union_loc_of_loc #al c b l) r)] += () + +let union_loc_of_loc_none #al c b = + assert (loc_equal #_ #(cls_union c) (union_loc_of_loc c b (loc_none #_ #(c b))) (loc_none #_ #(cls_union c))) + +#push-options "--z3rlimit 15" +let union_loc_of_loc_union #al c b l1 l2 = + assert (loc_equal #_ #(cls_union c) (union_loc_of_loc c b (loc_union #_ #(c b) l1 l2)) (loc_union #_ #(cls_union c) (union_loc_of_loc c b l1) (union_loc_of_loc c b l2))) +#pop-options + +let union_loc_of_loc_addresses #al c b preserve_liveness r n = + assert (loc_equal #_ #(cls_union c) (union_loc_of_loc c b (loc_addresses #_ #(c b) preserve_liveness r n)) (loc_addresses #_ #(cls_union c) preserve_liveness r n)) + +let union_loc_of_loc_regions #al c b preserve_liveness r = + assert (loc_equal #_ #(cls_union c) (union_loc_of_loc c b (loc_regions #_ #(c b) preserve_liveness r)) (loc_regions #_ #(cls_union c) preserve_liveness r)) + +#push-options "--z3rlimit 25" +let union_loc_of_loc_includes_intro + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (larger smaller: loc (c b)) +: Lemma + (requires (larger `loc_includes` smaller)) + (ensures (union_loc_of_loc c b larger `loc_includes` union_loc_of_loc c b smaller)) += (); + let auxl = union_aux_of_aux_left c b (Ghost.reveal (Loc?.aux larger)) in + let auxs = union_aux_of_aux_left c b (Ghost.reveal (Loc?.aux smaller)) in + assert (forall r a . 
GSet.mem (ALoc r a None) auxs ==> ( + GSet.mem (ALoc r a None) (Ghost.reveal (Loc?.aux smaller)) /\ + GSet.mem (ALoc r a None) (Ghost.reveal (Loc?.aux larger)) /\ + GSet.mem (ALoc r a None) auxl + )); + assert (auxl `loc_aux_includes` auxs); + let doml = aloc_domain (cls_union c) (Loc?.regions larger) (Loc?.live_addrs larger) in + let doms = aloc_domain (cls_union c) (Loc?.regions smaller) (Loc?.live_addrs smaller) in + assert (doml `loc_aux_includes` doms) +#pop-options + +#push-options "--fuel 0 --ifuel 0 --z3rlimit 50 --z3cliopt 'smt.qi.eager_threshold=1'" +let union_loc_of_loc_includes_elim + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (larger smaller: loc (c b)) +: Lemma + (requires (union_loc_of_loc c b larger `loc_includes` union_loc_of_loc c b smaller)) + (ensures (larger `loc_includes` smaller)) += let auxl = Ghost.reveal (Loc?.aux larger) in + let auxl' = union_aux_of_aux_left c b auxl in + let auxs = Ghost.reveal (Loc?.aux smaller) in + let auxs' = union_aux_of_aux_left c b auxs in + let doml' = aloc_domain (cls_union c) (Loc?.regions larger) (Loc?.live_addrs larger) in + let doms' = aloc_domain (cls_union c) (Loc?.regions smaller) (Loc?.live_addrs smaller) in + let doml = aloc_domain (c b) (Loc?.regions larger) (Loc?.live_addrs larger) in + let doms = aloc_domain (c b) (Loc?.regions smaller) (Loc?.live_addrs smaller) in + let g + (r: HS.rid) + (a: nat) + (x: aloc (c b)) + (y: aloc (c b)) + : GTot Type0 + = GSet.mem y (GSet.union auxl doml) /\ y `aloc_includes` x + in + let g' (r: HS.rid) (a: nat) (x: aloc (c b)) : GTot Type0 = + exists (y: aloc (c b)) . g r a x y + in + let f + (r: HS.rid) + (a: nat) + (x: aloc (c b)) + : Lemma + (requires (GSet.mem x auxs /\ (~ (GSet.mem x.addr (addrs_of_loc_weak smaller x.region))))) + (ensures (g' r a x)) + = let x' : aloc (cls_union c) = ALoc x.region x.addr (if None? x.loc then None else Some (make_cls_union_aloc b (Some?.v x.loc))) in + Classical.exists_elim + (g' r a x) + #(aloc (cls_union c)) + #(fun y' -> GSet.mem y' (GSet.union auxl' doml') /\ y' `aloc_includes` x') + () + (fun (y': aloc (cls_union c) { GSet.mem y' (GSet.union auxl' doml') /\ y' `aloc_includes` x' } ) -> + let y : aloc (c b) = ALoc y'.region y'.addr (if None? y'.loc then None else Some (aloc_of_cls_union_aloc (Some?.v y'.loc))) in + assert (g r a x y) + ) + in + let f' + (r: HS.rid) + (a: nat) + (x: aloc (c b)) + : Lemma + ((GSet.mem x auxs /\ (~ (GSet.mem x.addr (addrs_of_loc_weak smaller x.region)))) ==> g' r a x) + = Classical.move_requires (f r a) x + in + Classical.forall_intro_3 f'; + assert (forall (r: HS.rid) (a: nat) (x: aloc (c b)) . 
+ (GSet.mem x auxs /\ GSet.mem x.addr (addrs_of_loc_weak smaller x.region)) ==> + GSet.mem x (GSet.union auxl doml) + ) by ( + let open FStar.Stubs.Tactics.V2.Builtins in + set_options "--z3cliopt 'smt.qi.eager_threshold=1'"; + () + ); + assert (larger `loc_includes'` smaller) by ( + let open FStar.Stubs.Tactics.V2.Builtins in + let open FStar.Tactics.SMT in + set_rlimit 75; + set_options "--z3cliopt 'smt.qi.eager_threshold=1'"; + () + ); + () +#pop-options + +let union_loc_of_loc_includes #al c b s1 s2 = + Classical.move_requires (union_loc_of_loc_includes_elim c b s1) s2; + Classical.move_requires (union_loc_of_loc_includes_intro c b s1) s2 + +#push-options "--fuel 0 --ifuel 0" +let union_loc_of_loc_disjoint_intro + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (larger smaller: loc (c b)) +: Lemma + (requires (larger `loc_disjoint` smaller)) + (ensures (union_loc_of_loc c b larger `loc_disjoint` union_loc_of_loc c b smaller)) += let auxl = union_aux_of_aux_left c b (Ghost.reveal (Loc?.aux larger)) in + let auxs = union_aux_of_aux_left c b (Ghost.reveal (Loc?.aux smaller)) in + let g + (xl xs: aloc (cls_union c)) + : Lemma + (requires (GSet.mem xl auxl /\ GSet.mem xs auxs)) + (ensures (GSet.mem xl auxl /\ GSet.mem xs auxs /\ aloc_disjoint xl xs)) + = + let xl' : aloc (c b) = ALoc xl.region xl.addr (if None? xl.loc then None else Some (aloc_of_cls_union_aloc (Some?.v xl.loc))) in + let xs' : aloc (c b) = ALoc xs.region xs.addr (if None? xs.loc then None else Some (aloc_of_cls_union_aloc (Some?.v xs.loc))) in + assert (GSet.mem xl' (Ghost.reveal (Loc?.aux larger))); + assert (GSet.mem xs' (Ghost.reveal (Loc?.aux smaller))); + assert (aloc_disjoint xl' xs'); + assert (aloc_disjoint xl xs) + in + Classical.forall_intro_2 (fun xl -> Classical.move_requires (g xl)); + assert (forall xl xs . (GSet.mem xl auxl /\ GSet.mem xs auxs) ==> aloc_disjoint xl xs); + assert (auxl `loc_aux_disjoint` auxs); + let larger' = union_loc_of_loc c b larger in + let smaller' = union_loc_of_loc c b smaller in + let doml = aloc_domain (cls_union c) (Loc?.regions larger) (Loc?.live_addrs larger) in + let doms = aloc_domain (cls_union c) (Loc?.regions smaller) (Loc?.live_addrs smaller) in + assert (forall (xl xs: aloc (cls_union c)) . + (GSet.mem xl doml /\ GSet.mem xs auxs) ==> ( + xl.addr `GSet.mem` addrs_of_loc_weak larger xl.region /\ + xs.addr `GSet.mem` addrs_of_loc smaller xs.region /\ + aloc_disjoint xl xs + )) by ( + let open FStar.Stubs.Tactics.V2.Builtins in + let open FStar.Tactics.SMT in + set_rlimit 64; + set_options "--z3cliopt 'smt.qi.eager_threshold=1'"; + () + ); + assert (doml ` loc_aux_disjoint` auxs); + assert (forall (xl xs: aloc (cls_union c)) . 
+ (GSet.mem xl auxl /\ GSet.mem xs doms) ==> ( + xl.addr `GSet.mem` addrs_of_loc larger xl.region /\ + xs.addr `GSet.mem` addrs_of_loc_weak smaller xs.region /\ + aloc_disjoint xl xs + )) by ( + let open FStar.Tactics.SMT in + set_rlimit 15; + () + ); + assert (auxl ` loc_aux_disjoint` doms); + assert (loc_disjoint_aux larger' smaller'); + () +#pop-options + +#push-options "--z3rlimit 32" +let union_loc_of_loc_disjoint_elim + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (larger smaller: loc (c b)) +: Lemma + (requires (union_loc_of_loc c b larger `loc_disjoint` union_loc_of_loc c b smaller)) + (ensures (larger `loc_disjoint` smaller)) += let auxl = Ghost.reveal (Loc?.aux larger) in + let auxl' = union_aux_of_aux_left c b auxl in + let auxs = Ghost.reveal (Loc?.aux smaller) in + let auxs' = union_aux_of_aux_left c b auxs in + assert (forall (x y: aloc (c b)) . (GSet.mem x auxl /\ GSet.mem y auxs) ==> ( + let x' = ALoc x.region x.addr (if None? x.loc then None else Some (make_cls_union_aloc b (Some?.v x.loc))) in + let y' = ALoc y.region y.addr (if None? y.loc then None else Some (make_cls_union_aloc b (Some?.v y.loc))) in + GSet.mem x' auxl' /\ GSet.mem y' auxs' /\ (aloc_disjoint x' y' ==> aloc_disjoint x y))); + assert (auxl `loc_aux_disjoint` auxs) +#pop-options + +let union_loc_of_loc_disjoint #al c b s1 s2 = + Classical.move_requires (union_loc_of_loc_disjoint_elim c b s1) s2; + Classical.move_requires (union_loc_of_loc_disjoint_intro c b s1) s2 + +#push-options "--z3rlimit 32" +let modifies_union_loc_of_loc_elim + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (l: loc (c b)) + (h1 h2: HS.mem) +: Lemma + (requires (modifies #_ #(cls_union c) (union_loc_of_loc c b l) h1 h2)) + (ensures (modifies #_ #(c b) l h1 h2)) += assert (modifies_preserves_regions l h1 h2); + assert (modifies_preserves_mreferences l h1 h2); + modifies_preserves_alocs_intro #_ #(c b) l h1 h2 () (fun r' a' b' -> + let g + (x: aloc (cls_union c)) + : Lemma + (requires ( + GSet.mem a' (addrs_of_loc_aux #_ #(cls_union c) (union_loc_of_loc c b l) r') /\ + GSet.mem x (Ghost.reveal (Loc?.aux #_ #(cls_union c) (union_loc_of_loc c b l))) + )) + (ensures ( + aloc_disjoint #_ #(cls_union c) x (ALoc #_ #(cls_union c) r' a' (Some (make_cls_union_aloc b b'))))) + = if r' = x.region && a' = x.addr + then begin + let x' : aloc (c b) = ALoc #_ #(c b) r' a' (if None? 
x.loc then None else Some (aloc_of_cls_union_aloc (Some?.v x.loc))) in + assert (aloc_disjoint #(al b) #(c b) x' (ALoc r' a' (Some b'))) + end else + () + in + Classical.forall_intro (Classical.move_requires g); + assert ((cls_union c).aloc_preserved (make_cls_union_aloc b b') h1 h2) + ) +#pop-options + +#push-options "--z3rlimit 32" +let modifies_union_loc_of_loc_intro + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (l: loc (c b)) + (h1 h2: HS.mem) +: Lemma + (requires (modifies #_ #(c b) l h1 h2)) + (ensures (modifies #_ #(cls_union c) (union_loc_of_loc c b l) h1 h2)) += let l' = union_loc_of_loc c b l in + assert (modifies_preserves_regions l' h1 h2); + assert (modifies_preserves_mreferences l' h1 h2); + assert (modifies_preserves_livenesses l' h1 h2); + assert (modifies_preserves_not_unused_in l' h1 h2); + modifies_preserves_alocs_intro #_ #(cls_union c) l' h1 h2 () (fun r' a' b' -> + let b_ = bool_of_cls_union_aloc b' in + let a_ = aloc_of_cls_union_aloc b' in + let ll' : aloc (cls_union c) = ALoc r' a' (Some b') in + let ll : aloc (c b_) = ALoc r' a' (Some a_) in + assert (exists (x: aloc (c b)) . GSet.mem x (Ghost.reveal (Loc?.aux l)) /\ + ( + let xr = x.region in + let xa = x.addr in + let xl : option (al b xr xa) = x.loc in + xr == r' /\ + xa == a' /\ ( + let xl' : option (aloc_union al r' a') = if None? xl then None else Some (make_cls_union_aloc #al b (Some?.v xl)) in + let x' : aloc (cls_union c) = ALoc r' a' xl' in + GSet.mem x' (Ghost.reveal (Loc?.aux l')) /\ + aloc_disjoint #_ #(cls_union c) x' ll' + ))); + assert (b_ == b); + let f (x: aloc (c b)) : Lemma + (requires (GSet.mem x (Ghost.reveal (Loc?.aux l)))) + (ensures (aloc_disjoint #_ #(c b) x ll)) + = let xr = x.region in + let xa = x.addr in + let xl : option (al b xr xa) = x.loc in + let xl' : option (aloc_union al xr xa) = if None? 
xl then None else Some (make_cls_union_aloc #al b (Some?.v xl)) in + let x' : aloc (cls_union c) = ALoc xr xa xl' in + assert (GSet.mem x' (Ghost.reveal (Loc?.aux l'))); + assert (aloc_disjoint #_ #(cls_union c) x' ll'); + assert (aloc_disjoint #_ #(c b) x ll) + in + Classical.forall_intro (Classical.move_requires f); + assert (loc_aux_disjoint (Ghost.reveal (Loc?.aux l)) (GSet.singleton ll)) + ) +#pop-options + +let modifies_union_loc_of_loc #al c b l h1 h2 = + Classical.move_requires (modifies_union_loc_of_loc_elim c b l h1) h2; + Classical.move_requires (modifies_union_loc_of_loc_intro c b l h1) h2 + +let loc_of_union_loc #al #c b l += let (Loc regions region_liveness_tags non_live_addrs live_addrs aux) = l in + let aux' = union_aux_of_aux_left_inv b (Ghost.reveal aux) in + Loc + regions + region_liveness_tags + non_live_addrs + live_addrs + (Ghost.hide aux') + +let loc_of_union_loc_union_loc_of_loc #al c b s += assert (loc_of_union_loc b (union_loc_of_loc c b s) `loc_equal` s) + +let loc_of_union_loc_none #al c b += assert (loc_of_union_loc #_ #c b loc_none `loc_equal` loc_none) + +let loc_of_union_loc_union #al c b l1 l2 += assert (loc_of_union_loc b (l1 `loc_union` l2) `loc_equal` (loc_of_union_loc b l1 `loc_union` loc_of_union_loc b l2)) + +let loc_of_union_loc_addresses #al c b preserve_liveness r n = + assert (loc_of_union_loc #_ #c b (loc_addresses preserve_liveness r n) `loc_equal` loc_addresses preserve_liveness r n) + +let loc_of_union_loc_regions #al c b preserve_liveness r = + assert (loc_of_union_loc #_ #c b (loc_regions preserve_liveness r) `loc_equal` loc_regions preserve_liveness r) + +module U = FStar.Universe + +let raise_aloc al r n = U.raise_t (al r n) + +let raise_cls #al c = Cls #(raise_aloc u#x u#y al) + (fun #r #a x1 x2 -> c.aloc_includes (U.downgrade_val x1) (U.downgrade_val x2)) + (fun #r #a x -> c.aloc_includes_refl (U.downgrade_val x)) + (fun #r #a x1 x2 x3 -> c.aloc_includes_trans (U.downgrade_val x1) (U.downgrade_val x2) (U.downgrade_val x3)) + (fun #r #a x1 x2 -> c.aloc_disjoint (U.downgrade_val x1) (U.downgrade_val x2)) + (fun #r #a x1 x2 -> c.aloc_disjoint_sym (U.downgrade_val x1) (U.downgrade_val x2)) + (fun #r #a larger1 larger2 smaller1 smaller2 -> c.aloc_disjoint_includes (U.downgrade_val larger1) (U.downgrade_val larger2) (U.downgrade_val smaller1) (U.downgrade_val smaller2)) + (fun #r #a x h1 h2 -> c.aloc_preserved (U.downgrade_val x) h1 h2) + (fun #r #a x h -> c.aloc_preserved_refl (U.downgrade_val x) h) + (fun #r #a x h1 h2 h3 -> c.aloc_preserved_trans (U.downgrade_val x) h1 h2 h3) + (fun #r #a b h1 h2 f -> c.same_mreference_aloc_preserved (U.downgrade_val b) h1 h2 f) + +let downgrade_aloc (#al: aloc_t u#a) (#c: cls al) (a: aloc (raise_cls u#a u#b c)) : Tot (aloc c) = + let ALoc region addr x = a in + ALoc region addr (if None? x then None else Some (U.downgrade_val (Some?.v x))) + +let upgrade_aloc (#al: aloc_t u#a) (#c: cls al) (a: aloc c) : Tot (aloc (raise_cls u#a u#b c)) = + let ALoc region addr x = a in + ALoc region addr (if None? 
x then None else Some (U.raise_val (Some?.v x))) + +let downgrade_aloc_upgrade_aloc (#al: aloc_t u#a) (#c: cls al) (a: aloc c) : Lemma + (downgrade_aloc (upgrade_aloc u#a u#b a) == a) + [SMTPat (downgrade_aloc (upgrade_aloc u#a u#b a))] += () + +let upgrade_aloc_downgrade_aloc (#al: aloc_t u#a) (#c: cls al) (a: aloc (raise_cls u#a u#b c)) : Lemma + (upgrade_aloc (downgrade_aloc a) == a) + [SMTPat (upgrade_aloc u#a u#b (downgrade_aloc a))] += () + +let raise_loc_aux_pred + (#al: aloc_t u#a) + (c: cls al) + (aux: Ghost.erased (GSet.set (aloc c))) + (a: aloc (raise_cls u#a u#b c)) +: GTot bool += GSet.mem (downgrade_aloc a) (Ghost.reveal aux) + +let raise_loc #al #c l = + let (Loc regions region_liveness_tags non_live_addrs live_addrs aux) = l in + Loc + regions + region_liveness_tags + non_live_addrs + live_addrs + (Ghost.hide (GSet.comprehend (raise_loc_aux_pred c aux))) + +let raise_loc_none #al #c = + assert (raise_loc u#x u#y (loc_none #_ #c) `loc_equal` loc_none) + +let raise_loc_union #al #c l1 l2 = + assert (raise_loc u#x u#y (loc_union l1 l2) `loc_equal` loc_union (raise_loc l1) (raise_loc l2)) + +let raise_loc_addresses #al #c preserve_liveness r a = + assert (raise_loc u#x u#y (loc_addresses #_ #c preserve_liveness r a) `loc_equal` loc_addresses preserve_liveness r a) + +let raise_loc_regions #al #c preserve_liveness r = + assert (raise_loc u#x u#y (loc_regions #_ #c preserve_liveness r) `loc_equal` loc_regions preserve_liveness r) + +#push-options "--z3rlimit 15 --z3cliopt 'smt.qi.eager_threshold=100'" +let raise_loc_includes #al #c l1 l2 = + let l1' = raise_loc l1 in + let l2' = raise_loc l2 in + assert (forall (x: aloc (raise_cls c)) . GSet.mem x (Ghost.reveal (Loc?.aux l1')) <==> GSet.mem (downgrade_aloc x) (Ghost.reveal (Loc?.aux l1))); + assert (forall (x: aloc (raise_cls c)) . GSet.mem x (Ghost.reveal (Loc?.aux l2')) <==> GSet.mem (downgrade_aloc x) (Ghost.reveal (Loc?.aux l2))); + assert (forall (x: aloc c) . GSet.mem x (Ghost.reveal (Loc?.aux l1)) <==> GSet.mem (upgrade_aloc x) (Ghost.reveal (Loc?.aux l1'))); + assert (forall (x: aloc c) . GSet.mem x (Ghost.reveal (Loc?.aux l2)) <==> GSet.mem (upgrade_aloc x) (Ghost.reveal (Loc?.aux l2'))); + assert (loc_aux_includes (Ghost.reveal (Loc?.aux l1')) (Ghost.reveal (Loc?.aux l2')) <==> loc_aux_includes (Ghost.reveal (Loc?.aux l1)) (Ghost.reveal (Loc?.aux l2))) +#pop-options + +#push-options "--z3rlimit 20" +let raise_loc_disjoint #al #c l1 l2 = + let l1' = raise_loc l1 in + let l2' = raise_loc l2 in + assert (forall (x: aloc (raise_cls c)) . GSet.mem x (Ghost.reveal (Loc?.aux l1')) <==> GSet.mem (downgrade_aloc x) (Ghost.reveal (Loc?.aux l1))); + assert (forall (x: aloc (raise_cls c)) . GSet.mem x (Ghost.reveal (Loc?.aux l2')) <==> GSet.mem (downgrade_aloc x) (Ghost.reveal (Loc?.aux l2))); + assert (forall (x: aloc c) . GSet.mem x (Ghost.reveal (Loc?.aux l1)) <==> GSet.mem (upgrade_aloc x) (Ghost.reveal (Loc?.aux l1'))); + assert (forall (x: aloc c) . GSet.mem x (Ghost.reveal (Loc?.aux l2)) <==> GSet.mem (upgrade_aloc x) (Ghost.reveal (Loc?.aux l2'))); + assert (forall r . addrs_of_loc l1' r `GSet.equal` addrs_of_loc l1 r); + assert (forall r . addrs_of_loc l2' r `GSet.equal` addrs_of_loc l2 r); + assert (forall (x1 x2: aloc (raise_cls u#x u#y c)) . aloc_disjoint x1 x2 <==> aloc_disjoint (downgrade_aloc x1) (downgrade_aloc x2)); + assert (forall (x1 x2: aloc (c)) . 
aloc_disjoint x1 x2 <==> aloc_disjoint (upgrade_aloc u#x u#y x1) (upgrade_aloc x2)) +#pop-options + +let modifies_raise_loc #al #c l h1 h2 = + let l' = raise_loc l in + assert (forall (x: aloc (raise_cls c)) . GSet.mem x (Ghost.reveal (Loc?.aux l')) <==> GSet.mem (downgrade_aloc x) (Ghost.reveal (Loc?.aux l))); + assert (forall (x: aloc c) . GSet.mem x (Ghost.reveal (Loc?.aux l)) <==> GSet.mem (upgrade_aloc x) (Ghost.reveal (Loc?.aux l'))); + assert (forall r . addrs_of_loc l' r `GSet.equal` addrs_of_loc l r); + assert (forall (x1 x2: aloc (raise_cls u#x u#y c)) . aloc_disjoint x1 x2 <==> aloc_disjoint (downgrade_aloc x1) (downgrade_aloc x2)); + assert (forall (r: HS.rid) (a: nat) (b: raise_aloc al r a) . + loc_aux_disjoint (Ghost.reveal (Loc?.aux l')) (GSet.singleton (ALoc r a (Some b))) ==> + loc_aux_disjoint (Ghost.reveal (Loc?.aux l)) (GSet.singleton (ALoc r a (Some (U.downgrade_val b))))); + assert (modifies_preserves_alocs l h1 h2 ==> modifies_preserves_alocs l' h1 h2); + assert (forall (r: HS.rid) (a: nat) (b: al r a) . + loc_aux_disjoint (Ghost.reveal (Loc?.aux l)) (GSet.singleton (ALoc r a (Some b))) ==> + loc_aux_disjoint (Ghost.reveal (Loc?.aux l')) (GSet.singleton (ALoc r a (Some (U.raise_val b))))); + assert (modifies_preserves_alocs l' h1 h2 ==> modifies_preserves_alocs l h1 h2) + +let lower_loc_aux_pred + (#al: aloc_t u#a) + (c: cls al) + (aux: Ghost.erased (GSet.set (aloc (raise_cls u#a u#b c)))) + (a: aloc c) +: GTot bool += GSet.mem (upgrade_aloc a) (Ghost.reveal aux) + +let lower_loc #al #c l = + let (Loc regions region_liveness_tags non_live_addrs live_addrs aux) = l in + Loc + regions + region_liveness_tags + non_live_addrs + live_addrs + (Ghost.hide (GSet.comprehend (lower_loc_aux_pred c aux))) + +let lower_loc_raise_loc #al #c l = + assert (lower_loc (raise_loc u#x u#y l) `loc_equal` l) + +let raise_loc_lower_loc #al #c l = + assert (raise_loc (lower_loc l) `loc_equal` l) + +let lower_loc_none #al #c = + assert (lower_loc u#x u#y #_ #c loc_none `loc_equal` loc_none) + +let lower_loc_union #al #c l1 l2 = + assert (lower_loc u#x u#y (loc_union l1 l2) `loc_equal` loc_union (lower_loc l1) (lower_loc l2)) + +let lower_loc_addresses #al #c preserve_liveness r a = + assert (lower_loc u#x u#y #_ #c (loc_addresses preserve_liveness r a) `loc_equal` loc_addresses preserve_liveness r a) + +let lower_loc_regions #al #c preserve_liveness r = + assert (lower_loc u#x u#y #_ #c (loc_regions preserve_liveness r) `loc_equal` loc_regions preserve_liveness r) diff --git a/stage0/ulib/FStar.ModifiesGen.fsti b/stage0/ulib/FStar.ModifiesGen.fsti new file mode 100644 index 00000000000..2277fc8089c --- /dev/null +++ b/stage0/ulib/FStar.ModifiesGen.fsti @@ -0,0 +1,1238 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.ModifiesGen + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(*** The modifies clause *) + +(* NOTE: aloc cannot be a member of the class, because of OCaml + extraction. 
So it must be a parameter of the class instead. *) + +[@@erasable] +type aloc_t = HS.rid -> nat -> Tot Type + +[@@erasable] +noeq +type cls (aloc: aloc_t) : Type = | Cls: + (aloc_includes: ( + (#r: HS.rid) -> + (#a: nat) -> + aloc r a -> + aloc r a -> + GTot Type0 + )) -> + (aloc_includes_refl: ( + (#r: HS.rid) -> + (#a: nat) -> + (x: aloc r a) -> + Lemma + (aloc_includes x x) + )) -> + (aloc_includes_trans: ( + (#r: HS.rid) -> + (#a: nat) -> + (x1: aloc r a) -> + (x2: aloc r a) -> + (x3: aloc r a) -> + Lemma + (requires (aloc_includes x1 x2 /\ aloc_includes x2 x3)) + (ensures (aloc_includes x1 x3)) + )) -> + (aloc_disjoint: ( + (#r: HS.rid) -> + (#a: nat) -> + (x1: aloc r a) -> + (x2: aloc r a) -> + GTot Type0 + )) -> + (aloc_disjoint_sym: ( + (#r: HS.rid) -> + (#a: nat) -> + (x1: aloc r a) -> + (x2: aloc r a) -> + Lemma + (aloc_disjoint x1 x2 <==> aloc_disjoint x2 x1) + )) -> + (aloc_disjoint_includes: ( + (#r: HS.rid) -> + (#a: nat) -> + (larger1: aloc r a) -> + (larger2: aloc r a) -> + (smaller1: aloc r a) -> + (smaller2: aloc r a) -> + Lemma + (requires (aloc_disjoint larger1 larger2 /\ larger1 `aloc_includes` smaller1 /\ larger2 `aloc_includes` smaller2)) + (ensures (aloc_disjoint smaller1 smaller2)) + )) -> + (aloc_preserved: ( + (#r: HS.rid) -> + (#a: nat) -> + aloc r a -> + HS.mem -> + HS.mem -> + GTot Type0 + )) -> + (aloc_preserved_refl: ( + (#r: HS.rid) -> + (#a: nat) -> + (x: aloc r a) -> + (h: HS.mem) -> + Lemma + (aloc_preserved x h h) + )) -> + (aloc_preserved_trans: ( + (#r: HS.rid) -> + (#a: nat) -> + (x: aloc r a) -> + (h1: HS.mem) -> + (h2: HS.mem) -> + (h3: HS.mem) -> + Lemma + (requires (aloc_preserved x h1 h2 /\ aloc_preserved x h2 h3)) + (ensures (aloc_preserved x h1 h3)) + )) -> + (* if any reference at this address is preserved, then any location at this address is preserved *) + (same_mreference_aloc_preserved: ( + (#r: HS.rid) -> + (#a: nat) -> + (b: aloc r a) -> + (h1: HS.mem) -> + (h2: HS.mem) -> + (f: ( + (a' : Type0) -> + (pre: Preorder.preorder a') -> + (r': HS.mreference a' pre) -> + Lemma + (requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r')) + (ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r')) + )) -> + Lemma + (aloc_preserved b h1 h2) + )) -> + cls aloc + +[@@erasable] +val loc (#aloc: aloc_t u#x) (c: cls aloc) : Tot (Type u#x) + +val loc_none (#aloc: aloc_t) (#c: cls aloc): Tot (loc c) + +val loc_union + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2: loc c) +: GTot (loc c) + +(** The following is useful to make Z3 cut matching loops with +modifies_trans and modifies_refl *) +val loc_union_idem + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) +: Lemma + (loc_union s s == s) + +val loc_union_comm + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2: loc c) +: Lemma + (loc_union s1 s2 == loc_union s2 s1) + +val loc_union_assoc + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2 s3: loc c) +: Lemma + (loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3) + +val loc_union_loc_none_l + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) +: Lemma + (loc_union loc_none s == s) + +val loc_union_loc_none_r + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) +: Lemma + (loc_union s loc_none == s) + + +val loc_of_aloc + (#aloc: aloc_t) (#c: cls aloc) + (#r: HS.rid) + (#n: nat) + (b: aloc r n) +: GTot (loc c) + +val loc_of_aloc_not_none + (#aloc: aloc_t) (#c: cls aloc) + (#r: HS.rid) + (#n: nat) + (b: aloc r n) +: Lemma (loc_of_aloc #_ #c b == loc_none ==> False) + +val loc_addresses + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness: bool) + (r: 
HS.rid) + (n: Set.set nat) +: GTot (loc c) + +val loc_regions + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness: bool) + (r: Set.set HS.rid) +: GTot (loc c) + +let loc_mreference + (#aloc: aloc_t) (#c: cls aloc) + (#a: Type) + (#p: Preorder.preorder a) + (b: HS.mreference a p) +: GTot (loc c) += loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) + +let loc_freed_mreference + (#aloc: aloc_t) (#c: cls aloc) + (#a: Type) + (#p: Preorder.preorder a) + (b: HS.mreference a p) +: GTot (loc c) += loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b)) + +let loc_region_only + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness: bool) + (r: HS.rid) +: GTot (loc c) += loc_regions preserve_liveness (Set.singleton r) + +let loc_all_regions_from + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness: bool) + (r: HS.rid) +: GTot (loc c) += loc_regions preserve_liveness (HS.mod_set (Set.singleton r)) + + +(* Inclusion of memory locations *) + +[@@erasable] +val loc_includes + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2: loc c) +: GTot Type0 + +val loc_includes_refl + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) +: Lemma + (loc_includes s s) + +val loc_includes_trans + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2 s3: loc c) +: Lemma + (requires (loc_includes s1 s2 /\ loc_includes s2 s3)) + (ensures (loc_includes s1 s3)) + +val loc_includes_union_r + (#aloc: aloc_t) (#c: cls aloc) + (s s1 s2: loc c) +: Lemma + (requires (loc_includes s s1 /\ loc_includes s s2)) + (ensures (loc_includes s (loc_union s1 s2))) + +val loc_includes_union_l + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2 s: loc c) +: Lemma + (requires (loc_includes s1 s \/ loc_includes s2 s)) + (ensures (loc_includes (loc_union s1 s2) s)) + +val loc_includes_none + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) +: Lemma + (loc_includes s loc_none) + +val loc_includes_none_elim + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) +: Lemma + (requires (loc_includes loc_none s)) + (ensures (s == loc_none)) + + +val loc_includes_aloc + (#aloc: aloc_t) (#c: cls aloc) + (#r: HS.rid) + (#n: nat) + (b1 b2: aloc r n) +: Lemma + (requires (c.aloc_includes b1 b2)) + (ensures (loc_includes (loc_of_aloc b1) (loc_of_aloc #_ #c b2))) + +val loc_includes_aloc_elim + (#aloc: aloc_t) (#c: cls aloc) + (#r1 #r2: HS.rid) + (#n1 #n2: nat) + (b1: aloc r1 n1) + (b2: aloc r2 n2) +: Lemma + (requires (loc_includes (loc_of_aloc b1) (loc_of_aloc #_ #c b2))) + (ensures (r1 == r2 /\ n1 == n2 /\ c.aloc_includes b1 b2)) + +val loc_includes_addresses_aloc + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness: bool) + (r: HS.rid) + (s: Set.set nat) + (#a: nat) + (p: aloc r a) +: Lemma + (requires (Set.mem a s)) + (ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_of_aloc #_ #c p))) + +val loc_includes_region_aloc + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness: bool) + (s: Set.set HS.rid) + (#r: HS.rid) + (#a: nat) + (b: aloc r a) +: Lemma + (requires (Set.mem r s)) + (ensures (loc_includes (loc_regions preserve_liveness s) (loc_of_aloc #_ #c b))) + +val loc_includes_region_addresses + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness1 preserve_liveness2: bool) + (s: Set.set HS.rid) + (r: HS.rid) + (a: Set.set nat) +: Lemma + (requires (Set.mem r s)) + (ensures (loc_includes (loc_regions #_ #c preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))) + +val loc_includes_region_region + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness1 preserve_liveness2: bool) + (s1 s2: Set.set HS.rid) +: Lemma + (requires ((preserve_liveness1 ==> 
preserve_liveness2) /\ Set.subset s2 s1)) + (ensures (loc_includes (loc_regions #_ #c preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))) + +val loc_includes_region_union_l + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness: bool) + (l: loc c) + (s1 s2: Set.set HS.rid) +: Lemma + (requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1))))) + (ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))) + +val loc_includes_addresses_addresses + (#aloc: aloc_t) (c: cls aloc) + (preserve_liveness1 preserve_liveness2: bool) + (r: HS.rid) + (a1 a2: Set.set nat) +: Lemma + (requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset a2 a1)) + (ensures (loc_includes #_ #c (loc_addresses preserve_liveness1 r a1) (loc_addresses preserve_liveness2 r a2))) + + +(* Disjointness of two memory locations *) + +[@@erasable] +val loc_disjoint + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2: loc c) +: GTot Type0 + +val loc_disjoint_sym + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2: loc c) +: Lemma + (requires (loc_disjoint s1 s2)) + (ensures (loc_disjoint s2 s1)) + +val loc_disjoint_none_r + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) +: Lemma + (ensures (loc_disjoint s loc_none)) + +val loc_disjoint_union_r + (#aloc: aloc_t) (#c: cls aloc) + (s s1 s2: loc c) +: Lemma + (requires (loc_disjoint s s1 /\ loc_disjoint s s2)) + (ensures (loc_disjoint s (loc_union s1 s2))) + +val loc_disjoint_includes + (#aloc: aloc_t) (#c: cls aloc) + (p1 p2 p1' p2' : loc c) +: Lemma + (requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2)) + (ensures (loc_disjoint p1' p2')) + +val loc_disjoint_aloc_intro + (#aloc: aloc_t) (#c: cls aloc) + (#r1: HS.rid) + (#a1: nat) + (#r2: HS.rid) + (#a2: nat) + (b1: aloc r1 a1) + (b2: aloc r2 a2) +: Lemma + (requires ((r1 == r2 /\ a1 == a2) ==> c.aloc_disjoint b1 b2)) + (ensures (loc_disjoint (loc_of_aloc b1) (loc_of_aloc #_ #c b2))) + +val loc_disjoint_aloc_elim + (#aloc: aloc_t) (#c: cls aloc) + (#r1: HS.rid) + (#a1: nat) + (#r2: HS.rid) + (#a2: nat) + (b1: aloc r1 a1) + (b2: aloc r2 a2) +: Lemma + (requires (loc_disjoint (loc_of_aloc b1) (loc_of_aloc #_ #c b2))) + (ensures ((r1 == r2 /\ a1 == a2) ==> c.aloc_disjoint b1 b2)) + +val loc_disjoint_addresses_intro + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness1 preserve_liveness2: bool) + (r1 r2: HS.rid) + (n1 n2: Set.set nat) +: Lemma + (requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty)) + (ensures (loc_disjoint (loc_addresses #_ #c preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))) + +let loc_disjoint_addresses #aloc #c = loc_disjoint_addresses_intro #aloc #c + +val loc_disjoint_addresses_elim + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness1 preserve_liveness2: bool) + (r1 r2: HS.rid) + (n1 n2: Set.set nat) +: Lemma + (requires (loc_disjoint (loc_addresses #_ #c preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))) + (ensures (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty)) + +val loc_disjoint_aloc_addresses_intro + (#aloc: aloc_t) (#c: cls aloc) + (#r' : HS.rid) + (#a' : nat) + (p: aloc r' a') + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: Lemma + (requires (r == r' ==> (~ (Set.mem a' n)))) + (ensures (loc_disjoint (loc_of_aloc p) (loc_addresses #_ #c preserve_liveness r n))) + +val loc_disjoint_aloc_addresses_elim + (#aloc: aloc_t) (#c: cls aloc) + (#r' : HS.rid) + (#a' : nat) + (p: aloc r' a') + (preserve_liveness: bool) + (r: HS.rid) + 
(n: Set.set nat) +: Lemma + (requires (loc_disjoint (loc_of_aloc p) (loc_addresses #_ #c preserve_liveness r n))) + (ensures (r == r' ==> (~ (Set.mem a' n)))) + +val loc_disjoint_regions + (#aloc: aloc_t) (#c: cls aloc) + (preserve_liveness1 preserve_liveness2: bool) + (rs1 rs2: Set.set HS.rid) +: Lemma + (requires (Set.subset (Set.intersect rs1 rs2) Set.empty)) + (ensures (loc_disjoint (loc_regions #_ #c preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))) + + +(** Liveness-insensitive memory locations *) + +val address_liveness_insensitive_locs (#aloc: aloc_t) (c: cls aloc) : Tot (loc c) + +val loc_includes_address_liveness_insensitive_locs_aloc (#aloc: aloc_t) (#c: cls aloc) (#r: HS.rid) (#n: nat) (a: aloc r n) : Lemma + (loc_includes (address_liveness_insensitive_locs c) (loc_of_aloc a)) + +val loc_includes_address_liveness_insensitive_locs_addresses (#aloc: aloc_t) (c: cls aloc) (r: HS.rid) (a: Set.set nat) : Lemma + (loc_includes (address_liveness_insensitive_locs c) (loc_addresses true r a)) + +val region_liveness_insensitive_locs (#al: aloc_t) (c: cls al) : Tot (loc c) + +val loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs (#al: aloc_t) (c: cls al) : Lemma + (loc_includes (region_liveness_insensitive_locs c) (address_liveness_insensitive_locs c)) + +val loc_includes_region_liveness_insensitive_locs_loc_regions + (#al: aloc_t) (c: cls al) (r: Set.set HS.rid) +: Lemma + (region_liveness_insensitive_locs c `loc_includes` loc_regions #_ #c true r) + +val loc_includes_region_liveness_insensitive_locs_loc_addresses + (#al: aloc_t) (c: cls al) (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) +: Lemma + (region_liveness_insensitive_locs c `loc_includes` loc_addresses #_ #c preserve_liveness r a) + +val loc_includes_region_liveness_insensitive_locs_loc_of_aloc + (#al: aloc_t) (c: cls al) (#r: HS.rid) (#a: nat) (x: al r a) +: Lemma + (region_liveness_insensitive_locs c `loc_includes` loc_of_aloc #_ #c x) + + +(** The modifies clause proper *) + +[@@erasable] +val modifies + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) + (h1 h2: HS.mem) +: GTot Type0 + +val modifies_intro + (#al: aloc_t) (#c: cls al) (l: loc c) (h h' : HS.mem) + (regions: ( + (r: HS.rid) -> + Lemma + (requires (HS.live_region h r)) + (ensures (HS.live_region h' r)) + )) + (mrefs: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires ((loc_disjoint (loc_mreference b) l) /\ HS.contains h b)) + (ensures (HS.contains h' b /\ HS.sel h' b == HS.sel h b)) + )) + (livenesses: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires (HS.contains h b)) + (ensures (HS.contains h' b)) + )) + (addr_unused_in: ( + (r: HS.rid) -> + (n: nat) -> + Lemma + (requires ( + HS.live_region h r /\ + HS.live_region h' r /\ n `Heap.addr_unused_in` (HS.get_hmap h' `Map.sel` r) + )) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` r))) + )) + (alocs: ( + (r: HS.rid) -> + (a: nat) -> + (x: al r a) -> + Lemma + (requires (loc_disjoint (loc_of_aloc x) l)) + (ensures (c.aloc_preserved x h h')) + )) +: Lemma + (modifies l h h') + +val modifies_none_intro + (#al: aloc_t) (#c: cls al) (h h' : HS.mem) + (regions: ( + (r: HS.rid) -> + Lemma + (requires (HS.live_region h r)) + (ensures (HS.live_region h' r)) + )) + (mrefs: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires (HS.contains h b)) + (ensures (HS.contains h' b /\ HS.sel h' b == HS.sel h b)) + )) + 
(addr_unused_in: ( + (r: HS.rid) -> + (n: nat) -> + Lemma + (requires (HS.live_region h r /\ HS.live_region h' r /\ n `Heap.addr_unused_in` (HS.get_hmap h' `Map.sel` r))) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` r))) + )) +: Lemma + (modifies (loc_none #_ #c) h h') + +val modifies_address_intro + (#al: aloc_t) (#c: cls al) (r: HS.rid) (n: nat) (h h' : HS.mem) + (regions: ( + (r: HS.rid) -> + Lemma + (requires (HS.live_region h r)) + (ensures (HS.live_region h' r)) + )) + (mrefs: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires ((r <> HS.frameOf b \/ n <> HS.as_addr b) /\ HS.contains h b)) + (ensures (HS.contains h' b /\ HS.sel h' b == HS.sel h b)) + )) + (addr_unused_in: ( + (r': HS.rid) -> + (n' : nat) -> + Lemma + (requires ((r' <> r \/ n' <> n) /\ HS.live_region h r' /\ HS.live_region h' r' /\ n' `Heap.addr_unused_in` (HS.get_hmap h' `Map.sel` r'))) + (ensures (n' `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` r'))) + )) +: Lemma + (modifies (loc_addresses #_ #c false r (Set.singleton n)) h h') + +val modifies_aloc_intro + (#al: aloc_t) (#c: cls al) (#r: HS.rid) (#n: nat) (z: al r n) (h h' : HS.mem) + (regions: ( + (r: HS.rid) -> + Lemma + (requires (HS.live_region h r)) + (ensures (HS.live_region h' r)) + )) + (mrefs: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires ((r <> HS.frameOf b \/ n <> HS.as_addr b) /\ HS.contains h b)) + (ensures (HS.contains h' b /\ HS.sel h' b == HS.sel h b)) + )) + (livenesses: ( + (t: Type0) -> + (pre: Preorder.preorder t) -> + (b: HS.mreference t pre) -> + Lemma + (requires (HS.contains h b)) + (ensures (HS.contains h' b)) + )) + (addr_unused_in: ( + (r: HS.rid) -> + (n: nat) -> + Lemma + (requires (HS.live_region h r /\ HS.live_region h' r /\ n `Heap.addr_unused_in` (HS.get_hmap h' `Map.sel` r))) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` r))) + )) + (alocs: ( + (x: al r n) -> + Lemma + (requires (c.aloc_disjoint x z)) + (ensures (c.aloc_preserved x h h')) + )) +: Lemma + (modifies (loc_of_aloc #_ #c z) h h') + +val modifies_live_region + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) + (h1 h2: HS.mem) + (r: HS.rid) +: Lemma + (requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r)) + (ensures (HS.live_region h2 r)) + +val modifies_mreference_elim + (#aloc: aloc_t) (#c: cls aloc) + (#t: Type) + (#pre: Preorder.preorder t) + (b: HS.mreference t pre) + (p: loc c) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_mreference b) p /\ + HS.contains h b /\ + modifies p h h' + )) + (ensures ( + HS.contains h' b /\ + HS.sel h b == HS.sel h' b + )) + +val modifies_aloc_elim + (#aloc: aloc_t) (#c: cls aloc) + (#r: HS.rid) + (#a: nat) + (b: aloc r a) + (p: loc c) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_of_aloc b) p /\ + modifies p h h' + )) + (ensures ( + c.aloc_preserved b h h' + )) + +val modifies_refl + (#aloc: aloc_t) (#c: cls aloc) + (s: loc c) + (h: HS.mem) +: Lemma + (modifies s h h) + +val modifies_loc_includes + (#aloc: aloc_t) (#c: cls aloc) + (s1: loc c) + (h h': HS.mem) + (s2: loc c) +: Lemma + (requires (modifies s2 h h' /\ loc_includes s1 s2)) + (ensures (modifies s1 h h')) + +val modifies_preserves_liveness + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2: loc c) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (r: HS.mreference t pre) +: Lemma + (requires (modifies (loc_union s1 s2) h h' /\ loc_disjoint s1 (loc_mreference r) /\ loc_includes 
(address_liveness_insensitive_locs c) s2 /\ h `HS.contains` r)) + (ensures (h' `HS.contains` r)) + +val modifies_preserves_liveness_strong + (#aloc: aloc_t) (#c: cls aloc) + (s1 s2: loc c) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (r: HS.mreference t pre) + (x: aloc (HS.frameOf r) (HS.as_addr r)) +: Lemma + (requires (modifies (loc_union s1 s2) h h' /\ loc_disjoint s1 (loc_of_aloc #_ #c #(HS.frameOf r) #(HS.as_addr r) x) /\ loc_includes (address_liveness_insensitive_locs c) s2 /\ h `HS.contains` r)) + (ensures (h' `HS.contains` r)) + +val modifies_preserves_region_liveness + (#al: aloc_t) (#c: cls al) + (l1 l2: loc c) + (h h' : HS.mem) + (r: HS.rid) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ region_liveness_insensitive_locs c `loc_includes` l2 /\ loc_disjoint (loc_region_only false r) l1 /\ HS.live_region h r)) + (ensures (HS.live_region h' r)) + +val modifies_preserves_region_liveness_reference + (#al: aloc_t) (#c: cls al) + (l1 l2: loc c) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (r: HS.mreference t pre) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ region_liveness_insensitive_locs c `loc_includes` l2 /\ loc_disjoint (loc_mreference r) l1 /\ HS.live_region h (HS.frameOf r))) + (ensures (HS.live_region h' (HS.frameOf r))) + +val modifies_preserves_region_liveness_aloc + (#al: aloc_t) (#c: cls al) + (l1 l2: loc c) + (h h' : HS.mem) + (#r: HS.rid) + (#n: nat) + (x: al r n) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ region_liveness_insensitive_locs c `loc_includes` l2 /\ loc_disjoint (loc_of_aloc x) l1 /\ HS.live_region h r)) + (ensures (HS.live_region h' r)) + +val modifies_trans + (#aloc: aloc_t) (#c: cls aloc) + (s12: loc c) + (h1 h2: HS.mem) + (s23: loc c) + (h3: HS.mem) +: Lemma + (requires (modifies s12 h1 h2 /\ modifies s23 h2 h3)) + (ensures (modifies (loc_union s12 s23) h1 h3)) + +val modifies_only_live_regions + (#aloc: aloc_t) (#c: cls aloc) + (rs: Set.set HS.rid) + (l: loc c) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_regions false rs) l) h h' /\ + (forall r . Set.mem r rs ==> (~ (HS.live_region h r))) + )) + (ensures (modifies l h h')) + +val no_upd_fresh_region + (#aloc: aloc_t) (#c: cls aloc) + (r:HS.rid) + (l:loc c) + (h0:HS.mem) + (h1:HS.mem) +: Lemma + (requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1)) + (ensures (modifies l h0 h1)) + +val fresh_frame_modifies + (#aloc: aloc_t) (c: cls aloc) + (h0 h1: HS.mem) +: Lemma + (requires (HS.fresh_frame h0 h1)) + (ensures (modifies #_ #c loc_none h0 h1)) + +val new_region_modifies + (#al: aloc_t) + (c: cls al) + (m0: HS.mem) + (r0: HS.rid) + (col: option int) +: Lemma + (requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? 
col \/ HS.is_heap_color (Some?.v col)))) + (ensures ( + let (_, m1) = HS.new_eternal_region m0 r0 col in + modifies (loc_none #_ #c) m0 m1 + )) + +val popped_modifies + (#aloc: aloc_t) (c: cls aloc) + (h0 h1: HS.mem) : Lemma + (requires (HS.popped h0 h1)) + (ensures (modifies #_ #c (loc_region_only false (HS.get_tip h0)) h0 h1)) + +val modifies_fresh_frame_popped + (#aloc: aloc_t) (#c: cls aloc) + (h0 h1: HS.mem) + (s: loc c) + (h2 h3: HS.mem) +: Lemma + (requires ( + HS.fresh_frame h0 h1 /\ + modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\ + HS.get_tip h2 == HS.get_tip h1 /\ + HS.popped h2 h3 + )) + (ensures ( + modifies s h0 h3 /\ + HS.get_tip h3 == HS.get_tip h0 + )) + +val modifies_loc_regions_intro + (#aloc: aloc_t) (#c: cls aloc) + (rs: Set.set HS.rid) + (h1 h2: HS.mem) +: Lemma + (requires (HS.modifies rs h1 h2)) + (ensures (modifies (loc_regions #_ #c true rs) h1 h2)) + +val modifies_loc_addresses_intro + (#aloc: aloc_t) (#c: cls aloc) + (r: HS.rid) + (a: Set.set nat) + (l: loc c) + (h1 h2: HS.mem) +: Lemma + (requires ( + HS.live_region h2 r /\ + modifies (loc_union (loc_region_only false r) l) h1 h2 /\ + HS.modifies_ref r a h1 h2 + )) + (ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2)) + +val modifies_ralloc_post + (#aloc: aloc_t) (#c: cls aloc) + (#a: Type) + (#rel: Preorder.preorder a) + (i: HS.rid) + (init: a) + (h: HS.mem) + (x: HST.mreference a rel) + (h' : HS.mem) +: Lemma + (requires (HST.ralloc_post i init h x h')) + (ensures (modifies (loc_none #_ #c) h h')) + +val modifies_salloc_post + (#aloc: aloc_t) (#c: cls aloc) + (#a: Type) + (#rel: Preorder.preorder a) + (init: a) + (h: HS.mem) + (x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } ) + (h' : HS.mem) +: Lemma + (requires (HST.salloc_post init h x h')) + (ensures (modifies (loc_none #_ #c) h h')) + +val modifies_free + (#aloc: aloc_t) (#c: cls aloc) + (#a: Type) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel { HS.is_mm r } ) + (m: HS.mem { m `HS.contains` r } ) +: Lemma + (modifies (loc_freed_mreference #_ #c r) m (HS.free r m)) + +val modifies_none_modifies + (#aloc: aloc_t) (#c: cls aloc) + (h1 h2: HS.mem) +: Lemma + (requires (HST.modifies_none h1 h2)) + (ensures (modifies (loc_none #_ #c) h1 h2)) + +val modifies_upd + (#aloc: aloc_t) (#c: cls aloc) + (#t: Type) (#pre: Preorder.preorder t) + (r: HS.mreference t pre) + (v: t) + (h: HS.mem) +: Lemma + (requires (HS.contains h r)) + (ensures (modifies #_ #c (loc_mreference r) h (HS.upd h r v))) + +val modifies_strengthen + (#al: aloc_t) (#c: cls al) (l: loc c) (#r0: HS.rid) (#a0: nat) (al0: al r0 a0) (h h' : HS.mem) + (alocs: ( + (f: ((t: Type) -> (pre: Preorder.preorder t) -> (m: HS.mreference t pre) -> Lemma + (requires (HS.frameOf m == r0 /\ HS.as_addr m == a0 /\ HS.contains h m)) + (ensures (HS.contains h' m)) + )) -> + (x: al r0 a0) -> + Lemma + (requires (c.aloc_disjoint x al0 /\ loc_disjoint (loc_of_aloc x) l)) + (ensures (c.aloc_preserved x h h')) + )) +: Lemma + (requires (modifies (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h')) + (ensures (modifies (loc_union l (loc_of_aloc al0)) h h')) + +(** BEGIN TODO: move to FStar.Monotonic.HyperStack *) + +[@@erasable] +val does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: GTot Type0 + +val not_live_region_does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: Lemma + (requires (~ (HS.live_region h (fst ra)))) + (ensures (h `does_not_contain_addr` ra)) + +val unused_in_does_not_contain_addr + (h: HS.mem) + (#a: Type) + 
(#rel: Preorder.preorder a) + (r: HS.mreference a rel) +: Lemma + (requires (r `HS.unused_in` h)) + (ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r))) + +val addr_unused_in_does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: Lemma + (requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` (fst ra)))) + (ensures (h `does_not_contain_addr` ra)) + +val does_not_contain_addr_addr_unused_in + (h: HS.mem) + (ra: HS.rid & nat) +: Lemma + (requires (h `does_not_contain_addr` ra)) + (ensures (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (HS.get_hmap h `Map.sel` (fst ra)))) + +val free_does_not_contain_addr + (#a: Type0) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) + (m: HS.mem) + (x: HS.rid & nat) +: Lemma + (requires ( + HS.is_mm r /\ + m `HS.contains` r /\ + fst x == HS.frameOf r /\ + snd x == HS.as_addr r + )) + (ensures ( + HS.free r m `does_not_contain_addr` x + )) + +val does_not_contain_addr_elim + (#a: Type0) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) + (m: HS.mem) + (x: HS.rid & nat) +: Lemma + (requires ( + m `does_not_contain_addr` x /\ + HS.frameOf r == fst x /\ + HS.as_addr r == snd x + )) + (ensures (~ (m `HS.contains` r))) + +(** END TODO *) + +val loc_not_unused_in (#al: aloc_t) (c: cls al) (h: HS.mem) : GTot (loc c) + +val loc_unused_in (#al: aloc_t) (c: cls al) (h: HS.mem) : GTot (loc c) + +val loc_regions_unused_in (#al: aloc_t) (c: cls al) (h: HS.mem) (rs: Set.set HS.rid) : Lemma + (requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r)))) + (ensures (loc_unused_in c h `loc_includes` loc_regions false rs)) + +val loc_addresses_unused_in (#al: aloc_t) (c: cls al) (r: HS.rid) (a: Set.set nat) (h: HS.mem) : Lemma + (requires (forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))) + (ensures (loc_unused_in c h `loc_includes` loc_addresses false r a)) + +val loc_addresses_not_unused_in (#al: aloc_t) (c: cls al) (r: HS.rid) (a: Set.set nat) (h: HS.mem) : Lemma + (requires (forall x . Set.mem x a ==> ~ (h `does_not_contain_addr` (r, x)))) + (ensures (loc_not_unused_in c h `loc_includes` loc_addresses false r a)) + +val loc_unused_in_not_unused_in_disjoint (#al: aloc_t) (c: cls al) (h: HS.mem) : Lemma + (loc_unused_in c h `loc_disjoint` loc_not_unused_in c h) + +val not_live_region_loc_not_unused_in_disjoint + (#al: aloc_t) + (c: cls al) + (h0: HS.mem) + (r: HS.rid) +: Lemma + (requires (~ (HS.live_region h0 r))) + (ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in c h0))) + +val modifies_address_liveness_insensitive_unused_in + (#al: aloc_t) + (c: cls al) + (h h' : HS.mem) +: Lemma + (requires (modifies (address_liveness_insensitive_locs c) h h')) + (ensures (loc_not_unused_in c h' `loc_includes` loc_not_unused_in c h /\ loc_unused_in c h `loc_includes` loc_unused_in c h')) + +val modifies_only_not_unused_in + (#al: aloc_t) + (#c: cls al) + (l: loc c) + (h h' : HS.mem) +: Lemma + (requires (modifies (loc_unused_in c h `loc_union` l) h h')) + (ensures (modifies l h h')) + +let modifies_only_live_addresses + (#aloc: aloc_t) (#c: cls aloc) + (r: HS.rid) + (a: Set.set nat) + (l: loc c) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_addresses false r a) l) h h' /\ + (forall x . 
Set.mem x a ==> h `does_not_contain_addr` (r, x)) + )) + (ensures (modifies l h h')) += loc_addresses_unused_in c r a h; + loc_includes_refl l; + loc_includes_union_l (loc_unused_in c h) l l; + loc_includes_union_l (loc_unused_in c h) l (loc_addresses false r a); + loc_includes_union_r (loc_union (loc_unused_in c h) l) (loc_addresses false r a) l; + modifies_loc_includes (loc_union (loc_unused_in c h) l) h h' (loc_union (loc_addresses false r a) l); + modifies_only_not_unused_in l h h' + +val mreference_live_loc_not_unused_in + (#al: aloc_t) + (c: cls al) + (#t: Type) + (#pre: Preorder.preorder t) + (h: HS.mem) + (r: HS.mreference t pre) +: Lemma + (requires (h `HS.contains` r)) + (ensures (loc_not_unused_in c h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in c h `loc_includes` loc_mreference r)) + + +val mreference_unused_in_loc_unused_in + (#al: aloc_t) + (c: cls al) + (#t: Type) + (#pre: Preorder.preorder t) + (h: HS.mem) + (r: HS.mreference t pre) +: Lemma + (requires (r `HS.unused_in` h)) + (ensures (loc_unused_in c h `loc_includes` loc_freed_mreference r /\ loc_unused_in c h `loc_includes` loc_mreference r)) + + +(** * Compositionality *) + +val aloc_union: (bool -> Tot (aloc_t u#x)) -> Tot (aloc_t u#x) + +val cls_union (#a: (bool -> Tot aloc_t)) (c: ((b: bool) -> Tot (cls (a b)))) : Tot (cls (aloc_union a)) + +val union_loc_of_loc (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) (b: bool) (l: loc (c b)) : GTot (loc (cls_union c)) + +val union_loc_of_loc_none + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) +: Lemma + (union_loc_of_loc c b (loc_none #_ #(c b)) == loc_none #_ #(cls_union c)) + +val union_loc_of_loc_union + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (l1 l2: loc (c b)) +: Lemma + (union_loc_of_loc c b (loc_union #_ #(c b) l1 l2) == loc_union #_ #(cls_union c) (union_loc_of_loc c b l1) (union_loc_of_loc c b l2)) + +val union_loc_of_loc_addresses + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: Lemma + (union_loc_of_loc c b (loc_addresses #_ #(c b) preserve_liveness r n) == loc_addresses #_ #(cls_union c) preserve_liveness r n) + +val union_loc_of_loc_regions + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (preserve_liveness: bool) + (r: Set.set HS.rid) +: Lemma + (union_loc_of_loc c b (loc_regions #_ #(c b) preserve_liveness r) == loc_regions #_ #(cls_union c) preserve_liveness r) + +val union_loc_of_loc_includes + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (s1 s2: loc (c b)) +: Lemma + (union_loc_of_loc c b s1 `loc_includes` union_loc_of_loc c b s2 <==> s1 `loc_includes` s2) + +val union_loc_of_loc_disjoint + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (s1 s2: loc (c b)) +: Lemma + (union_loc_of_loc c b s1 `loc_disjoint` union_loc_of_loc c b s2 <==> s1 `loc_disjoint` s2) + +val modifies_union_loc_of_loc + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (l: loc (c b)) + (h1 h2: HS.mem) +: Lemma + (modifies #_ #(cls_union c) (union_loc_of_loc c b l) h1 h2 <==> modifies #_ #(c b) l h1 h2) + +val loc_of_union_loc + (#al: (bool -> Tot aloc_t)) + (#c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (l: loc (cls_union c)) +: GTot (loc (c b)) + +val loc_of_union_loc_union_loc_of_loc + (#al: (bool -> HS.rid -> nat -> Tot Type)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + 
(s: loc (c b)) +: Lemma + (loc_of_union_loc b (union_loc_of_loc c b s) == s) + +val loc_of_union_loc_none + (#al: (bool -> Tot aloc_t)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) +: Lemma + (loc_of_union_loc #_ #c b loc_none == loc_none) + +val loc_of_union_loc_union + (#al: (bool -> Tot aloc_t)) + (c: ((b: bool) -> Tot (cls (al b)))) + (b: bool) + (l1 l2: loc (cls_union c)) +: Lemma + (loc_of_union_loc b (l1 `loc_union` l2) == loc_of_union_loc b l1 `loc_union` loc_of_union_loc b l2) + +val loc_of_union_loc_addresses + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: Lemma + (loc_of_union_loc #_ #c b (loc_addresses preserve_liveness r n) == loc_addresses preserve_liveness r n) + +val loc_of_union_loc_regions + (#al: (bool -> Tot aloc_t)) (c: (b: bool) -> Tot (cls (al b))) + (b: bool) + (preserve_liveness: bool) + (r: Set.set HS.rid) +: Lemma + (loc_of_union_loc #_ #c b (loc_regions preserve_liveness r) == loc_regions preserve_liveness r) + + +/// Universes + +val raise_aloc (al: aloc_t u#x) : Tot (aloc_t u#(max x (y + 1))) + +val raise_cls (#al: aloc_t u#x) (c: cls al) : Tot (cls (raise_aloc u#x u#y al)) + +val raise_loc (#al: aloc_t u#x) (#c: cls al) (l: loc c) : Tot (loc (raise_cls u#x u#y c)) + +val raise_loc_none (#al: aloc_t u#x) (#c: cls al) : Lemma + (raise_loc u#x u#y (loc_none #_ #c) == loc_none) + +val raise_loc_union (#al: aloc_t u#x) (#c: cls al) (l1 l2: loc c) : Lemma + (raise_loc u#x u#y (loc_union l1 l2) == loc_union (raise_loc l1) (raise_loc l2)) + +val raise_loc_addresses (#al: aloc_t u#x) (#c: cls al) (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma + (raise_loc u#x u#y (loc_addresses #_ #c preserve_liveness r a) == loc_addresses preserve_liveness r a) + +val raise_loc_regions (#al: aloc_t u#x) (#c: cls al) (preserve_liveness: bool) (r: Set.set HS.rid) : Lemma + (raise_loc u#x u#y (loc_regions #_ #c preserve_liveness r) == loc_regions preserve_liveness r) + +val raise_loc_includes (#al: aloc_t u#x) (#c: cls al) (l1 l2: loc c) : Lemma + (loc_includes (raise_loc u#x u#y l1) (raise_loc l2) <==> loc_includes l1 l2) + +val raise_loc_disjoint (#al: aloc_t u#x) (#c: cls al) (l1 l2: loc c) : Lemma + (loc_disjoint (raise_loc u#x u#y l1) (raise_loc l2) <==> loc_disjoint l1 l2) + +val modifies_raise_loc (#al: aloc_t u#x) (#c: cls al) (l: loc c) (h1 h2: HS.mem) : Lemma + (modifies (raise_loc u#x u#y l) h1 h2 <==> modifies l h1 h2) + +val lower_loc (#al: aloc_t u#x) (#c: cls al) (l: loc (raise_cls u#x u#y c)) : Tot (loc c) + +val lower_loc_raise_loc (#al: aloc_t u#x) (#c: cls al) (l: loc c) : Lemma + (lower_loc (raise_loc u#x u#y l) == l) + +val raise_loc_lower_loc (#al: aloc_t u#x) (#c: cls al) (l: loc (raise_cls u#x u#y c)) : Lemma + (raise_loc (lower_loc l) == l) + +val lower_loc_none (#al: aloc_t u#x) (#c: cls al) : Lemma + (lower_loc u#x u#y #_ #c loc_none == loc_none) + +val lower_loc_union (#al: aloc_t u#x) (#c: cls al) (l1 l2: loc (raise_cls u#x u#y c)) : Lemma + (lower_loc u#x u#y (loc_union l1 l2) == loc_union (lower_loc l1) (lower_loc l2)) + +val lower_loc_addresses (#al: aloc_t u#x) (#c: cls al) (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma + (lower_loc u#x u#y #_ #c (loc_addresses preserve_liveness r a) == loc_addresses preserve_liveness r a) + +val lower_loc_regions (#al: aloc_t u#x) (#c: cls al) (preserve_liveness: bool) (r: Set.set HS.rid) : Lemma + (lower_loc u#x u#y #_ #c (loc_regions preserve_liveness r) == loc_regions preserve_liveness r) 
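As an illustration (not part of this patch), the framing lemmas declared in FStar.ModifiesGen.fsti above are typically chained in client code roughly as follows. The module name, the abbreviations MG/HS/P, and the lemma name frame_mreference_across_union are hypothetical; only the FStar.ModifiesGen vals it calls are taken from the interface above, so treat this as a sketch rather than part of the library.

module ModifiesGenFramingExample

module MG = FStar.ModifiesGen
module HS = FStar.HyperStack
module P  = FStar.Preorder

(* A reference disjoint from both halves of a union is disjoint from the union
   (loc_disjoint_union_r), so a footprint `modifies (loc_union l1 l2)` cannot
   touch it (modifies_mreference_elim): liveness and contents are preserved. *)
let frame_mreference_across_union
  (#al: MG.aloc_t) (#c: MG.cls al)
  (#t: Type) (#pre: P.preorder t)
  (r: HS.mreference t pre)
  (l1 l2: MG.loc c) (h h': HS.mem)
: Lemma
  (requires (MG.loc_disjoint (MG.loc_mreference r) l1 /\
             MG.loc_disjoint (MG.loc_mreference r) l2 /\
             h `HS.contains` r /\
             MG.modifies (MG.loc_union l1 l2) h h'))
  (ensures  (h' `HS.contains` r /\ HS.sel h r == HS.sel h' r))
= MG.loc_disjoint_union_r (MG.loc_mreference r) l1 l2;
  MG.modifies_mreference_elim r (MG.loc_union l1 l2) h h'
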
diff --git a/stage0/ulib/FStar.Monotonic.DependentMap.fst b/stage0/ulib/FStar.Monotonic.DependentMap.fst new file mode 100644 index 00000000000..5ad55dcc133 --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.DependentMap.fst @@ -0,0 +1,90 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.DependentMap +(** A library for mutable partial, dependent maps, + that grow monotonically, + while subject to an invariant on the entire map *) +open FStar.HyperStack.ST +module HS = FStar.HyperStack +module DM = FStar.DependentMap + +/// `map a b`: Represent the partial map as a list of pairs of points +let map a b = list (x:a & b x) + +/// `repr r`: Converts the list of pairs into a DM.t +let rec repr (#a:eqtype) (#b:a -> Type) (r:map a b) + : GTot (partial_dependent_map a b) + = match r with + | [] -> empty_partial_dependent_map + | (|x, y|)::tl -> DM.upd (repr tl) x (Some y) + +/// Three basic operations on map: empty, sel upd +let empty #a #b = [] + +let rec sel #a #b r x = + match r with + | [] -> None + | (|x', y|)::tl -> + if x = x' then Some y else sel tl x + +let upd #a #b r x v = (|x, v|)::r + +//////////////////////////////////////////////////////////////////////////////// + +/// `grows'` and `grows`: a preorder of invariant-respecting maps +/// - Needs to be introduced in 2 steps because of an F* limitation +let grows' (#a:eqtype) (#b:a -> Type) (#inv:(partial_dependent_map a b -> Type)) + (m1:imap a b inv) (m2:imap a b inv) = + forall x.{:pattern (Some? (sel m1 x))} + Some? (sel m1 x) ==> + Some? 
(sel m2 x) /\ + Some?.v (sel m1 x) == Some?.v (sel m2 x) +let grows #a #b #inv = grows' #a #b #inv + +let contains_stable #a #b #inv #r t x y = () +let defined_stable #a #b #inv #r t x = () +//////////////////////////////////////////////////////////////////////////////// + +//The main stateful interface is minimal and straigtforward +let alloc #a #b #inv #r _ = ralloc r [] + +let extend #a #b #inv #r t x y = + recall t; + let cur = !t in + t := upd cur x y; + mr_witness t (contains t x y) + +let lookup #a #b #inv #r t x = + let m = !t in + let y = sel m x in + match y with + | None -> y + | Some b -> + mr_witness t (contains t x b); + y + +let rec mmap_f #a #b #c m f = + match m with + | [] -> + assert (DM.equal (empty_partial_dependent_map #a #c) + (DM.map (f_opt f) (empty_partial_dependent_map #a #b))); + assert_norm (repr #a #c [] == empty_partial_dependent_map #a #c); + [] + | (| x, y |)::tl -> (| x, f x y |)::(mmap_f #a #b #c tl f) //AR: doesn't work without these implicits + +let map_f #a #b #c #inv #inv' #r #r' t f + = let m = !t in + ralloc r' (mmap_f m f) diff --git a/stage0/ulib/FStar.Monotonic.DependentMap.fsti b/stage0/ulib/FStar.Monotonic.DependentMap.fsti new file mode 100644 index 00000000000..37d1b9a82af --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.DependentMap.fsti @@ -0,0 +1,246 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Monotonic.DependentMap +(** A library for mutable partial, dependent maps, + that grow monotonically, + while subject to an invariant on the entire map *) +open FStar.HyperStack.ST + +module HS = FStar.HyperStack +module DM = FStar.DependentMap +module HST = FStar.HyperStack.ST + +/// The logical model of the map is given in terms of DM.t/// +let opt (#a:eqtype) (b:a -> Type) = fun (x:a) -> option (b x) +let partial_dependent_map (a:eqtype) (b:a -> Type) = + DM.t a (opt b) + +/// An empty partial, dependent map maps all keys to None +let empty_partial_dependent_map (#a:_) (#b:_) + : partial_dependent_map a b + = DM.create #a #(opt b) (fun x -> None) +//////////////////////////////////////////////////////////////////////////////// + +/// `map a b`: Internally, the model is implemented using this abstract type +/// These maps provide three operations: +/// - empty, sel, upd +/// Which are proven to be in correspondence with the operations on DM.t +/// via the homomorphism `repr` below +val map + (a:eqtype) + (b:(a -> Type u#b)) + : Type u#b + +/// `repr m`: A ghost function that reveals the internal `map` as a `DM.t` +val repr (#a:_) (#b:_) + (r:map a b) + : GTot (partial_dependent_map a b) + +/// An `empty : map a b` is equivalent to the `empty_partial_dependent_map` +val empty (#a:_) (#b:_) + : r:map a b{repr r == empty_partial_dependent_map} + +/// Selecting a key from a map `sel r x` is equivalent to selecting it from its `repr` +val sel (#a:_) (#b:_) + (r:map a b) + (x:a) + : Pure (option (b x)) + (requires True) + (ensures (fun o -> DM.sel (repr r) x == o)) + +/// Updating a map using `upd r x v` is equivalent to updating its repr +val upd (#a:_) (#b:_) + (r:map a b) + (x:a) + (v:b x) + : Pure (map a b) + (requires True) + (ensures (fun r' -> repr r' == DM.upd (repr r) x (Some v))) + +/// `imap a b inv` further augments a map with an invariant on its repr +let imap (a:eqtype) (b: a -> Type) (inv:DM.t a (opt b) -> Type) = + r:map a b{inv (repr r)} + +/// `grows r1 r2` is an abstract preorder on `imap` +val grows (#a:_) (#b:_) (#inv:DM.t a (opt b) -> Type) + : FStar.Preorder.preorder (imap a b inv) + +/// And, finally, the main type of this module: +/// +/// `t r a b inv` is a mutable, imap stored in region `r` constrained +/// to evolve according to `grows` +let t (r:HST.erid) (a:eqtype) (b:a -> Type) (inv:DM.t a (opt b) -> Type) = + m_rref r (imap a b inv) grows + +/// `defined t x h`: In state `h`, map `t` is defined at point `x`. +/// - We define these in `Type` rather than `bool` +/// since it is typical for client code to use `defined` +/// as a stable heap predicate, which requires a `heap -> Type` +let defined + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + (h:HS.mem) + : GTot Type + = Some? 
(sel (HS.sel h t) x) + +/// `fresh t x h`: The map is not defined at point `x` +let fresh + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + (h:HS.mem) + : GTot Type0 + = ~ (defined t x h) + +/// `value_of t x h`: Get the value of `x` in the map `t` in state `h` +let value_of + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + (h:HS.mem{defined t x h}) + : GTot (b x) + = Some?.v (sel (HS.sel h t) x) + +/// `contains t x y h`: In state `h`, `t` maps `x` to `y` +let contains + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + (y:b x) + (h:HS.mem) + : GTot Type0 + = defined t x h /\ + value_of t x h == y + +/// `contains_stable`: The `contains` predicate is stable with respect to `grows` +val contains_stable + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + (y:b x) + : Lemma (ensures (HST.stable_on_t t (contains t x y))) + +/// `defined_stable`: The `defined` predicate is stable with respect to `grows` +/// - this is easily derivable from the previous lemma +/// But, we provide it here as a convenience to clients +val defined_stable + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + : Lemma (ensures (HST.stable_on_t t (defined t x))) + +//////////////////////////////////////////////////////////////////////////////// +// Interface of stateful operations +//////////////////////////////////////////////////////////////////////////////// + +/// `alloc ()`: Allocating a new `t` requires proving the `inv` of the empty map +val alloc (#a:eqtype) (#b:a -> Type) (#inv:DM.t a (opt b) -> Type) (#r:HST.erid) + (_:unit{inv (repr empty)}) + : ST (t r a b inv) + (requires (fun h -> HyperStack.ST.witnessed (region_contains_pred r))) + (ensures (fun h0 x h1 -> + ralloc_post r empty h0 x h1)) + +/// `extend t x y`: Extending `t` with (x -> y) +/// Requires: - proving that the `t` does not already define `x` +/// - and that the resulting heap would still respect `inv` +/// Ensures: - that only `t` is modified +/// - by updating it to contain `(x -> y)` +/// - and in the future `t` will always contain `(x -> y)` + +val extend + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + (y:b x) + : Stack unit + (requires (fun h -> + ~(defined t x h) /\ + inv (repr (upd (HS.sel h t) x y)))) + (ensures (fun h0 u h1 -> + let cur = HS.sel h0 t in + HS.contains h1 t /\ + HS.modifies (Set.singleton r) h0 h1 /\ + HS.modifies_ref r (Set.singleton (HS.as_addr t)) h0 h1 /\ + HS.sel h1 t == upd cur x y /\ + witnessed (contains t x y))) + +/// `lookup t x`: Querying the map `t` at point `x` +/// Ensures: - The state does not change +/// - If it returns `Some v`, then `t` will always contains `x -> v` +val lookup + (#a:eqtype) + (#b:a -> Type) + (#inv:DM.t a (opt b) -> Type) + (#r:HST.erid) + (t:t r a b inv) + (x:a) + : ST (option (b x)) + (requires (fun h -> True)) + (ensures (fun h0 y h1 -> + h0==h1 /\ + y == sel (HS.sel h1 t) x /\ + (match y with + | None -> ~(defined t x h1) + | Some v -> + contains t x v h1 /\ + witnessed (contains t x v)))) + +let forall_t (#a:eqtype) (#b:a -> Type) (#inv:DM.t a (opt b) -> Type) (#r:HST.erid) + (t:t r a b inv) (h:HS.mem) (pred: (x:a) -> b x -> Type0) + = forall (x:a).{:pattern (sel (HS.sel h t) x) \/ (DM.sel (repr (HS.sel h t)) x)} + defined t x h ==> pred x (Some?.v 
(sel (HS.sel h t) x)) + +let f_opt (#a:eqtype) (#b #c:a -> Type) (f: (x:a) -> b x -> c x) :(x:a) -> option (b x) -> option (c x) + = fun x y -> + match y with + | None -> None + | Some y -> Some (f x y) + +val mmap_f (#a:eqtype) (#b #c:a -> Type) (m:map a b) (f: (x:a) -> b x -> c x) + :Tot (m':(map a c){repr m' == DM.map (f_opt f) (repr m)}) + +val map_f (#a:eqtype) (#b #c:a -> Type) + (#inv:DM.t a (opt b) -> Type) (#inv':DM.t a (opt c) -> Type) + (#r #r':HST.erid) + (m:t r a b inv) (f: (x:a) -> b x -> c x) + :ST (t r' a c inv') + (requires (fun h0 -> inv' (DM.map (f_opt f) (repr (HS.sel h0 m))) /\ witnessed (region_contains_pred r'))) + (ensures (fun h0 m' h1 -> + inv' (DM.map (f_opt f) (repr (HS.sel h0 m))) /\ //AR: surprised that even after the fix for #57, we need this repetition from the requires clause + ralloc_post r' (mmap_f (HS.sel h0 m) f) h0 m' h1)) diff --git a/stage0/ulib/FStar.Monotonic.Heap.fst b/stage0/ulib/FStar.Monotonic.Heap.fst new file mode 100644 index 00000000000..1d9e5424251 --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.Heap.fst @@ -0,0 +1,331 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.Heap + +open FStar.Preorder +open FStar.Classical +module F = FStar.FunctionalExtensionality + +private noeq type heap_rec = { + next_addr: pos; + memory : F.restricted_t pos (fun (x:pos) + -> option (a:Type0 & rel:(option (preorder a)) & b:bool & a)) + //type, preorder, mm flag, and value +} + +let heap = h:heap_rec{(forall (n:nat). n >= h.next_addr ==> None? (h.memory n))} + +let equal h1 h2 = + let _ = () in + h1.next_addr = h2.next_addr /\ + FStar.FunctionalExtensionality.feq h1.memory h2.memory + +let equal_extensional h1 h2 = () + +let emp = { + next_addr = 1; + memory = F.on_dom pos (fun r -> None) +} + +let next_addr h = h.next_addr + +noeq +type core_mref (a:Type0) : Type0 = { + addr: (x: nat { x > 0 } ); + init: a; + mm: bool; //manually managed flag +} + +let addr_of #a #rel r = r.addr + +let is_mm #a #rel r = r.mm + +let contains #a #rel h r = + let _ = () in + Some? (h.memory r.addr) /\ + (let Some (| a1, pre_opt, mm, _ |) = h.memory r.addr in + a == a1 /\ Some? pre_opt /\ Some?.v pre_opt == rel /\ mm = r.mm) //using `===` here, since otherwise typechecker fails with a and a1 being different types, why? + +let addr_unused_in n h = n <> 0 && None? 
(h.memory n) + +let not_addr_unused_in_nullptr h = () + +let unused_in #a #rel r h = addr_unused_in (addr_of r) h + +let sel_tot #a #rel h r = + let Some (| _, _, _, x |) = h.memory r.addr in + x + +// +// We want to provide a `sel` API to the client that does not require a +// `contains` precondition, so that the clients don't have to prove it at +// every use of `sel` +// +// To do so, we need to be able to branch on whether the ref is contained in the heap +// +// But that's a problem since `contains` is in prop +// +// The following function assumes a boolean returning version of contains +// We could implement is using the classical strong excluded middle axiom, +// but we prefer to assume an specialized instance of it +// +assume val contains_bool (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel) + : GTot (b:bool{b <==> (h `contains` r)}) + +let sel #a #rel h r = + if h `contains_bool` r + then sel_tot #a h r + else r.init + +let upd_tot' (#a: Type0) (#rel: preorder a) (h: heap) (r: mref a rel) (x: a) = + { h with memory = F.on_dom pos (fun r' -> if r.addr = r' + then Some (| a, Some rel, r.mm, x |) + else h.memory r') } + +let upd_tot #a #rel h r x = upd_tot' h r x + +let upd #a #rel h r x = + if h `contains_bool` r + then upd_tot' h r x + else + if r.addr >= h.next_addr + then + { next_addr = r.addr + 1; + memory = F.on_dom pos (fun r' -> if r' = r.addr + then Some (| a, Some rel, r.mm, x |) + else h.memory r') } + else + { h with memory = F.on_dom pos (fun r' -> if r' = r.addr + then Some (| a, Some rel, r.mm, x |) + else h.memory r') } + +let alloc #a rel h x mm = + let r = { addr = h.next_addr; init = x; mm = mm } in + r, { next_addr = r.addr + 1; + memory = F.on_dom pos (fun r' -> if r' = r.addr + then Some (| a, Some rel, r.mm, x |) + else h.memory r') } + +let free_mm #a #rel h r = + { h with memory = F.on_dom pos (fun r' -> if r' = r.addr then None else h.memory r') } + +(* + * update of a well-typed mreference + *) +private let lemma_upd_contains_test + (#a:Type) (#rel:preorder a) (h0:heap) (r:mref a rel) (x:a) + :Lemma (h0 `contains` r ==> + (let h1 = upd h0 r x in + (forall (b:Type) (rel:preorder b) (r':mref b rel). (h0 `contains` r' /\ addr_of r' = addr_of r) ==> sel h1 r' == x /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). addr_of r' <> addr_of r ==> sel h0 r' == sel h1 r') /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). h0 `contains` r' <==> h1 `contains` r') /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). r' `unused_in` h0 <==> r' `unused_in` h1)))) + = () + +(* + * update of a mreference that is mapped but not necessarily well-typed + * we cannot prove that h0 `contains` r' ==> h1 `contains` r' + * because consider that in h0 (r:mref a) contains (| b, y:b |), + * and that (r':mref b) s.t. r'.addr = r.addr + * in h0, r' is well-typed, but in h1 it's not + *) +private let lemma_upd_contains_not_necessarily_well_typed_test + (#a:Type) (#rel:preorder a) (h0:heap) (r:mref a rel) (x:a) + :Lemma ((~ (r `unused_in` h0)) ==> + (let h1 = upd h0 r x in + h1 `contains` r /\ + (forall (b:Type) (#rel:preorder b) (r':mref b rel). addr_of r' <> addr_of r ==> sel h0 r' == sel h1 r') /\ + (forall (b:Type) (#rel:preorder b) (r':mref b rel). (r'.addr <> r.addr /\ h0 `contains` r') ==> h1 `contains` r') /\ + (forall (b:Type) (#rel:preorder b) (r':mref b rel). 
r' `unused_in` h0 <==> r' `unused_in` h1))) + = () + +(* + * update of an unused mreference + *) +private let lemma_upd_unused_test + (#a:Type) (#rel:preorder a) (h0:heap) (r:mref a rel) (x:a) + :Lemma (r `unused_in` h0 ==> + (let h1 = upd h0 r x in + h1 `contains` r /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). addr_of r' <> addr_of r ==> sel h0 r' == sel h1 r') /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). h0 `contains` r' ==> h1 `contains` r') /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). ~ (r' `unused_in` h0) ==> ~ (r' `unused_in` h1)))) + = () + +(* + * alloc and alloc_mm behaves just like upd of an unmapped mreference + *) +private let lemma_alloc_test (#a:Type) (rel:preorder a) (h0:heap) (x:a) (mm:bool) + :Lemma (let (r, h1) = alloc rel h0 x mm in + r `unused_in` h0 /\ + h1 `contains` r /\ + is_mm r = mm /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). addr_of r' <> addr_of r ==> sel h0 r' == sel h1 r') /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). h0 `contains` r' ==> h1 `contains` r') /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). ~ (r' `unused_in` h0) ==> ~ (r' `unused_in` h1))) + = () + +private let lemma_free_mm_test (#a:Type) (rel:preorder a) (h0:heap) (r:mref a rel{h0 `contains` r /\ is_mm r}) + :Lemma (let h1 = free_mm h0 r in + r `unused_in` h1 /\ + (forall (b:Type) (rel:preorder b) (r':mref b rel). addr_of r' <> addr_of r ==> + ((sel h0 r' == sel h1 r' /\ + (h0 `contains` r' <==> h1 `contains` r') /\ + (r' `unused_in` h0 <==> r' `unused_in` h1))))) + = () + +private let lemma_alloc_fresh_test (#a:Type) (rel:preorder a) (h0:heap) (x:a) (mm:bool) + :Lemma (let r, h1 = alloc rel h0 x mm in + fresh r h0 h1 /\ modifies Set.empty h0 h1) + = () + +let lemma_ref_unused_iff_addr_unused #a #rel h r = () +let lemma_contains_implies_used #a #rel h r = () +let lemma_distinct_addrs_distinct_types #a #b #rel1 #rel2 h r1 r2 = () +let lemma_distinct_addrs_distinct_preorders u = () +let lemma_distinct_addrs_distinct_mm u = () +let lemma_distinct_addrs_unused #a #b #rel1 #rel2 h r1 r2 = () +let lemma_alloc #a rel h0 x mm = + let r, h1 = alloc rel h0 x mm in + let h1' = upd h0 r x in + assert (equal h1 h1') +let lemma_free_mm_sel #a #b #rel1 #rel2 h0 r1 r2 = () +let lemma_free_mm_contains #a #b #rel1 #rel2 h0 r1 r2 = () +let lemma_free_mm_unused #a #b #rel1 #rel2 h0 r1 r2 = () +let lemma_free_addr_unused_in #a #rel h r n = () +let lemma_sel_same_addr #a #rel h r1 r2 = () +let lemma_sel_upd1 #a #rel h r1 x r2 = () +let lemma_sel_upd2 #a #b #rel1 #rel2 h r1 r2 x = () +let lemma_mref_injectivity = () +let lemma_in_dom_emp #a #rel r = () +let lemma_upd_contains #a #rel h r x = () +let lemma_well_typed_upd_contains #a #b #rel1 #rel2 h r1 x r2 = () +let lemma_unused_upd_contains #a #b #rel1 #rel2 h r1 x r2 = () +let lemma_upd_contains_different_addr #a #b #rel1 #rel2 h r1 x r2 = () +let lemma_upd_unused #a #b #rel1 #rel2 h r1 x r2 = () +let lemma_contains_upd_modifies #a #rel h r x = () +let lemma_unused_upd_modifies #a #rel h r x = () + +let lemma_sel_equals_sel_tot_for_contained_refs #a #rel h r = () +let lemma_upd_equals_upd_tot_for_contained_refs #a #rel h r x = () +let lemma_modifies_and_equal_dom_sel_diff_addr #a #rel s h0 h1 r = () + +let lemma_heap_equality_upd_same_addr #a #rel h r1 r2 x = + assert (equal (upd h r1 x) (upd h r2 x)) + +let lemma_heap_equality_cancel_same_mref_upd #a #rel h r x y = + let h0 = upd (upd h r x) r y in + let h1 = upd h r y in + assert (equal h0 h1) + +let lemma_heap_equality_upd_with_sel #a #rel h r = + let 
h' = upd h r (sel h r) in + let Some (| _, _, _, _ |) = h.memory r.addr in + assert (equal h h') + +let lemma_heap_equality_commute_distinct_upds #a #b #rel_a #rel_b h r1 r2 x y = + let h0 = upd (upd h r1 x) r2 y in + let h1 = upd (upd h r2 y) r1 x in + assert (equal h0 h1) + +let lemma_next_addr_upd_tot #_ #_ _ _ _ = () +let lemma_next_addr_upd #_ #_ _ _ _ = () +let lemma_next_addr_alloc #_ _ _ _ _ = () +let lemma_next_addr_free_mm #_ #_ _ _ = () +let lemma_next_addr_contained_refs_addr #_ #_ _ _ = () + +(*** Untyped views of references *) + +(* Definition and ghost decidable equality *) +noeq type aref' :Type0 = { + a_addr: (x: nat { x > 0 } ); + a_mm: bool; //manually managed flag +} +let aref = aref' +let dummy_aref = { + a_addr = 1; + a_mm = false; +} +let aref_equal a1 a2 = a1.a_addr = a2.a_addr && a1.a_mm = a2.a_mm + +(* Introduction rule *) +let aref_of #t #rel r = { + a_addr = r.addr; + a_mm = r.mm; +} + +(* Operators lifted from ref *) +let addr_of_aref a = a.a_addr +let addr_of_aref_of #t #rel r = () +let aref_is_mm a = a.a_mm +let is_mm_aref_of #t #rel r = () +let aref_unused_in a h = None? (h.memory a.a_addr) +let unused_in_aref_of #t #rel r h = () +let contains_aref_unused_in #a #rel h x y = () + +(* Elimination rule *) +let aref_live_at (h: heap) (a: aref) (t: Type0) (rel: preorder t) = + let _ = () in + Some? (h.memory a.a_addr) /\ + (let Some (| a1, pre_opt, mm, _ |) = h.memory a.a_addr in + t == a1 /\ Some? pre_opt /\ Some?.v pre_opt === rel /\ mm == a.a_mm) //using `===` here, since otherwise typechecker fails with a and a1 being different types, why? + +let ref_of' + (h: heap) + (a: aref) + (t: Type0) + (rel: preorder t) +: Pure (mref t rel) + (requires (aref_live_at h a t rel)) + (ensures (fun _ -> True)) += let Some (| _, pre_opt, _, x |) = h.memory a.a_addr in + { + addr = a.a_addr; + init = x; + mm = a.a_mm + } + +let gref_of a t rel = + let m : squash (exists (h: heap) . aref_live_at h a t rel) = () in + let l : (exists (h: heap) . aref_live_at h a t rel) = + Squash.join_squash #(h: heap & aref_live_at h a t rel) m + in + let k : (exists (h: heap { aref_live_at h a t rel} ) . squash True ) = + FStar.Squash.bind_squash + #(h: heap & aref_live_at h a t rel) + #(h: (h: heap { aref_live_at h a t rel} ) & squash True) + l + (fun h -> let (| h', _ |) = h in Squash.return_squash (| h', () |) ) + in + let h = FStar.ErasedLogic.exists_proj1 #(h: heap {aref_live_at h a t rel}) #(fun _ -> squash True) k in + ref_of' h a t rel + +let ref_of h a t rel = ref_of' h a t rel + +let aref_live_at_aref_of h #t #rel r = () +let contains_gref_of h a t rel = () +let aref_of_gref_of a t rel = () + +let addr_of_gref_of a t rel = addr_of_aref_of (gref_of a t rel) + +let is_mm_gref_of a t rel = is_mm_aref_of (gref_of a t rel) + +let unused_in_gref_of a t rel h = unused_in_aref_of (gref_of a t rel) h + +let sel_ref_of a t rel h1 h2 = () + +let upd_ref_of a t rel h1 h2 x = + lemma_heap_equality_upd_same_addr h1 (ref_of h2 a t rel) (gref_of a t rel) x diff --git a/stage0/ulib/FStar.Monotonic.Heap.fsti b/stage0/ulib/FStar.Monotonic.Heap.fsti new file mode 100644 index 00000000000..35834a1df3b --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.Heap.fsti @@ -0,0 +1,419 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.Heap + +module S = FStar.Set +module TS = FStar.TSet + +open FStar.Preorder + +let set = Set.set +let tset = TSet.set + +val heap :Type u#1 + +val equal: heap -> heap -> Type0 + +val equal_extensional (h1:heap) (h2:heap) + :Lemma (requires True) (ensures (equal h1 h2 <==> h1 == h2)) + [SMTPat (equal h1 h2)] + +val emp :heap + +val next_addr: heap -> GTot pos + +new +val core_mref ([@@@ strictly_positive] a:Type0) : Type0 + +let mref (a:Type0) (rel:preorder a) : Type0 = core_mref a + +val addr_of: #a:Type0 -> #rel:preorder a -> mref a rel -> GTot pos + +val is_mm: #a:Type0 -> #rel:preorder a -> mref a rel -> GTot bool + +let compare_addrs (#a #b:Type0) (#rel1:preorder a) (#rel2:preorder b) (r1:mref a rel1) (r2:mref b rel2) + :GTot bool = addr_of r1 = addr_of r2 + +val contains: #a:Type0 -> #rel:preorder a -> heap -> mref a rel -> Type0 + +val addr_unused_in: nat -> heap -> Type0 + +val not_addr_unused_in_nullptr (h: heap) : Lemma (~ (addr_unused_in 0 h)) + +val unused_in: #a:Type0 -> #rel:preorder a -> mref a rel -> heap -> Type0 + +let fresh (#a:Type) (#rel:preorder a) (r:mref a rel) (h0:heap) (h1:heap) = + r `unused_in` h0 /\ h1 `contains` r + +let only_t (#a:Type0) (#rel:preorder a) (x:mref a rel) :GTot (tset nat) = TS.singleton (addr_of x) + +let only (#a:Type0) (#rel:preorder a) (x:mref a rel) :GTot (set nat) = S.singleton (addr_of x) + +let op_Hat_Plus_Plus (#a:Type0) (#rel:preorder a) (r:mref a rel) (s:set nat) :GTot (set nat) = S.union (only r) s + +let op_Plus_Plus_Hat (#a:Type0) (#rel:preorder a) (s:set nat) (r:mref a rel) :GTot (set nat) = S.union s (only r) + +let op_Hat_Plus_Hat (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (r1:mref a rel1) (r2:mref b rel2) + :GTot (set nat) = S.union (only r1) (only r2) + +val sel_tot: #a:Type0 -> #rel:preorder a -> h:heap -> r:mref a rel{h `contains` r} -> Tot a + +val sel: #a:Type0 -> #rel:preorder a -> heap -> mref a rel -> GTot a + +val upd_tot: #a:Type0 -> #rel:preorder a -> h:heap -> r:mref a rel{h `contains` r} -> x:a -> Tot heap + +val upd: #a:Type0 -> #rel:preorder a -> h:heap -> r:mref a rel -> x:a -> GTot heap + +val alloc: #a:Type0 -> rel:preorder a -> heap -> a -> mm:bool -> Tot (mref a rel & heap) + +val free_mm: #a:Type0 -> #rel:preorder a -> h:heap -> r:mref a rel{h `contains` r /\ is_mm r} -> Tot heap + +let modifies_t (s:tset nat) (h0:heap) (h1:heap) = + (forall (a:Type) (rel:preorder a) (r:mref a rel).{:pattern (sel h1 r)} + ((~ (TS.mem (addr_of r) s)) /\ h0 `contains` r) ==> sel h1 r == sel h0 r) /\ + (forall (a:Type) (rel:preorder a) (r:mref a rel).{:pattern (contains h1 r)} + h0 `contains` r ==> h1 `contains` r) /\ + (forall (a:Type) (rel:preorder a) (r:mref a rel).{:pattern (r `unused_in` h0)} + r `unused_in` h1 ==> r `unused_in` h0) /\ + (forall (n: nat) . {:pattern (n `addr_unused_in` h0) } + n `addr_unused_in` h1 ==> n `addr_unused_in` h0 + ) + + +let modifies (s:set nat) (h0:heap) (h1:heap) = modifies_t (TS.tset_of_set s) h0 h1 + +let equal_dom (h1:heap) (h2:heap) :GTot Type0 = + (forall (a:Type0) (rel:preorder a) (r:mref a rel). 
+ {:pattern (h1 `contains` r) \/ (h2 `contains` r)} + h1 `contains` r <==> h2 `contains` r) /\ + (forall (a:Type0) (rel:preorder a) (r:mref a rel). + {:pattern (r `unused_in` h1) \/ (r `unused_in` h2)} + r `unused_in` h1 <==> r `unused_in` h2) + +val lemma_ref_unused_iff_addr_unused (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel) + :Lemma (requires True) + (ensures (r `unused_in` h <==> addr_of r `addr_unused_in` h)) + [SMTPatOr [[SMTPat (r `unused_in` h)]; [SMTPat (addr_of r `addr_unused_in` h)]]] + +val lemma_contains_implies_used (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel) + :Lemma (requires (h `contains` r)) + (ensures (~ (r `unused_in` h))) + [SMTPatOr [[SMTPat (h `contains` r)]; [SMTPat (r `unused_in` h)]]] + +val lemma_distinct_addrs_distinct_types + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h:heap) (r1:mref a rel1) (r2:mref b rel2) + :Lemma (requires (a =!= b /\ h `contains` r1 /\ h `contains` r2)) + (ensures (addr_of r1 <> addr_of r2)) + [SMTPat (h `contains` r1); SMTPat (h `contains` r2)] + +val lemma_distinct_addrs_distinct_preorders (u:unit) + :Lemma (forall (a:Type0) (rel1 rel2:preorder a) (r1:mref a rel1) (r2:mref a rel2) (h:heap). + {:pattern (h `contains` r1); (h `contains` r2)} + (h `contains` r1 /\ h `contains` r2 /\ rel1 =!= rel2) ==> addr_of r1 <> addr_of r2) + +val lemma_distinct_addrs_distinct_mm (u:unit) + :Lemma (forall (a b:Type0) (rel1:preorder a) (rel2:preorder b) (r1:mref a rel1) (r2:mref b rel2) (h:heap). + {:pattern (h `contains` r1); (h `contains` r2)} + (h `contains` r1 /\ h `contains` r2 /\ is_mm r1 =!= is_mm r2) ==> addr_of r1 <> addr_of r2) + +(* + * AR: this is a bit surprising. i had to add ~ (r1 === r2) postcondition to make the lemma + * lemma_live_1 in hyperstack to go through. if addr_of r1 <> addr_of r2, shouldn't we get ~ (r1 === r2) + * automatically? should dig into smt encoding to figure. 
+ *) +val lemma_distinct_addrs_unused + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h:heap) (r1:mref a rel1) (r2:mref b rel2) + :Lemma (requires (r1 `unused_in` h /\ ~ (r2 `unused_in` h))) + (ensures (addr_of r1 <> addr_of r2 /\ (~ (r1 === r2)))) + [SMTPat (r1 `unused_in` h); SMTPat (r2 `unused_in` h)] + +val lemma_alloc (#a:Type0) (rel:preorder a) (h0:heap) (x:a) (mm:bool) + :Lemma (requires True) + (ensures (let r, h1 = alloc rel h0 x mm in + fresh r h0 h1 /\ h1 == upd h0 r x /\ is_mm r = mm /\ addr_of r == next_addr h0)) + [SMTPat (alloc rel h0 x mm)] + +val lemma_free_mm_sel + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h0:heap) + (r1:mref a rel1{h0 `contains` r1 /\ is_mm r1}) (r2:mref b rel2) + :Lemma (requires True) + (ensures (addr_of r2 <> addr_of r1 ==> sel h0 r2 == sel (free_mm h0 r1) r2)) + [SMTPat (sel (free_mm h0 r1) r2)] + +val lemma_free_mm_contains + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h0:heap) + (r1:mref a rel1{h0 `contains` r1 /\ is_mm r1}) (r2:mref b rel2) + :Lemma (requires True) + (ensures (let h1 = free_mm h0 r1 in + (addr_of r2 <> addr_of r1 /\ h0 `contains` r2) <==> h1 `contains` r2)) + [SMTPat ((free_mm h0 r1) `contains` r2)] + +val lemma_free_mm_unused + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h0:heap) + (r1:mref a rel1{h0 `contains` r1 /\ is_mm r1}) (r2:mref b rel2) + :Lemma (requires True) + (ensures (let h1 = free_mm h0 r1 in + ((addr_of r1 = addr_of r2 ==> r2 `unused_in` h1) /\ + (r2 `unused_in` h0 ==> r2 `unused_in` h1) /\ + (r2 `unused_in` h1 ==> (r2 `unused_in` h0 \/ addr_of r2 = addr_of r1))))) + [SMTPat (r2 `unused_in` (free_mm h0 r1))] + +val lemma_free_addr_unused_in + (#a: Type) (#rel: preorder a) (h: heap) (r: mref a rel { h `contains` r /\ is_mm r } ) + (n: nat) +: Lemma + (requires (n `addr_unused_in` (free_mm h r) /\ n <> addr_of r)) + (ensures (n `addr_unused_in` h)) + [SMTPat (n `addr_unused_in` (free_mm h r))] + +(* + * AR: we can prove this lemma only if both the mreferences have same preorder + *) +val lemma_sel_same_addr (#a:Type0) (#rel:preorder a) (h:heap) (r1:mref a rel) (r2:mref a rel) + :Lemma (requires (h `contains` r1 /\ addr_of r1 = addr_of r2 /\ is_mm r1 == is_mm r2)) + (ensures (h `contains` r2 /\ sel h r1 == sel h r2)) + [SMTPatOr [ + [SMTPat (sel h r1); SMTPat (sel h r2)]; + [SMTPat (h `contains` r1); SMTPat (h `contains` r2)]; + ]] + +(* + * AR: this is true only if the preorder is same, else r2 may not be contained in h + *) +val lemma_sel_upd1 (#a:Type0) (#rel:preorder a) (h:heap) (r1:mref a rel) (x:a) (r2:mref a rel) + :Lemma (requires (addr_of r1 = addr_of r2 /\ is_mm r1 == is_mm r2)) + (ensures (sel (upd h r1 x) r2 == x)) + [SMTPat (sel (upd h r1 x) r2)] + +val lemma_sel_upd2 (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h:heap) (r1:mref a rel1) (r2:mref b rel2) (x:b) + :Lemma (requires (addr_of r1 <> addr_of r2)) + (ensures (sel (upd h r2 x) r1 == sel h r1)) + [SMTPat (sel (upd h r2 x) r1)] + +val lemma_mref_injectivity + :(u:unit{forall (a:Type0) (b:Type0) (rel1:preorder a) (rel2:preorder b) (r1:mref a rel1) (r2:mref b rel2). 
a =!= b ==> ~ (r1 === r2)}) + +val lemma_in_dom_emp (#a:Type0) (#rel:preorder a) (r:mref a rel) + :Lemma (requires True) + (ensures (r `unused_in` emp)) + [SMTPat (r `unused_in` emp)] + +val lemma_upd_contains (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel) (x:a) + :Lemma (requires True) + (ensures ((upd h r x) `contains` r)) + [SMTPat ((upd h r x) `contains` r)] + +val lemma_well_typed_upd_contains + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h:heap) (r1:mref a rel1) (x:a) (r2:mref b rel2) + :Lemma (requires (h `contains` r1)) + (ensures (let h1 = upd h r1 x in + h1 `contains` r2 <==> h `contains` r2)) + [SMTPat ((upd h r1 x) `contains` r2)] + +val lemma_unused_upd_contains + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h:heap) (r1:mref a rel1) (x:a) (r2:mref b rel2) + :Lemma (requires (r1 `unused_in` h)) + (ensures (let h1 = upd h r1 x in + (h `contains` r2 ==> h1 `contains` r2) /\ + (h1 `contains` r2 ==> (h `contains` r2 \/ addr_of r2 = addr_of r1)))) + [SMTPat ((upd h r1 x) `contains` r2)] + +val lemma_upd_contains_different_addr + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h:heap) (r1:mref a rel1) (x:a) (r2:mref b rel2) + :Lemma (requires (h `contains` r2 /\ addr_of r1 <> addr_of r2)) + (ensures ((upd h r1 x) `contains` r2)) + [SMTPat ((upd h r1 x) `contains` r2)] + +val lemma_upd_unused + (#a:Type0) (#b:Type0) (#rel1:preorder a) (#rel2:preorder b) (h:heap) (r1:mref a rel1) (x:a) (r2:mref b rel2) + :Lemma (requires True) + (ensures ((addr_of r1 <> addr_of r2 /\ r2 `unused_in` h) <==> r2 `unused_in` (upd h r1 x))) + [SMTPat (r2 `unused_in` (upd h r1 x))] + +val lemma_contains_upd_modifies (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel) (x:a) + :Lemma (requires (h `contains` r)) + (ensures (modifies (S.singleton (addr_of r)) h (upd h r x))) + [SMTPat (upd h r x)] + +val lemma_unused_upd_modifies (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel) (x:a) + :Lemma (requires (r `unused_in` h)) + (ensures (modifies (Set.singleton (addr_of r)) h (upd h r x))) + [SMTPat (upd h r x); SMTPat (r `unused_in` h)] + +val lemma_sel_equals_sel_tot_for_contained_refs + (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel{h `contains` r}) + :Lemma (requires True) + (ensures (sel_tot h r == sel h r)) + +val lemma_upd_equals_upd_tot_for_contained_refs + (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel{h `contains` r}) (x:a) + :Lemma (requires True) + (ensures (upd_tot h r x == upd h r x)) + +val lemma_modifies_and_equal_dom_sel_diff_addr + (#a:Type0)(#rel:preorder a) (s:set nat) (h0:heap) (h1:heap) (r:mref a rel) + :Lemma (requires (modifies s h0 h1 /\ equal_dom h0 h1 /\ (~ (S.mem (addr_of r) s)))) + (ensures (sel h0 r == sel h1 r)) + [SMTPat (modifies s h0 h1); SMTPat (equal_dom h0 h1); SMTPat (sel h1 r)] + +val lemma_heap_equality_upd_same_addr (#a: Type0) (#rel: preorder a) (h: heap) (r1 r2: mref a rel) (x: a) + :Lemma (requires ((h `contains` r1 \/ h `contains` r2) /\ addr_of r1 = addr_of r2 /\ is_mm r1 == is_mm r2)) + (ensures (upd h r1 x == upd h r2 x)) + +val lemma_heap_equality_cancel_same_mref_upd + (#a:Type) (#rel:preorder a) (h:heap) (r:mref a rel) + (x:a) (y:a) + :Lemma (requires True) + (ensures (upd (upd h r x) r y == upd h r y)) + +val lemma_heap_equality_upd_with_sel + (#a:Type) (#rel:preorder a) (h:heap) (r:mref a rel) + :Lemma (requires (h `contains` r)) + (ensures (upd h r (sel h r) == h)) + +val lemma_heap_equality_commute_distinct_upds + (#a:Type) (#b:Type) (#rel_a:preorder a) (#rel_b:preorder b) (h:heap) (r1:mref a 
rel_a) (r2:mref b rel_b) + (x:a) (y:b) + :Lemma (requires (addr_of r1 =!= addr_of r2)) + (ensures (upd (upd h r1 x) r2 y == upd (upd h r2 y) r1 x)) + +val lemma_next_addr_upd_tot + (#a:Type0) (#rel:preorder a) (h0:heap) (r:mref a rel{h0 `contains` r}) (x:a) + :Lemma (let h1 = upd_tot h0 r x in next_addr h1 == next_addr h0) + +val lemma_next_addr_upd + (#a:Type0) (#rel:preorder a) (h0:heap) (r:mref a rel) (x:a) + :Lemma (let h1 = upd h0 r x in next_addr h1 >= next_addr h0) + +val lemma_next_addr_alloc + (#a:Type0) (rel:preorder a) (h0:heap) (x:a) (mm:bool) + :Lemma (let _, h1 = alloc rel h0 x mm in next_addr h1 > next_addr h0) + +val lemma_next_addr_free_mm + (#a:Type0) (#rel:preorder a) (h0:heap) (r:mref a rel{h0 `contains` r /\ is_mm r}) + :Lemma (let h1 = free_mm h0 r in next_addr h1 == next_addr h0) + +val lemma_next_addr_contained_refs_addr + (#a:Type0) (#rel:preorder a) (h:heap) (r:mref a rel) + :Lemma (h `contains` r ==> addr_of r < next_addr h) + +(*** Untyped views of monotonic references *) + +(* Definition and ghost decidable equality *) +val aref: Type0 +val dummy_aref: aref +val aref_equal (a1 a2: aref) : Ghost bool (requires True) (ensures (fun b -> b == true <==> a1 == a2)) + +(* Introduction rule *) +val aref_of: #t: Type0 -> #rel: preorder t -> r: mref t rel -> Tot aref + +(* Operators lifted from ref *) +val addr_of_aref: a: aref -> GTot (n: nat { n > 0 } ) +val addr_of_aref_of: #t: Type0 -> #rel: preorder t -> r: mref t rel -> Lemma (addr_of r == addr_of_aref (aref_of r)) +[SMTPat (addr_of_aref (aref_of r))] +val aref_is_mm: aref -> GTot bool +val is_mm_aref_of: #t: Type0 -> #rel: preorder t -> r: mref t rel -> Lemma (is_mm r == aref_is_mm (aref_of r)) +[SMTPat (aref_is_mm (aref_of r))] +val aref_unused_in: aref -> heap -> Type0 +val unused_in_aref_of: #t: Type0 -> #rel: preorder t -> r: mref t rel -> h: heap -> Lemma (unused_in r h <==> aref_unused_in (aref_of r) h) +[SMTPat (aref_unused_in (aref_of r) h)] +val contains_aref_unused_in: #a:Type -> #rel: preorder a -> h:heap -> x:mref a rel -> y:aref -> Lemma + (requires (contains h x /\ aref_unused_in y h)) + (ensures (addr_of x <> addr_of_aref y)) + +(* Elimination rule *) +val aref_live_at: h: heap -> a: aref -> t: Type0 -> rel: preorder t -> GTot Type0 +val gref_of: a: aref -> t: Type0 -> rel: preorder t -> Ghost (mref t rel) (requires (exists h . aref_live_at h a t rel)) (ensures (fun _ -> True)) +val ref_of: h: heap -> a: aref -> t: Type0 -> rel: preorder t -> Pure (mref t rel) (requires (aref_live_at h a t rel)) (ensures (fun x -> aref_live_at h a t rel /\ addr_of (gref_of a t rel) == addr_of x /\ is_mm x == aref_is_mm a)) +val aref_live_at_aref_of + (h: heap) + (#t: Type0) + (#rel: preorder t) + (r: mref t rel) +: Lemma + (ensures (aref_live_at h (aref_of r) t rel <==> contains h r)) + [SMTPat (aref_live_at h (aref_of r) t rel)] +val contains_gref_of + (h: heap) + (a: aref) + (t: Type0) + (rel: preorder t) +: Lemma + (requires (exists h' . aref_live_at h' a t rel)) + (ensures ((exists h' . aref_live_at h' a t rel) /\ (contains h (gref_of a t rel) <==> aref_live_at h a t rel))) + [SMTPatOr [ + [SMTPat (contains h (gref_of a t rel))]; + [SMTPat (aref_live_at h a t rel)]; + ]] + +val aref_of_gref_of + (a: aref) + (t: Type0) + (rel: preorder t) +: Lemma + (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . 
aref_live_at h a t rel) /\ aref_of (gref_of a t rel) == a)) + [SMTPat (aref_of (gref_of a t rel))] + +(* Operators lowered to ref *) +val addr_of_gref_of + (a: aref) + (t: Type0) + (rel: preorder t) +: Lemma + (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . aref_live_at h a t rel) /\ addr_of (gref_of a t rel) == addr_of_aref a)) + [SMTPat (addr_of (gref_of a t rel))] + +val is_mm_gref_of + (a: aref) + (t: Type0) + (rel: preorder t) +: Lemma + (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . aref_live_at h a t rel) /\ is_mm (gref_of a t rel) == aref_is_mm a)) + [SMTPat (is_mm (gref_of a t rel))] + +val unused_in_gref_of + (a: aref) + (t: Type0) + (rel: preorder t) + (h: heap) +: Lemma + (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . aref_live_at h a t rel) /\ (unused_in (gref_of a t rel) h <==> aref_unused_in a h))) + [SMTPat (unused_in (gref_of a t rel) h)] + +val sel_ref_of + (a: aref) + (t: Type0) + (rel: preorder t) + (h1 h2: heap) +: Lemma + (requires (aref_live_at h1 a t rel /\ aref_live_at h2 a t rel)) + (ensures (aref_live_at h2 a t rel /\ sel h1 (ref_of h2 a t rel) == sel h1 (gref_of a t rel))) + [SMTPat (sel h1 (ref_of h2 a t rel))] + +val upd_ref_of + (a: aref) + (t: Type0) + (rel: preorder t) + (h1 h2: heap) + (x: t) +: Lemma + (requires (aref_live_at h1 a t rel /\ aref_live_at h2 a t rel)) + (ensures (aref_live_at h2 a t rel /\ upd h1 (ref_of h2 a t rel) x == upd h1 (gref_of a t rel) x)) + [SMTPat (upd h1 (ref_of h2 a t rel) x)] diff --git a/stage0/ulib/FStar.Monotonic.HyperHeap.fst b/stage0/ulib/FStar.Monotonic.HyperHeap.fst new file mode 100644 index 00000000000..284ef0cf097 --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.HyperHeap.fst @@ -0,0 +1,139 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Monotonic.HyperHeap + +module Set = FStar.Set +module Map = FStar.Map + +open FStar.Monotonic.Heap +open FStar.Ghost + +(* + * This is a temporary assumption, we should fix the model to get rid of it + *) +assume HasEq_rid: hasEq (erased (list (int & int & bool))) + +let rid = erased (list (int & int & bool)) + +let reveal r = FStar.List.Tot.map (fun (i, j, _) -> i, j) (reveal r) + +let color r = + match reveal r with + | [] -> 0 + | (c, _)::_ -> c + +let rid_freeable r = + match Ghost.reveal r with + | [] -> false + | (_, _, b)::_ -> b + +let root = hide [] + +let root_last_component () = () + +let lemma_root_has_color_zero _ = () + +let root_is_not_freeable () = () + +let rid_length r = List.Tot.length (reveal r) + +let rid_tail (r:rid{rid_length r > 0}) = elift1_p (tot_to_gtot Cons?.tl) r + +let rec includes r1 r2 = + if r1 = r2 then true + else if rid_length r2 > rid_length r1 + then includes r1 (rid_tail r2) + else false + +private let rec lemma_aux (k:rid) (i:rid) + :Lemma (requires (rid_length k > 0 /\ + rid_length k <= rid_length i /\ + includes k i /\ + not (includes (rid_tail k) i))) + (ensures False) + (decreases (rid_length i)) + = lemma_aux k (rid_tail i) + +let rec lemma_disjoint_includes i j k = + if rid_length k <= rid_length j + then () + else (lemma_disjoint_includes i j (rid_tail k); + if rid_length i <= rid_length (rid_tail k) + then () + else (if includes k i + then lemma_aux k i + else ())) + +let extends r0 r1 = Cons? (reveal r0) && rid_tail r0 = r1 + +let parent r = rid_tail r + +let lemma_includes_refl _ = () + +let lemma_extends_includes _ _ = () + +let lemma_includes_anti_symmetric _ _ = () + +let lemma_extends_disjoint _ _ _ = () + +let lemma_extends_parent _ = () + +let lemma_extends_not_root _ _ = () + +let lemma_extends_only_parent _ _ = () + +private let test0 :unit = + assert (includes (hide [(0, 1, false) ; (1, 0, false)]) (hide [(2, 2, false); (0, 1, false); (1, 0, false)])) + +private let test1 (r1:rid) (r2:rid{includes r1 r2}) :unit = assert (includes r1 (hide ((0, 0, false)::(Ghost.reveal r2)))) + +let mod_set _ = magic () + +let rec lemma_includes_trans i j k = + if j = k then () + else match Ghost.reveal k with + | hd::tl -> lemma_includes_trans i j (hide tl) + +let lemma_modset _ _ = () + +let lemma_modifies_includes _ _ _ _ = () + +let lemma_modifies_includes2 _ _ _ _ = () + +let lemma_disjoint_parents _ _ _ _ = () + +let lemma_include_cons _ _ = () + +let extends_parent _ _ = () + +let includes_child _ _ = () + +let root_is_root _ = () + +let extend r n c = + elift1 (fun r -> + let freeable = rid_freeable (hide r) in + (c, n, freeable)::r + ) r + +let extend_monochrome_freeable r n freeable = + elift1 (fun r -> + let c = color (hide r) in + (c, n, freeable)::r + ) r + +let extend_monochrome r n = extend_monochrome_freeable r n (rid_freeable r) + diff --git a/stage0/ulib/FStar.Monotonic.HyperHeap.fsti b/stage0/ulib/FStar.Monotonic.HyperHeap.fsti new file mode 100644 index 00000000000..927f269cbaa --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.HyperHeap.fsti @@ -0,0 +1,195 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.HyperHeap + +module Set = FStar.Set +module Map = FStar.Map + +open FStar.Monotonic.Heap +open FStar.Ghost + +(* + * This module provides the map view of the memory and associated functions and lemmas + * The intention of this module is for it to be included in HyperStack + * Clients should not open/know about HyperHeap, they should work only with HyperStack + *) + +(* + * AR: mark it must_erase_for_extraction temporarily until CMI comes in + *) +[@@must_erase_for_extraction] +val rid :eqtype + +val reveal (r:rid) :GTot (list (int & int)) + +let rid_last_component (r:rid) :GTot int + = let open FStar.List.Tot.Base in + let r = reveal r in + if length r = 0 then 0 + else snd (hd r) + +val color (x:rid) :GTot int + +val rid_freeable (x:rid) : GTot bool + +type hmap = Map.t rid heap + +val root : r:rid{color r == 0 /\ not (rid_freeable r)} + +val root_last_component (_:unit) : Lemma (rid_last_component root == 0) + +let root_has_color_zero (u:unit) :Lemma (color root == 0) = () + +val root_is_not_freeable (_:unit) : Lemma (not (rid_freeable root)) + +private val rid_length (r:rid) :GTot nat + +private val rid_tail (r:rid{rid_length r > 0}) :rid + +val includes (r1:rid) (r2:rid) :GTot bool (decreases (reveal r2)) + +let disjoint (i:rid) (j:rid) :GTot bool = not (includes i j) && not (includes j i) + +val lemma_disjoint_includes (i:rid) (j:rid) (k:rid) + :Lemma (requires (disjoint i j /\ includes j k)) + (ensures (disjoint i k)) + (decreases (List.Tot.Base.length (reveal k))) + [SMTPat (disjoint i j); SMTPat (includes j k)] + +val extends (i:rid) (j:rid) :GTot bool + +val parent (r:rid{r =!= root}) :rid + +val lemma_includes_refl (i:rid) + :Lemma (includes i i) + [SMTPat (includes i i)] + +val lemma_extends_includes (i:rid) (j:rid) + :Lemma (requires (extends j i)) + (ensures (includes i j /\ not(includes j i))) + [SMTPat (extends j i)] + +val lemma_includes_anti_symmetric (i:rid) (j:rid) + :Lemma (requires (includes i j /\ i =!= j)) + (ensures (not (includes j i))) + [SMTPat (includes i j)] + +val lemma_extends_disjoint (i:rid) (j:rid) (k:rid) + :Lemma (requires (extends j i /\ extends k i /\ j =!= k)) + (ensures (disjoint j k)) + +val lemma_extends_parent (i:rid{i =!= root}) + :Lemma (extends i (parent i)) + [SMTPat (parent i)] + +val lemma_extends_not_root (i:rid) (j:rid{extends j i}) + :Lemma (j =!= root) + [SMTPat (extends j i)] + +val lemma_extends_only_parent (i:rid) (j:rid{extends j i}) + :Lemma (i == parent j) + [SMTPat (extends j i)] + +val mod_set (s:Set.set rid) :(Set.set rid) +assume Mod_set_def: forall (x:rid) (s:Set.set rid). {:pattern Set.mem x (mod_set s)} + Set.mem x (mod_set s) <==> (exists (y:rid). 
Set.mem y s /\ includes y x) + +let modifies (s:Set.set rid) (m0:hmap) (m1:hmap) = + Map.equal m1 (Map.concat m1 (Map.restrict (Set.complement (mod_set s)) m0)) /\ + Set.subset (Map.domain m0) (Map.domain m1) + +let modifies_just (s:Set.set rid) (m0:hmap) (m1:hmap) = + Map.equal m1 (Map.concat m1 (Map.restrict (Set.complement s) m0)) /\ + Set.subset (Map.domain m0) (Map.domain m1) + +let modifies_one (r:rid) (m0:hmap) (m1:hmap) = modifies_just (Set.singleton r) m0 m1 + +let equal_on (s:Set.set rid) (m0:hmap) (m1:hmap) = + (forall (r:rid). {:pattern (Map.contains m0 r)} (Set.mem r (mod_set s) /\ Map.contains m0 r) ==> Map.contains m1 r) /\ + Map.equal m1 (Map.concat m1 (Map.restrict (mod_set s) m0)) + +let lemma_modifies_just_trans (m1:hmap) (m2:hmap) (m3:hmap) + (s1:Set.set rid) (s2:Set.set rid) + :Lemma (requires (modifies_just s1 m1 m2 /\ modifies_just s2 m2 m3)) + (ensures (modifies_just (Set.union s1 s2) m1 m3)) + = () + +let lemma_modifies_trans (m1:hmap) (m2:hmap) (m3:hmap) + (s1:Set.set rid) (s2:Set.set rid) + :Lemma (requires (modifies s1 m1 m2 /\ modifies s2 m2 m3)) + (ensures (modifies (Set.union s1 s2) m1 m3)) + = () + +val lemma_includes_trans (i:rid) (j:rid) (k:rid) + :Lemma (requires (includes i j /\ includes j k)) + (ensures (includes i k)) + (decreases (reveal k)) + [SMTPat (includes i j); SMTPat (includes j k)] + +val lemma_modset (i:rid) (j:rid) + :Lemma (requires (includes j i)) + (ensures (Set.subset (mod_set (Set.singleton i)) (mod_set (Set.singleton j)))) + +val lemma_modifies_includes (m1:hmap) (m2:hmap) (i:rid) (j:rid) + :Lemma (requires (modifies (Set.singleton i) m1 m2 /\ includes j i)) + (ensures (modifies (Set.singleton j) m1 m2)) + +val lemma_modifies_includes2 (m1:hmap) (m2:hmap) (s1:Set.set rid) (s2:Set.set rid) + :Lemma (requires (modifies s1 m1 m2 /\ (forall x. Set.mem x s1 ==> (exists y. Set.mem y s2 /\ includes y x)))) + (ensures (modifies s2 m1 m2)) + +val lemma_disjoint_parents (pr:rid) (r:rid) (ps:rid) (s:rid) + :Lemma (requires (r `extends` pr /\ s `extends` ps /\ disjoint pr ps)) + (ensures (disjoint r s)) + [SMTPat (extends r pr); SMTPat (extends s ps); SMTPat (disjoint pr ps)] + +val lemma_include_cons (i:rid) (j:rid) + :Lemma (requires (i =!= j /\ includes i j)) + (ensures (j =!= root)) + +let disjoint_regions (s1:Set.set rid) (s2:Set.set rid) = + forall x y. {:pattern (Set.mem x s1); (Set.mem y s2)} (Set.mem x s1 /\ Set.mem y s2) ==> disjoint x y + +val extends_parent (tip:rid{tip =!= root}) (r:rid) + :Lemma (extends r (parent tip) /\ r =!= tip ==> disjoint r tip \/ extends r tip) + [SMTPat (extends r (parent tip))] + +val includes_child (tip:rid{tip =!= root}) (r:rid) + :Lemma (includes r tip ==> r == tip \/ includes r (parent tip)) + [SMTPat (includes r (parent tip))] + +val root_is_root (s:rid) + :Lemma (requires (includes s root)) + (ensures (s == root)) + [SMTPat (includes s root)] + +unfold +let extend_post (r:rid) (n:int) (c:int) (freeable:bool) : pure_post rid = + fun s -> + s `extends` r /\ + Cons? 
(reveal s) /\ + Cons?.hd (reveal s) == (c, n) /\ + color s == c /\ + rid_freeable s == freeable + +val extend (r:rid) (n:int) (c:int) +: Pure rid (requires True) (extend_post r n c (rid_freeable r)) + +val extend_monochrome_freeable (r:rid) (n:int) (freeable:bool) +: Pure rid (requires True) (extend_post r n (color r) freeable) + +val extend_monochrome (r:rid) (n:int) +: Pure rid (requires True) (extend_post r n (color r) (rid_freeable r)) diff --git a/stage0/ulib/FStar.Monotonic.HyperStack.fst b/stage0/ulib/FStar.Monotonic.HyperStack.fst new file mode 100644 index 00000000000..54a0c230f6f --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.HyperStack.fst @@ -0,0 +1,208 @@ +(* + Copyright 2008-2014 Aseem Rastogi, and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.HyperStack + +open FStar.Preorder +module Map = FStar.Map + +let map_invariant = map_invariant_predicate + +let downward_closed = downward_closed_predicate + +let tip_top = tip_top_predicate + +let rid_ctr_pred = rid_ctr_pred_predicate + +noeq type mem' = + | HS :rid_ctr:int -> h:hmap -> tip:rid -> mem' + +let mk_mem rid_ctr h tip = HS rid_ctr h tip + +let get_hmap m = m.h +let get_rid_ctr m = m.rid_ctr +let get_tip m = m.tip + +let lemma_mk_mem'_projectors _ _ _ = () + +let lemma_mem_projectors_are_in_wf_relation _ = () + +let lemma_is_wf_ctr_and_tip_intro _ _ _ = root_is_not_freeable () + +let lemma_is_wf_ctr_and_tip_elim _ = () + +let lemma_map_invariant _ _ _ = () + +let lemma_downward_closed _ _ _ = () + +let lemma_tip_top _ _ = () + +let lemma_tip_top_smt _ _ = () + +let lemma_rid_ctr_pred _ = () + +let as_ref #_ #_ x = MkRef?.ref x + +let lemma_as_ref_inj #_ #_ _ = () + +private val lemma_extends_fresh_disjoint: i:rid -> j:rid -> ipar:rid -> jpar:rid + -> (m0:mem) -> (m1:mem) -> + Lemma (requires (let h0, h1 = get_hmap m0, get_hmap m1 in + (map_invariant h0 /\ + map_invariant h1 /\ + fresh_region i m0 m1 /\ + fresh_region j m0 m1 /\ + h0 `Map.contains` ipar /\ + h0 `Map.contains` jpar /\ + extends i ipar /\ + extends j jpar /\ + i<>j))) + (ensures (disjoint i j)) +let lemma_extends_fresh_disjoint i j ipar jpar m0 m1 = () + +let lemma_sel_same_addr #_ #_ _ _ _ = () + +let lemma_upd_same_addr #_ #_ h r1 r2 x = + FStar.Monotonic.Heap.lemma_heap_equality_upd_same_addr (Map.sel h.h (frameOf r1)) (as_ref r1) (as_ref r2) x; + Classical.or_elim #(h `contains` r1) #(~ (h `contains` r1)) + #(fun _ -> h `contains` r1 /\ h `contains` r2 /\ upd h r1 x == upd h r2 x) + (fun _ -> lemma_sel_same_addr h r1 r2) (fun _ -> lemma_sel_same_addr h r2 r1) + +let mreference_distinct_sel_disjoint #_ #_ #_ h r1 r2 = + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + Heap.lemma_sel_same_addr (Map.sel h.h (frameOf r1)) (as_ref r1) (as_ref r2) + +private let lemma_pop_is_popped (m0:mem{poppable m0}) + : Lemma (popped m0 (pop m0)) + = let m1 = pop m0 in + assert (Set.equal (Map.domain m1.h) (remove_elt (Map.domain m0.h) m0.tip)) + +let modifies_drop_tip _ _ _ _ = () + +let 
eternal_disjoint_from_tip _ _ = () + +let above_tip_is_live #_ #_ _ _ = () + +let lemma_heap_equality_cancel_same_mref_upd #_ #_ h r x y = + let h0 = upd (upd h r x) r y in + let h1 = upd h r y in + Heap.lemma_heap_equality_cancel_same_mref_upd (Map.sel h.h (frameOf r)) (as_ref r) x y; + assert (Map.equal h0.h h1.h) + +let lemma_heap_equality_upd_with_sel #_ #_ h r = + let h' = upd h r (sel h r) in + Heap.lemma_heap_equality_upd_with_sel (Map.sel h.h (frameOf r)) (as_ref r); + assert (Map.equal h.h h'.h) + +let lemma_heap_equality_commute_distinct_upds #_ #_ #_ #_ h r1 r2 x y = + let h0 = upd (upd h r1 x) r2 y in + let h1 = upd (upd h r2 y) r1 x in + if frameOf r1 = frameOf r2 then Heap.lemma_heap_equality_commute_distinct_upds (Map.sel h.h (frameOf r1)) (as_ref r1) (as_ref r2) x y; + assert (Map.equal h0.h h1.h) + +let lemma_next_addr_contained_refs_addr _ = + let aux (a:Type0) (rel:preorder a) (r:mreference a rel) (m:mem) + :Lemma (m `contains` r ==> as_addr r < Heap.next_addr (get_hmap m `Map.sel` frameOf r)) + = Heap.lemma_next_addr_contained_refs_addr (get_hmap m `Map.sel` frameOf r) (as_ref r) + in + Classical.forall_intro_4 aux + +private let lemma_upd_1 #a #rel (h:mem) (x:mreference a rel) (v:a{rel (sel h x) v}) : Lemma + (requires (contains h x)) + (ensures (contains h x + /\ modifies_one (frameOf x) h (upd h x v) + /\ modifies_ref (frameOf x) (Set.singleton (as_addr x)) h (upd h x v) + /\ sel (upd h x v) x == v )) + = () + +private let lemma_upd_2 (#a:Type) (#rel:preorder a) (h:mem) (x:mreference a rel) (v:a{rel (sel h x) v}) : Lemma + (requires (frameOf x = get_tip h /\ x `unused_in` h)) + (ensures (frameOf x = get_tip h + /\ modifies_one (get_tip h) h (upd h x v) + /\ modifies_ref (get_tip h) Set.empty h (upd h x v) + /\ sel (upd h x v) x == v )) + = () + +private val lemma_live_1: #a:Type -> #a':Type -> #rel:preorder a -> #rel':preorder a' + -> h:mem -> x:mreference a rel -> x':mreference a' rel' -> Lemma + (requires (contains h x /\ x' `unused_in` h)) + (ensures (frameOf x <> frameOf x' \/ ~ (as_ref x === as_ref x'))) +let lemma_live_1 #a #a' #rel #rel' h x x' = () + +(*** Untyped views of references *) + +(* Definition and ghost decidable equality *) + +noeq type aref = + | ARef: aref_region:rid -> + aref_aref:Heap.aref -> + aref + +let dummy_aref = ARef root Heap.dummy_aref + +let aref_equal a1 a2 = a1.aref_region = a2.aref_region && Heap.aref_equal a1.aref_aref a2.aref_aref + +let aref_of #_ #_ r = ARef (frameOf r) (Heap.aref_of (as_ref r)) + +let frameOf_aref a = a.aref_region + +let frameOf_aref_of #_ #_ _ = () + +let aref_as_addr a = Heap.addr_of_aref a.aref_aref + +let aref_as_addr_aref_of #_ #_ r = Heap.addr_of_aref_of (as_ref r) + +let aref_is_mm r = Heap.aref_is_mm r.aref_aref + +let is_mm_aref_of #_ #_ r = Heap.is_mm_aref_of (as_ref r) + +let aref_unused_in a h = + ~ (live_region h a.aref_region) \/ + Heap.aref_unused_in a.aref_aref (Map.sel h.h a.aref_region) + +let unused_in_aref_of #_ #_ r h = Heap.unused_in_aref_of (as_ref r) (Map.sel h.h (frameOf r)) + +let contains_aref_unused_in #_ #_ h x y = + if frameOf x = frameOf_aref y + then + Heap.contains_aref_unused_in (Map.sel h.h (frameOf x)) (as_ref x) y.aref_aref + else () + +let aref_live_at h a v rel = + live_region h a.aref_region /\ + Heap.aref_live_at (Map.sel h.h a.aref_region) a.aref_aref v rel + +let greference_of a v rel = MkRef a.aref_region (Heap.gref_of a.aref_aref v rel) + +let reference_of h a v rel = MkRef a.aref_region (Heap.ref_of (Map.sel h.h a.aref_region) a.aref_aref v rel) + +let 
aref_live_at_aref_of _ #_ #_ _ = () + +let contains_greference_of _ _ _ _ = () + +let aref_of_greference_of _ _ _ = () + +let frameOf_greference_of _ _ _ = () + +let as_addr_greference_of _ _ _ = () + +let is_mm_greference_of _ _ _ = () + +let unused_in_greference_of _ _ _ _ = () + +let sel_reference_of _ _ _ _ _ = () + +let upd_reference_of _ _ _ _ _ _ = () diff --git a/stage0/ulib/FStar.Monotonic.HyperStack.fsti b/stage0/ulib/FStar.Monotonic.HyperStack.fsti new file mode 100644 index 00000000000..a05e34df1ef --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.HyperStack.fsti @@ -0,0 +1,624 @@ +(* + Copyright 2008-2014 Aseem Rastogi, and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.HyperStack + +open FStar.Preorder +module Map = FStar.Map + +include FStar.Monotonic.HyperHeap + + +(****** Some predicates ******) + +unfold let is_in (r:rid) (h:hmap) = h `Map.contains` r + +let is_stack_region r = color r > 0 +let is_heap_color c = c <= 0 + +[@@(deprecated "FStar.HyperStack.ST.is_eternal_region")] +let is_eternal_region r = is_heap_color (color r) && not (rid_freeable r) + +unfold let is_eternal_region_hs r = is_heap_color (color r) && not (rid_freeable r) + +type sid = r:rid{is_stack_region r} //stack region ids + +(* + * AR: marking these unfolds, else I think there are pattern firing issues depending on which one we use + *) +unfold let is_above r1 r2 = r1 `includes` r2 +unfold let is_just_below r1 r2 = r1 `extends` r2 +unfold let is_below r1 r2 = r2 `is_above` r1 +let is_strictly_below r1 r2 = r1 `is_below` r2 && r1 <> r2 +let is_strictly_above r1 r2 = r1 `is_above` r2 && r1 <> r2 + + +[@@"opaque_to_smt"] +unfold private let map_invariant_predicate (m:hmap) :Type0 = + forall r. Map.contains m r ==> + (forall s. includes s r ==> Map.contains m s) + +[@@"opaque_to_smt"] +unfold private let downward_closed_predicate (h:hmap) :Type0 = + forall (r:rid). r `is_in` h //for any region in the memory + ==> (r=root //either is the root + \/ (forall (s:rid). (r `is_above` s //or, any region beneath it + /\ s `is_in` h) //that is also in the memory + ==> ((is_stack_region r = is_stack_region s) /\ //must be of the same flavor as itself + ((is_heap_color (color r) /\ rid_freeable r) ==> s == r)))) //and if r is a freeable heap region, s can only be r (no regions strictly below r) + +[@@"opaque_to_smt"] +unfold private let tip_top_predicate (tip:rid) (h:hmap) :Type0 = + forall (r:sid). r `is_in` h <==> r `is_above` tip + +[@@"opaque_to_smt"] +unfold private let rid_ctr_pred_predicate (h:hmap) (n:int) :Type0 = + forall (r:rid). 
h `Map.contains` r ==> rid_last_component r < n + + +(****** Mem definition ******) + +[@@ remove_unused_type_parameters [0]] +val map_invariant (m:hmap) :Type0 //all regions above a contained region are contained +[@@ remove_unused_type_parameters [0]] +val downward_closed (h:hmap) :Type0 //regions below a non-root region are of the same color +[@@ remove_unused_type_parameters [0;1]] +val tip_top (tip:rid) (h:hmap) :Type0 //all contained stack regions are above tip +[@@ remove_unused_type_parameters [0;1]] +val rid_ctr_pred (h:hmap) (n:int) :Type0 //all live regions have last component less than the rid_ctr + +let is_tip (tip:rid) (h:hmap) = + (is_stack_region tip \/ tip = root) /\ //the tip is a stack region, or the root + tip `is_in` h /\ //the tip is live + tip_top tip h //any other sid activation is a above (or equal to) the tip + +let is_wf_with_ctr_and_tip (h:hmap) (ctr:int) (tip:rid) + = (not (rid_freeable root)) /\ + root `is_in` h /\ + tip `is_tip` h /\ + map_invariant h /\ + downward_closed h /\ + rid_ctr_pred h ctr + +private val mem' :Type u#1 + +private val mk_mem (rid_ctr:int) (h:hmap) (tip:rid) :mem' + +val get_hmap (m:mem') :hmap +val get_rid_ctr (m:mem') :int +val get_tip (m:mem') :rid + +private val lemma_mk_mem'_projectors (rid_ctr:int) (h:hmap) (tip:rid) + :Lemma (requires True) + (ensures (let m = mk_mem rid_ctr h tip in + (get_hmap m == h /\ get_rid_ctr m == rid_ctr /\ get_tip m == tip))) + [SMTPatOr [[SMTPat (get_hmap (mk_mem rid_ctr h tip))]; + [SMTPat (get_rid_ctr (mk_mem rid_ctr h tip))]; + [SMTPat (get_tip (mk_mem rid_ctr h tip))] + ]] + +type mem :Type = m:mem'{is_wf_with_ctr_and_tip (get_hmap m) (get_rid_ctr m) (get_tip m) } + + +(****** Lemmas about mem and predicates ******) + +private val lemma_mem_projectors_are_in_wf_relation (m:mem) + :Lemma (is_wf_with_ctr_and_tip (get_hmap m) (get_rid_ctr m) (get_tip m)) + +private val lemma_is_wf_ctr_and_tip_intro (h:hmap) (ctr:int) (tip:rid) + :Lemma (requires (root `is_in` h /\ (is_stack_region tip \/ tip = root) /\ tip `is_in` h /\ + tip_top_predicate tip h /\ map_invariant_predicate h /\ + downward_closed_predicate h /\ rid_ctr_pred_predicate h ctr)) + (ensures (is_wf_with_ctr_and_tip h ctr tip)) + +private val lemma_is_wf_ctr_and_tip_elim (m:mem) + :Lemma (let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + (root `is_in` h /\ (is_stack_region tip \/ tip = root) /\ tip `is_in` h /\ + tip_top_predicate tip h /\ map_invariant_predicate h /\ + downward_closed_predicate h /\ rid_ctr_pred_predicate h rid_ctr)) + +(******* map_invariant related lemmas ******) + +val lemma_map_invariant (m:mem) (r s:rid) + :Lemma (requires (r `is_in` get_hmap m /\ s `is_above` r)) + (ensures (s `is_in` get_hmap m)) + [SMTPat (r `is_in` get_hmap m); SMTPat (s `is_above` r); SMTPat (s `is_in` get_hmap m)] + +(****** downward_closed related lemmas *******) + +val lemma_downward_closed (m:mem) (r:rid) (s:rid{s =!= root}) + :Lemma (requires (r `is_in` get_hmap m /\ s `is_above` r)) + (ensures (is_heap_color (color r) == is_heap_color (color s) /\ + is_stack_region r == is_stack_region s)) + [SMTPatOr [[SMTPat (get_hmap m `Map.contains` r); SMTPat (s `is_above` r); SMTPat (is_heap_color (color s))]; + [SMTPat (get_hmap m `Map.contains` r); SMTPat (s `is_above` r); SMTPat (is_stack_region s)] + ]] + +(****** tip_top related lemmas ******) + +val lemma_tip_top (m:mem) (r:sid) + :Lemma (r `is_in` get_hmap m <==> r `is_above` get_tip m) + +(* + * Pointer uses lemma_tip_top by calling it explicitly with Classical.forall_intro2 + * 
Classical.forall_intro2 does not work well with SMTPat + * So adding this smt form of the same lemma + *) +val lemma_tip_top_smt (m:mem) (r:rid) + :Lemma (requires (is_stack_region r)) + (ensures (r `is_in` get_hmap m <==> r `is_above` get_tip m)) + [SMTPatOr [[SMTPat (is_stack_region r); SMTPat (r `is_above` get_tip m)]; + [SMTPat (is_stack_region r); SMTPat (r `is_in` get_hmap m)]]] + +(****** rid_ctr_pred related lemmas ******) + +val lemma_rid_ctr_pred (_:unit) + :Lemma (forall (m:mem) (r:rid).{:pattern (get_hmap m `Map.contains` r)} get_hmap m `Map.contains` r ==> rid_last_component r < get_rid_ctr m) + +(*****) + +(****** Operations on mem ******) + + +let empty_mem : mem = + let empty_map = Map.restrict Set.empty (Map.const Heap.emp) in + let h = Map.upd empty_map root Heap.emp in + let tip = root in + root_last_component (); + lemma_is_wf_ctr_and_tip_intro h 1 tip; + mk_mem 1 h tip + +let heap_region_does_not_overlap_with_tip + (m:mem) (r:rid{is_heap_color (color r) /\ not (disjoint r (get_tip m)) /\ r =!= root /\ is_stack_region (get_tip m)}) + : Lemma (requires True) + (ensures (~ (r `is_in` get_hmap m))) + = root_has_color_zero() + +let poppable (m:mem) = get_tip m =!= root + +private let remove_elt (#a:eqtype) (s:Set.set a) (x:a) = Set.intersect s (Set.complement (Set.singleton x)) + +let popped (m0 m1:mem) = + poppable m0 /\ + (let h0, tip0, h1, tip1 = get_hmap m0, get_tip m0, get_hmap m1, get_tip m1 in + (parent tip0 = tip1 /\ + Set.equal (Map.domain h1) (remove_elt (Map.domain h0) tip0) /\ + Map.equal h1 (Map.restrict (Map.domain h1) h0))) + +let pop (m0:mem{poppable m0}) :mem = + let h0, tip0, rid_ctr0 = get_hmap m0, get_tip m0, get_rid_ctr m0 in + root_has_color_zero(); + lemma_is_wf_ctr_and_tip_elim m0; + let dom = remove_elt (Map.domain h0) tip0 in + let h1 = Map.restrict dom h0 in + let tip1 = parent tip0 in + lemma_is_wf_ctr_and_tip_intro h1 rid_ctr0 tip1; + mk_mem rid_ctr0 h1 tip1 + +//A (reference a) may reside in the stack or heap, and may be manually managed +//Mark it private so that clients can't use its projectors etc. +//enabling extraction of mreference to just a reference in ML and pointer in C +//note that this not enforcing any abstraction +(* + * AR: 12/26: Defining it using Heap.mref directly, removing the HyperHeap.mref indirection + *) +private noeq +type mreference' (a:Type) (rel:preorder a) = + | MkRef : frame:rid -> ref:Heap.mref a rel -> mreference' a rel + +let mreference a rel = mreference' a rel + +//TODO: rename to frame_of, avoiding the inconsistent use of camelCase +let frameOf (#a:Type) (#rel:preorder a) (r:mreference a rel) :rid + = r.frame + +let mk_mreference (#a:Type) (#rel:preorder a) (id:rid) + (r:Heap.mref a rel) + :mreference a rel + = MkRef id r + +//Hopefully we can get rid of this one +val as_ref (#a:Type0) (#rel:preorder a) (x:mreference a rel) + :Heap.mref a rel + +//And make this one abstract +let as_addr #a #rel (x:mreference a rel) + :GTot pos + = Heap.addr_of (as_ref x) + +val lemma_as_ref_inj (#a:Type) (#rel:preorder a) (r:mreference a rel) + :Lemma (requires True) (ensures (mk_mreference (frameOf r) (as_ref r) == r)) + [SMTPat (as_ref r)] + +let is_mm (#a:Type) (#rel:preorder a) (r:mreference a rel) :GTot bool = + Heap.is_mm (as_ref r) + +// Warning: all of the type aliases below get special support for KaRaMeL +// extraction. If you rename or add to this list, +// src/extraction/FStar.Extraction.Karamel.fs needs to be updated. 
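+// (A summary of the four aliases below, read off their definitions, along two axes:
+//  the flavor of the region the reference lives in, and whether it is manually managed.)
+//   mstackref   a rel -- stack region,   not manually managed
+//   mref        a rel -- eternal region, not manually managed
+//   mmmstackref a rel -- stack region,   manually managed
+//   mmmref      a rel -- eternal region, manually managed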
+ +//adding (not s.mm) to stackref and ref so as to keep their semantics as is +let mstackref (a:Type) (rel:preorder a) = + s:mreference a rel{ is_stack_region (frameOf s) && not (is_mm s) } + +let mref (a:Type) (rel:preorder a) = + s:mreference a rel{ is_eternal_region_hs (frameOf s) && not (is_mm s) } + +let mmmstackref (a:Type) (rel:preorder a) = + s:mreference a rel{ is_stack_region (frameOf s) && is_mm s } + +let mmmref (a:Type) (rel:preorder a) = + s:mreference a rel{ is_eternal_region_hs (frameOf s) && is_mm s } + +//NS: Why do we need this one? +let s_mref (i:rid) (a:Type) (rel:preorder a) = s:mreference a rel{frameOf s = i} + +(* + * AR: this used to be (is_eternal_region i \/ i `is_above` m.tip) /\ Map.contains ... + * As far as the memory model is concerned, this should just be Map.contains + * The fact that an eternal region is always contained (because of monotonicity) should be used in the ST interface + *) +let live_region (m:mem) (i:rid) :bool = get_hmap m `Map.contains` i + +let contains (#a:Type) (#rel:preorder a) (m:mem) (s:mreference a rel) = + live_region m (frameOf s) /\ + Heap.contains (get_hmap m `Map.sel` (frameOf s)) (as_ref s) + +let unused_in (#a:Type) (#rel:preorder a) (r:mreference a rel) (m:mem) = + not ((get_hmap m) `Map.contains` (frameOf r)) \/ + Heap.unused_in (as_ref r) ((get_hmap m) `Map.sel` (frameOf r)) + +let contains_ref_in_its_region (#a:Type) (#rel:preorder a) (m:mem) (r:mreference a rel) = + Heap.contains (get_hmap m `Map.sel` (frameOf r)) (as_ref r) + +let fresh_ref (#a:Type) (#rel:preorder a) (r:mreference a rel) (m0:mem) (m1:mem) :Type0 = + let i = frameOf r in + Heap.fresh (as_ref r) (get_hmap m0 `Map.sel` i) (get_hmap m1 `Map.sel` i) + +let fresh_region (i:rid) (m0 m1:mem) = + not (get_hmap m0 `Map.contains` i) /\ get_hmap m1 `Map.contains` i + +let sel (#a:Type) (#rel:preorder a) (m:mem) (s:mreference a rel) :GTot a + = Heap.sel (get_hmap m `Map.sel` (frameOf s)) (as_ref s) + +let upd (#a:Type) (#rel:preorder a) (m:mem) (s:mreference a rel{live_region m (frameOf s)}) (v:a) + :GTot mem + = let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + lemma_is_wf_ctr_and_tip_elim m; + let i = frameOf s in + let h = Map.upd h i (Heap.upd (Map.sel h i) (as_ref s) v) in + lemma_is_wf_ctr_and_tip_intro h rid_ctr tip; + mk_mem rid_ctr h tip + +let alloc (#a:Type0) (rel:preorder a) (id:rid) (init:a) (mm:bool) (m:mem{get_hmap m `Map.contains` id}) + :Tot (p:(mreference a rel & mem){let (r, h) = Heap.alloc rel (get_hmap m `Map.sel` id) init mm in + as_ref (fst p) == r /\ + get_hmap (snd p) == Map.upd (get_hmap m) id h}) + = let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + lemma_is_wf_ctr_and_tip_elim m; + let r, id_h = Heap.alloc rel (Map.sel h id) init mm in + let h = Map.upd h id id_h in + lemma_is_wf_ctr_and_tip_intro h rid_ctr tip; + (mk_mreference id r), mk_mem rid_ctr h tip + +let free (#a:Type0) (#rel:preorder a) (r:mreference a rel{is_mm r}) (m:mem{m `contains` r}) + :Tot mem + = let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + lemma_is_wf_ctr_and_tip_elim m; + let i = frameOf r in + let i_h = h `Map.sel` i in + let i_h = Heap.free_mm i_h (as_ref r) in + let h = Map.upd h i i_h in + lemma_is_wf_ctr_and_tip_intro h rid_ctr tip; + mk_mem rid_ctr h tip + +let upd_tot (#a:Type) (#rel:preorder a) (m:mem) (r:mreference a rel{m `contains` r}) (v:a) + :Tot mem + = let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + lemma_is_wf_ctr_and_tip_elim m; + let i = frameOf r in + let i_h = h `Map.sel` i in + let i_h 
= Heap.upd_tot i_h (as_ref r) v in + let h = Map.upd h i i_h in + lemma_is_wf_ctr_and_tip_intro h rid_ctr tip; + mk_mem rid_ctr h tip + +let sel_tot (#a:Type) (#rel:preorder a) (m:mem) (r:mreference a rel{m `contains` r}) + :Tot a + = Heap.sel_tot (get_hmap m `Map.sel` (frameOf r)) (as_ref r) + +let fresh_frame (m0:mem) (m1:mem) = + not (get_hmap m0 `Map.contains` get_tip m1) /\ + parent (get_tip m1) == get_tip m0 /\ + get_hmap m1 == Map.upd (get_hmap m0) (get_tip m1) Heap.emp + +let hs_push_frame (m:mem) :Tot (m':mem{fresh_frame m m'}) + = let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + lemma_is_wf_ctr_and_tip_elim m; + let new_tip_rid = extend tip rid_ctr 1 in + let h = Map.upd h new_tip_rid Heap.emp in + assert (forall (s:rid). (new_tip_rid `is_above` s /\ s `is_in` h) ==> s = new_tip_rid); + lemma_is_wf_ctr_and_tip_intro h (rid_ctr + 1) new_tip_rid; + mk_mem (rid_ctr + 1) h new_tip_rid + +let new_eternal_region (m:mem) (parent:rid{is_eternal_region_hs parent /\ get_hmap m `Map.contains` parent}) + (c:option int{None? c \/ is_heap_color (Some?.v c)}) + :Tot (t:(rid & mem){fresh_region (fst t) m (snd t)}) + = let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + lemma_is_wf_ctr_and_tip_elim m; + let new_rid = + if None? c then extend_monochrome parent rid_ctr + else extend parent rid_ctr (Some?.v c) + in + let h = Map.upd h new_rid Heap.emp in + lemma_is_wf_ctr_and_tip_intro h (rid_ctr + 1) tip; + new_rid, mk_mem (rid_ctr + 1) h tip + +let new_freeable_heap_region + (m:mem) + (parent:rid{is_eternal_region_hs parent /\ get_hmap m `Map.contains` parent}) +: t:(rid & mem){fresh_region (fst t) m (snd t) /\ rid_freeable (fst t)} += let h, rid_ctr, tip = get_hmap m, get_rid_ctr m, get_tip m in + lemma_is_wf_ctr_and_tip_elim m; + let new_rid = extend_monochrome_freeable parent rid_ctr true in + let h = Map.upd h new_rid Heap.emp in + lemma_is_wf_ctr_and_tip_intro h (rid_ctr + 1) tip; + new_rid, mk_mem (rid_ctr + 1) h tip + +let free_heap_region + (m0:mem) + (r:rid{ + is_heap_color (color r) /\ + rid_freeable r /\ + get_hmap m0 `Map.contains` r}) +: mem += let h0, rid_ctr0 = get_hmap m0, get_rid_ctr m0 in + lemma_is_wf_ctr_and_tip_elim m0; + let dom = remove_elt (Map.domain h0) r in + let h1 = Map.restrict dom h0 in + lemma_is_wf_ctr_and_tip_intro h1 rid_ctr0 (get_tip m0); + mk_mem (get_rid_ctr m0) h1 (get_tip m0) + + +(****** The following two lemmas are only used in FStar.Pointer.Base, and invoked explicitly ******) + +val lemma_sel_same_addr (#a:Type0) (#rel:preorder a) (h:mem) (r1:mreference a rel) (r2:mreference a rel) + :Lemma (requires (frameOf r1 == frameOf r2 /\ h `contains` r1 /\ as_addr r1 = as_addr r2 /\ is_mm r1 == is_mm r2)) + (ensures (h `contains` r2 /\ sel h r1 == sel h r2)) + +val lemma_upd_same_addr (#a:Type0) (#rel:preorder a) (h:mem) (r1 r2:mreference a rel) (x: a) + :Lemma (requires (frameOf r1 == frameOf r2 /\ (h `contains` r1 \/ h `contains` r2) /\ + as_addr r1 == as_addr r2 /\ is_mm r1 == is_mm r2)) + (ensures (h `contains` r1 /\ h `contains` r2 /\ upd h r1 x == upd h r2 x)) + +(* Two references with different reads are disjoint. 
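+   Equivalently, as the statement below makes precise: two references that are both live in the
+   same region and at the same address always read the same value.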
*) +val mreference_distinct_sel_disjoint + (#a:Type0) (#rel1: preorder a) (#rel2: preorder a) (h: mem) (r1: mreference a rel1) (r2:mreference a rel2) + : Lemma (requires (h `contains` r1 /\ h `contains` r2 /\ frameOf r1 == frameOf r2 /\ as_addr r1 == as_addr r2)) + (ensures (sel h r1 == sel h r2)) + +(* + * AR: 12/26: modifies clauses + * NOTE: the modifies clauses used to have a m0.tip == m1.tip conjunct too + * which seemed a bit misplaced + * removing that conjunct required very few changes (one in HACL), since ST effect gives it already + *) +let modifies (s:Set.set rid) (m0:mem) (m1:mem) = modifies_just s (get_hmap m0) (get_hmap m1) + +let modifies_transitively (s:Set.set rid) (m0:mem) (m1:mem) = FStar.Monotonic.HyperHeap.modifies s (get_hmap m0) (get_hmap m1) + +let heap_only (m0:mem) = get_tip m0 == root + +let top_frame (m:mem) = get_hmap m `Map.sel` get_tip m + +val modifies_drop_tip (m0:mem) (m1:mem) (m2:mem) (s:Set.set rid) + : Lemma (fresh_frame m0 m1 /\ get_tip m1 == get_tip m2 /\ + modifies_transitively (Set.union s (Set.singleton (get_tip m1))) m1 m2 ==> + modifies_transitively s m0 (pop m2)) + +let modifies_one id h0 h1 = modifies_one id (get_hmap h0) (get_hmap h1) +let modifies_ref (id:rid) (s:Set.set nat) (h0:mem) (h1:mem) = + Heap.modifies s (get_hmap h0 `Map.sel` id) (get_hmap h1 `Map.sel` id) + + +(****** API for generating modifies clauses in the old style, should use new modifies clauses now ******) + +noeq type some_ref = + | Ref: #a:Type0 -> #rel:preorder a -> mreference a rel -> some_ref + +let some_refs = list some_ref + +[@@"opaque_to_smt"] +private let rec regions_of_some_refs (rs:some_refs) :Tot (Set.set rid) = + match rs with + | [] -> Set.empty + | (Ref r)::tl -> Set.union (Set.singleton (frameOf r)) (regions_of_some_refs tl) + +[@@"opaque_to_smt"] +private let rec refs_in_region (r:rid) (rs:some_refs) :GTot (Set.set nat) = + match rs with + | [] -> Set.empty + | (Ref x)::tl -> + Set.union (if frameOf x = r then Set.singleton (as_addr x) else Set.empty) + (refs_in_region r tl) + +[@@"opaque_to_smt"] +private let rec modifies_some_refs (i:some_refs) (rs:some_refs) (h0:mem) (h1:mem) :GTot Type0 = + match i with + | [] -> True + | (Ref x)::tl -> + (modifies_ref (frameOf x) (refs_in_region (frameOf x) rs) h0 h1) /\ + (modifies_some_refs tl rs h0 h1) + +[@@"opaque_to_smt"] +unfold private let norm_steps :list norm_step = + //iota for reducing match + [iota; zeta; delta; delta_only ["FStar.Monotonic.HyperStack.regions_of_some_refs"; + "FStar.Monotonic.HyperStack.refs_in_region"; + "FStar.Monotonic.HyperStack.modifies_some_refs"]; + primops] + +[@@"opaque_to_smt"] +unfold let mods (rs:some_refs) (h0 h1:mem) :GTot Type0 = + (norm norm_steps (modifies (regions_of_some_refs rs) h0 h1)) /\ + (norm norm_steps (modifies_some_refs rs rs h0 h1)) + +////// + +val eternal_disjoint_from_tip (h:mem{is_stack_region (get_tip h)}) + (r:rid{is_heap_color (color r) /\ + r =!= root /\ + r `is_in` get_hmap h}) + :Lemma (disjoint (get_tip h) r) + +val above_tip_is_live (#a:Type) (#rel:preorder a) (m:mem) (x:mreference a rel) + :Lemma (requires (frameOf x `is_above` get_tip m)) + (ensures (frameOf x `is_in` get_hmap m)) + +///// + +(****** Lemmas about equality of mem ******) + +val lemma_heap_equality_cancel_same_mref_upd + (#a:Type) (#rel:preorder a) (h:mem) (r:mreference a rel) (x y:a) + :Lemma (requires (live_region h (frameOf r))) + (ensures (upd (upd h r x) r y == upd h r y)) + +val lemma_heap_equality_upd_with_sel + (#a:Type) (#rel:preorder a) (h:mem) (r:mreference a rel) + 
:Lemma (requires (h `contains` r)) + (ensures (upd h r (sel h r) == h)) + +val lemma_heap_equality_commute_distinct_upds + (#a:Type) (#b:Type) (#rel_a:preorder a) (#rel_b:preorder b) + (h:mem) (r1:mreference a rel_a) (r2:mreference b rel_b) (x:a) (y:b) + :Lemma (requires (as_addr r1 =!= as_addr r2 /\ live_region h (frameOf r1) /\ live_region h (frameOf r2))) + (ensures (upd (upd h r1 x) r2 y == upd (upd h r2 y) r1 x)) + +val lemma_next_addr_contained_refs_addr (_:unit) + :Lemma (forall (a:Type0) (rel:preorder a) (r:mreference a rel) (m:mem). + m `contains` r ==> as_addr r < Heap.next_addr (get_hmap m `Map.sel` frameOf r)) + +(*** Untyped views of references *) + +(* Definition and ghost decidable equality *) + +val aref: Type0 + +val dummy_aref :aref + +val aref_equal (a1 a2: aref) + :Ghost bool (requires True) + (ensures (fun b -> b == true <==> a1 == a2)) + +(* Introduction rule *) + +val aref_of (#t: Type) (#rel: preorder t) (r: mreference t rel) :aref + +(* Operators lifted from reference *) + +val frameOf_aref (a:aref) :GTot rid + +val frameOf_aref_of (#t:Type) (#rel:preorder t) (r:mreference t rel) + :Lemma (frameOf_aref (aref_of r) == frameOf r) + [SMTPat (frameOf_aref (aref_of r))] + +val aref_as_addr (a:aref) :GTot pos + +val aref_as_addr_aref_of (#t:Type) (#rel:preorder t) (r:mreference t rel) + :Lemma (aref_as_addr (aref_of r) == as_addr r) + [SMTPat (aref_as_addr (aref_of r))] + +val aref_is_mm (r:aref) :GTot bool + +val is_mm_aref_of (#t:Type) (#rel:preorder t) (r:mreference t rel) + :Lemma (aref_is_mm (aref_of r) == is_mm r) + [SMTPat (aref_is_mm (aref_of r))] + +[@@ remove_unused_type_parameters [0;1]] +val aref_unused_in (a:aref) (h:mem) :GTot Type0 + +val unused_in_aref_of (#t:Type) (#rel:preorder t) (r:mreference t rel) (h:mem) + :Lemma (aref_unused_in (aref_of r) h <==> unused_in r h) + [SMTPat (aref_unused_in (aref_of r) h)] + +val contains_aref_unused_in (#a:Type) (#rel:preorder a) (h:mem) (x:mreference a rel) (y:aref) + :Lemma (requires (contains h x /\ aref_unused_in y h)) + (ensures (frameOf x <> frameOf_aref y \/ as_addr x <> aref_as_addr y)) + [SMTPat (contains h x); SMTPat (aref_unused_in y h)] + +(* Elimination rule *) + +[@@ remove_unused_type_parameters [0;1;2;3]] +val aref_live_at (h:mem) (a:aref) (v:Type0) (rel:preorder v) :GTot Type0 + +val greference_of (a:aref) (v:Type0) (rel:preorder v) + :Ghost (mreference v rel) (requires (exists h . aref_live_at h a v rel)) + (ensures (fun _ -> True)) + +val reference_of (h:mem) (a:aref) (v:Type0) (rel:preorder v) + :Pure (mreference v rel) (requires (aref_live_at h a v rel)) + (ensures (fun x -> aref_live_at h a v rel /\ frameOf x == frameOf_aref a /\ + as_addr x == aref_as_addr a /\ is_mm x == aref_is_mm a)) + +val aref_live_at_aref_of (h:mem) (#t:Type0) (#rel:preorder t) (r:mreference t rel) + :Lemma (aref_live_at h (aref_of r) t rel <==> contains h r) + [SMTPat (aref_live_at h (aref_of r) t rel)] + +val contains_greference_of (h:mem) (a:aref) (t:Type0) (rel:preorder t) + :Lemma (requires (exists h' . aref_live_at h' a t rel)) + (ensures ((exists h' . aref_live_at h' a t rel) /\ (contains h (greference_of a t rel) <==> aref_live_at h a t rel))) + [SMTPatOr [ + [SMTPat (contains h (greference_of a t rel))]; + [SMTPat (aref_live_at h a t rel)]; + ]] + +val aref_of_greference_of (a:aref) (v:Type0) (rel:preorder v) + :Lemma (requires (exists h' . aref_live_at h' a v rel)) + (ensures ((exists h' . 
aref_live_at h' a v rel) /\ aref_of (greference_of a v rel) == a)) + [SMTPat (aref_of (greference_of a v rel))] + +(* Operators lowered to rref *) + +val frameOf_greference_of (a:aref) (t:Type0) (rel:preorder t) + :Lemma (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . aref_live_at h a t rel) /\ frameOf (greference_of a t rel) == frameOf_aref a)) + [SMTPat (frameOf (greference_of a t rel))] + +val as_addr_greference_of (a:aref) (t:Type0) (rel:preorder t) + :Lemma (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . aref_live_at h a t rel) /\ as_addr (greference_of a t rel) == aref_as_addr a)) + [SMTPat (as_addr (greference_of a t rel))] + +val is_mm_greference_of (a:aref) (t:Type0) (rel:preorder t) + :Lemma (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . aref_live_at h a t rel) /\ is_mm (greference_of a t rel) == aref_is_mm a)) + [SMTPat (is_mm (greference_of a t rel))] + +val unused_in_greference_of (a:aref) (t:Type0) (rel:preorder t) (h:mem) + :Lemma (requires (exists h . aref_live_at h a t rel)) + (ensures ((exists h . aref_live_at h a t rel) /\ (unused_in (greference_of a t rel) h <==> aref_unused_in a h))) + [SMTPat (unused_in (greference_of a t rel) h)] + +val sel_reference_of (a:aref) (v:Type0) (rel:preorder v) (h1 h2: mem) + :Lemma (requires (aref_live_at h1 a v rel /\ aref_live_at h2 a v rel)) + (ensures (aref_live_at h2 a v rel /\ sel h1 (reference_of h2 a v rel) == sel h1 (greference_of a v rel))) + [SMTPat (sel h1 (reference_of h2 a v rel))] + +val upd_reference_of (a:aref) (v:Type0) (rel:preorder v) (h1 h2:mem) (x:v) + :Lemma (requires (aref_live_at h1 a v rel /\ aref_live_at h2 a v rel)) + (ensures (aref_live_at h1 a v rel /\ aref_live_at h2 a v rel /\ + upd h1 (reference_of h2 a v rel) x == upd h1 (greference_of a v rel) x)) + [SMTPat (upd h1 (reference_of h2 a v rel) x)] diff --git a/stage0/ulib/FStar.Monotonic.Map.fst b/stage0/ulib/FStar.Monotonic.Map.fst new file mode 100644 index 00000000000..7fb705038dd --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.Map.fst @@ -0,0 +1,126 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** A library for monotonic references to partial, dependent maps, with a whole-map invariant *) +module FStar.Monotonic.Map + +open FStar.HyperStack +open FStar.HyperStack.ST + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(* Partial, dependent maps *) +type map' (a:Type) (b:a -> Type) = + (x:a -> Tot (option (b x))) + +(* Partial, dependent maps, with a whole-map invariant *) +type map (a:Type) (b:a -> Type) (inv:map' a b -> Type0) = + m:map' a b{inv m} + +let upd (#a:eqtype) #b (m:map' a b) (x:a) (y:b x) + : Tot (map' a b) + = fun z -> if x = z then Some y else m z + +let sel #a #b (m:map' a b) (x:a) + : Tot (option (b x)) + = m x + +let grows_aux #a #b #inv :Preorder.preorder (map a b inv) = + fun (m1 m2:map a b inv) -> + forall x.{:pattern (Some? (m1 x))} + Some? (m1 x) ==> Some? 
(m2 x) /\ Some?.v (m1 x) == Some?.v (m2 x) + +[@@"opaque_to_smt"] +let grows #a #b #inv = grows_aux #a #b #inv + +(* Monotone, partial, dependent maps, with a whole-map invariant *) +type t r a b inv = m_rref r (map a b inv) grows //maybe grows can include the inv? + +let empty_map a b + : Tot (map' a b) + = fun x -> None + +type rid = HST.erid + +let alloc (#r:rid) #a #b #inv + : ST (t r a b inv) + (requires (fun h -> inv (empty_map a b) /\ witnessed (region_contains_pred r))) + (ensures (fun h0 x h1 -> + inv (empty_map a b) /\ + ralloc_post r (empty_map a b) h0 x h1)) + = ralloc r (empty_map a b) + +let defined #r #a #b #inv (m:t r a b inv) (x:a) (h:HS.mem) + : GTot Type0 + = Some? (sel (HS.sel h m) x) + +let contains #r #a #b #inv (m:t r a b inv) (x:a) (y:b x) (h:HS.mem) + : GTot Type0 + = Some? (sel (HS.sel h m) x) /\ Some?.v (sel (HS.sel h m) x) == y + +let value #r #a #b #inv (m:t r a b inv) (x:a) (h:HS.mem{defined m x h}) + : GTot (r:b x{contains m x r h}) + = Some?.v (sel (HS.sel h m) x) + +let fresh #r #a #b #inv (m:t r a b inv) (x:a) (h:HS.mem) + : GTot Type0 + = None? (sel (HS.sel h m) x) + +let contains_stable #r #a #b #inv (m:t r a b inv) (x:a) (y:b x) + : Lemma (ensures (stable_on_t m (contains m x y))) + = reveal_opaque (`%grows) (grows #a #b #inv) + +let extend (#r:rid) (#a:eqtype) (#b:a -> Type) (#inv:(map' a b -> Type0)) (m:t r a b inv) (x:a) (y:b x) + : ST unit + (requires (fun h -> let cur = HS.sel h m in inv (upd cur x y) /\ sel cur x == None)) + (ensures (fun h0 u h1 -> + let cur = HS.sel h0 m in + let hsref = m in + HS.contains h1 m + /\ modifies (Set.singleton r) h0 h1 + /\ modifies_ref r (Set.singleton (HS.as_addr hsref)) h0 h1 + /\ HS.sel h1 m == upd cur x y + /\ HST.witnessed (defined m x) + /\ HST.witnessed (contains m x y))) + = recall m; + reveal_opaque (`%grows) (grows #a #b #inv); + let cur = !m in + m := upd cur x y; + contains_stable m x y; + mr_witness m (defined m x); + mr_witness m (contains m x y) + +let lookup #r #a #b #inv (m:t r a b inv) (x:a) + : ST (option (b x)) + (requires (fun h -> True)) + (ensures (fun h0 y h1 -> + h0==h1 /\ + y == sel (HS.sel h1 m) x /\ + (None? y ==> fresh m x h1) /\ + (Some? y ==> + defined m x h1 /\ + contains m x (Some?.v y) h1 /\ + HST.witnessed (defined m x) /\ + HST.witnessed (contains m x (Some?.v y))))) += reveal_opaque (`%grows) (grows #a #b #inv); + let y = sel !m x in + match y with + | None -> y + | Some b -> + contains_stable m x b; + mr_witness m (defined m x); + mr_witness m (contains m x b); + y diff --git a/stage0/ulib/FStar.Monotonic.Pure.fst b/stage0/ulib/FStar.Monotonic.Pure.fst new file mode 100644 index 00000000000..8d1716d77c1 --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.Pure.fst @@ -0,0 +1,62 @@ +(* + Copyright 2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
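+
+   (* A usage sketch for the FStar.Monotonic.Map API above (hypothetical
+      client code, not part of that module). It assumes a map [m] that was
+      already allocated with [alloc] in some eternal region [r], with the
+      trivial whole-map invariant [fun _ -> True]:
+
+        let demo (#r:HST.erid) (m:t r nat (fun _ -> string) (fun _ -> True))
+          : ST unit (requires fun h -> fresh m 0 h) (ensures fun _ _ _ -> True)
+          = extend m 0 "zero";
+            let _ = lookup m 0 in
+            ()
+
+      After [extend m 0 "zero"], the stable facts [witnessed (defined m 0)]
+      and [witnessed (contains m 0 "zero")] hold, so the binding can be
+      recalled in any later state. *)
+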
+*) + +module FStar.Monotonic.Pure + +(* + * This module provides utilities to intro and elim the monotonicity + * property of pure wps + * + * Since pure_wp_monotonic predicate is marked opaque_to_smt in prims, + * reasoning with it requires explicit coercions + *) + +unfold +let is_monotonic (#a:Type) (wp:pure_wp' a) = + (* + * Once we support using tactics in ulib/, + * this would be written as: Prims.pure_wp_monotonic0, + * with a postprocessing tactic to norm it + *) + forall (p q:pure_post a). (forall (x:a). p x ==> q x) ==> (wp p ==> wp q) + +let elim_pure_wp_monotonicity (#a:Type) (wp:pure_wp a) + : Lemma (is_monotonic wp) + = reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic + +let elim_pure_wp_monotonicity_forall (_:unit) + : Lemma + (forall (a:Type) (wp:pure_wp a). is_monotonic wp) + = reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic + +let intro_pure_wp_monotonicity (#a:Type) (wp:pure_wp' a) + : Lemma + (requires is_monotonic wp) + (ensures pure_wp_monotonic a wp) + = reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic + +unfold +let as_pure_wp (#a:Type) (wp:pure_wp' a) + : Pure (pure_wp a) + (requires is_monotonic wp) + (ensures fun r -> r == wp) + = intro_pure_wp_monotonicity wp; + wp + +let elim_pure (#a:Type) (#wp:pure_wp a) ($f : unit -> PURE a wp) (p:pure_post a) + : Pure a (requires (wp p)) (ensures (fun r -> p r)) + = elim_pure_wp_monotonicity wp; + f () diff --git a/stage0/ulib/FStar.Monotonic.Seq.fst b/stage0/ulib/FStar.Monotonic.Seq.fst new file mode 100644 index 00000000000..32040d0000b --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.Seq.fst @@ -0,0 +1,435 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.Seq + +open FStar.Seq +open FStar.Classical +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +open FStar.HyperStack +open FStar.HyperStack.ST + +(* 2016-11-22: The following is meant to override the fact that the + enclosing namespace of the current module (here FStar.Monotonic) is + automatically opened, which makes Seq resolve into + FStar.Monotonic.Seq instead of FStar.Seq. *) +module Seq = FStar.Seq + +//////////////////////////////////////////////////////////////////////////////// + +(* + * 12/08 + * AR: writing this in terms of length and index + * earlier it was written in terms of an exists s3. Seq.equal (append s1 s3) s2 + * that meant going through many hoops to prove simple things like transitivity of grows + * so far this seems to work better. 
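+ *
+ * For instance, with this formulation a fact such as the following sketch
+ * (essentially what lemma_snoc_extends below states) goes through by
+ * length/index reasoning alone, without exhibiting an explicit suffix
+ * witness:
+ *
+ *   let snoc_grows (#a:Type) (s:seq a) (x:a)
+ *     : Lemma (grows s (Seq.snoc s x))
+ *     = reveal_opaque (`%grows) (grows #a)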
+ *) +let grows_aux (#a:Type) :Preorder.preorder (seq a) + = fun (s1:seq a) (s2:seq a) -> + length s1 <= length s2 /\ + (forall (i:nat).{:pattern (Seq.index s1 i) \/ (Seq.index s2 i)} i < length s1 ==> index s1 i == index s2 i) + +[@@"opaque_to_smt"] +let grows #a = grows_aux #a + +type rid = HST.erid + +let snoc (s:seq 'a) (x:'a) + : Tot (seq 'a) + = Seq.append s (Seq.create 1 x) + +let lemma_snoc_extends (#a:Type) (s:seq a) (x:a) + : Lemma (requires True) + (ensures (grows s (Seq.snoc s x))) + [SMTPat (grows s (Seq.snoc s x))] + = reveal_opaque (`%grows) (grows #a) + +let alloc_mref_seq (#a:Type) (r:rid) (init:seq a) + : ST (m_rref r (seq a) grows) + (requires (fun _ -> HST.witnessed (region_contains_pred r))) + (ensures (fun h0 m h1 -> + HS.contains h1 m /\ + HS.sel h1 m == init /\ + HST.ralloc_post r init h0 m h1)) + = ralloc r init + +(* + * AR: changing rids below to rid which is eternal regions. + *) +let at_least (#a:Type) (#i:rid) (n:nat) (x:a) (r:m_rref i (seq a) grows) (h:mem) = + Seq.length (HS.sel h r) > n + /\ Seq.index (HS.sel h r) n == x + +let at_least_is_stable (#a:Type) (#i:rid) (n:nat) (x:a) (r:m_rref i (seq a) grows) + : Lemma (ensures stable_on_t r (at_least n x r)) + = reveal_opaque (`%grows) (grows #a) + +(** extending a stored sequence, witnessing its new entry for convenience. *) +let write_at_end (#a:Type) (#i:rid) (r:m_rref i (seq a) grows) (x:a) + : ST unit + (requires (fun h -> True)) + (ensures (fun h0 _ h1 -> + contains h1 r + /\ modifies_one i h0 h1 + /\ modifies_ref i (Set.singleton (HS.as_addr r)) h0 h1 + /\ HS.sel h1 r == Seq.snoc (HS.sel h0 r) x + /\ witnessed (at_least (Seq.length (HS.sel h0 r)) x r))) + = + recall r; + let s0 = !r in + let n = Seq.length s0 in + r := Seq.snoc s0 x; + at_least_is_stable n x r; + Seq.contains_snoc s0 x; + mr_witness r (at_least n x r) + +//////////////////////////////////////////////////////////////////////////////// +//Monotone sequences with a (stateless) invariant of the whole sequence +//////////////////////////////////////////////////////////////////////////////// + +let grows_p (#a:Type) (p:seq a -> Type) :Preorder.preorder (s:seq a{p s}) = + fun s1 s2 -> grows s1 s2 + +let i_seq (r:rid) (a:Type) (p:seq a -> Type) = m_rref r (s:seq a{p s}) (grows_p p) + +let alloc_mref_iseq (#a:Type) (p:seq a -> Type) (r:rid) (init:seq a{p init}) + : ST (i_seq r a p) + (requires (fun _ -> HST.witnessed (region_contains_pred r))) + (ensures (fun h0 m h1 -> HST.ralloc_post r init h0 m h1)) + = ralloc r init + +let i_at_least (#r:rid) (#a:Type) (#p:(seq a -> Type)) (n:nat) (x:a) (m:i_seq r a p) (h:mem) = + Seq.length (HS.sel h m) > n + /\ Seq.index (HS.sel h m) n == x + +let i_at_least_is_stable (#r:rid) (#a:Type) (#p:seq a -> Type) (n:nat) (x:a) (m:i_seq r a p) + : Lemma (ensures stable_on_t m (i_at_least n x m)) + = reveal_opaque (`%grows) (grows #a) + +let int_at_most #r #a #p (x:int) (is:i_seq r a p) (h:mem) : Type0 = + x < Seq.length (HS.sel h is) + +let int_at_most_is_stable (#r:rid) (#a:Type) (#p:seq a -> Type) (is:i_seq r a p) (k:int) + : Lemma (ensures stable_on_t is (int_at_most k is)) + = reveal_opaque (`%grows) (grows #a) + +let i_sel (#r:rid) (#a:Type) (#p:seq a -> Type) (h:mem) (m:i_seq r a p) + : GTot (s:seq a{p s}) + = HS.sel h m + +let i_read (#a:Type) (#p:Seq.seq a -> Type) (#r:rid) (m:i_seq r a p) + : ST (s:seq a{p s}) + (requires (fun h -> True)) + (ensures (fun h0 x h1 -> h0==h1 /\ x == i_sel h0 m)) + = !m + +let i_contains (#r:rid) (#a:Type) (#p:seq a -> Type) (m:i_seq r a p) (h:mem) + : GTot Type0 + = 
HS.contains h m + +let i_write_at_end (#a:Type) (#p:seq a -> Type) (#rgn:rid) (r:i_seq rgn a p) (x:a) + : ST unit + (requires (fun h -> p (Seq.snoc (i_sel h r) x))) + (ensures (fun h0 _ h1 -> + i_contains r h1 + /\ modifies_one rgn h0 h1 + /\ modifies_ref rgn (Set.singleton (HS.as_addr r)) h0 h1 + /\ i_sel h1 r == Seq.snoc (i_sel h0 r) x + /\ witnessed (i_at_least (Seq.length (i_sel h0 r)) x r))) + = + recall r; + let s0 = !r in + let n = Seq.length s0 in + r := Seq.snoc s0 x; + i_at_least_is_stable n x r; + contains_snoc s0 x; + mr_witness r (i_at_least n x r) + +//////////////////////////////////////////////////////////////////////////////// +//Testing invariant sequences +//////////////////////////////////////////////////////////////////////////////// +private let invariant (s:seq nat) = + forall (i:nat) (j:nat). i < Seq.length s /\ j < Seq.length s /\ i<>j + ==> Seq.index s i <> Seq.index s j + +private val test0: r:rid -> a:m_rref r (seq nat) grows -> k:nat -> ST unit + (requires (fun h -> k < Seq.length (HS.sel h a))) + (ensures (fun h0 result h1 -> True)) +let test0 r a k = + let h0 = HST.get() in + let _ = + let s = HS.sel h0 a in + at_least_is_stable k (Seq.index (HS.sel h0 a) k) a; + Seq.contains_intro s k (Seq.index s k) in + mr_witness a (at_least k (Seq.index (HS.sel h0 a) k) a) + +private val itest: r:rid -> a:i_seq r nat invariant -> k:nat -> ST unit + (requires (fun h -> k < Seq.length (i_sel h a))) + (ensures (fun h0 result h1 -> True)) +let itest r a k = + let h0 = HST.get() in + i_at_least_is_stable k (Seq.index (i_sel h0 a) k) a; + mr_witness a (i_at_least k (Seq.index (i_sel h0 a) k) a) + + +//////////////////////////////////////////////////////////////////////////////// +//Mapping functions over monotone sequences +//////////////////////////////////////////////////////////////////////////////// +val un_snoc: #a: Type -> s:seq a {Seq.length s > 0} -> Tot(seq a & a) +let un_snoc #a s = + let last = Seq.length s - 1 in + Seq.slice s 0 last, Seq.index s last + +val map: ('a -> Tot 'b) -> s:seq 'a -> Tot (seq 'b) + (decreases (Seq.length s)) +let rec map f s = + if Seq.length s = 0 then Seq.empty + else let prefix, last = un_snoc s in + Seq.snoc (map f prefix) (f last) + +val map_snoc: f:('a -> Tot 'b) -> s:seq 'a -> a:'a -> Lemma + (map f (Seq.snoc s a) == Seq.snoc (map f s) (f a)) +let map_snoc f s a = + let prefix, last = un_snoc (Seq.snoc s a) in + cut (Seq.equal prefix s) + +private let op_At s1 s2 = Seq.append s1 s2 + +val map_append: f:('a -> Tot 'b) -> s1:seq 'a -> s2:seq 'a -> Lemma + (requires True) + (ensures (map f (s1@s2) == (map f s1 @ map f s2))) + (decreases (Seq.length s2)) +#reset-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1" +let rec map_append f s_1 s_2 = + if Seq.length s_2 = 0 + then (cut (Seq.equal (s_1@s_2) s_1); + cut (Seq.equal (map f s_1 @ map f s_2) (map f s_1))) + else (let prefix_2, last = un_snoc s_2 in + let m_s_1 = map f s_1 in + let m_p_2 = map f prefix_2 in + let flast = f last in + cut (Seq.equal (s_1@s_2) (Seq.snoc (s_1@prefix_2) last)); //map f (s1@s2) = map f (snoc (s1@p) last) + map_snoc f (Seq.append s_1 prefix_2) last; // = snoc (map f (s1@p)) (f last) + map_append f s_1 prefix_2; // = snoc (map f s_1 @ map f p) (f last) + cut (Seq.equal (Seq.snoc (m_s_1 @ m_p_2) flast) + (m_s_1 @ Seq.snoc m_p_2 flast)); // = map f s1 @ (snoc (map f p) (f last)) + map_snoc f prefix_2 last) // = map f s1 @ map f (snoc p last) + +#reset-options "--z3rlimit 5" + +val map_length: f:('a -> Tot 'b) -> s1:seq 'a -> 
Lemma
+  (requires True)
+  (ensures (Seq.length s1 = Seq.length (map f s1)))
+  (decreases (length s1))
+  [SMTPat (Seq.length (map f s1))]
+let rec map_length f s1 =
+  if Seq.length s1 = 0 then ()
+  else let prefix, last = un_snoc s1 in
+       map_length f prefix
+
+val map_index: f:('a -> Tot 'b) -> s:seq 'a -> i:nat{i < Seq.length s} -> Lemma
+  (requires True)
+  (ensures (Seq.index (map f s) i == f (Seq.index s i)))
+  (decreases (Seq.length s))
+  [SMTPat (Seq.index (map f s) i)]
+let rec map_index f s i =
+  if i = Seq.length s - 1
+  then ()
+  else let prefix, last = un_snoc s in
+       map_index f prefix i
+
+//17-01-05 all the stuff above should go to Seq.Properties!
+
+let map_grows (#a:Type) (#b:Type) (f:a -> Tot b)
+              (s1:seq a) (s3:seq a)
+  : Lemma (grows s1 s3
+           ==> grows (map f s1) (map f s3))
+  = reveal_opaque (`%grows) (grows #a);
+    reveal_opaque (`%grows) (grows #b)
+
+let map_prefix (#a:Type) (#b:Type) (#i:rid)
+               (r:m_rref i (seq a) grows)
+               (f:a -> Tot b)
+               (bs:seq b)
+               (h:mem) =
+  grows bs (map f (HS.sel h r))
+
+//17-01-05 this applies to log_t's defined below.
+let map_prefix_stable (#a:Type) (#b:Type) (#i:rid) (r:m_rref i (seq a) grows) (f:a -> Tot b) (bs:seq b)
+  :Lemma (stable_on_t r (map_prefix r f bs))
+  = reveal_opaque (`%grows) (grows #a);
+    reveal_opaque (`%grows) (grows #b)
+
+let map_has_at_index (#a:Type) (#b:Type) (#i:rid)
+                     (r:m_rref i (seq a) grows)
+                     (f:a -> Tot b)
+                     (n:nat) (v:b) (h:mem) =
+  let s = HS.sel h r in
+  n < Seq.length s
+  /\ Seq.index (map f s) n == v
+
+let map_has_at_index_stable (#a:Type) (#b:Type) (#i:rid)
+                            (r:m_rref i (seq a) grows)
+                            (f:a -> Tot b) (n:nat) (v:b)
+  : Lemma (stable_on_t r (map_has_at_index r f n v))
+  = reveal_opaque (`%grows) (grows #a)
+
+
+////////////////////////////////////////////////////////////////////////////////
+//Collecting monotone sequences
+////////////////////////////////////////////////////////////////////////////////
+
+(** yields the concatenation of all sequences returned by f applied to the sequence elements *)
+val collect: ('a -> Tot (seq 'b)) -> s:seq 'a -> Tot (seq 'b)
+    (decreases (Seq.length s))
+let rec collect f s =
+  if Seq.length s = 0 then Seq.empty
+  else let prefix, last = un_snoc s in
+       Seq.append (collect f prefix) (f last)
+
+val collect_snoc: f:('a -> Tot (seq 'b)) -> s:seq 'a -> a:'a -> Lemma
+  (collect f (Seq.snoc s a) == Seq.append (collect f s) (f a))
+let collect_snoc f s a =
+  let prefix, last = un_snoc (Seq.snoc s a) in
+  cut (Seq.equal prefix s)
+
+#reset-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
+
+let collect_grows (f:'a -> Tot (seq 'b))
+                  (s1:seq 'a) (s2:seq 'a)
+  : Lemma (grows s1 s2 ==> grows (collect f s1) (collect f s2))
+  = reveal_opaque (`%grows) (grows #'a);
+    reveal_opaque (`%grows) (grows #'b);
+    let rec collect_grows_aux (f:'a -> Tot (seq 'b)) (s1:seq 'a) (s2:seq 'a)
+      :Lemma (requires (grows s1 s2)) (ensures (grows (collect f s1) (collect f s2)))
+             (decreases (Seq.length s2))
+      = if length s1 = length s2 then assert (Seq.equal s1 s2)
+        else
+          let s2_prefix, s2_last = un_snoc s2 in
+          collect_grows_aux f s1 s2_prefix
+    in
+    Classical.arrow_to_impl #(grows s1 s2) #(grows (collect f s1) (collect f s2)) (fun _ -> collect_grows_aux f s1 s2)
+
+let collect_prefix (#a:Type) (#b:Type) (#i:rid)
+                   (r:m_rref i (seq a) grows)
+                   (f:a -> Tot (seq b))
+                   (bs:seq b)
+                   (h:mem) =
+  grows bs (collect f (HS.sel h r))
+
+let collect_prefix_stable (#a:Type) (#b:Type) (#i:rid) (r:m_rref i (seq a) grows) (f:a -> Tot (seq b)) (bs:seq b)
+  : Lemma (stable_on_t r (collect_prefix r
f bs)) + = let aux : h0:mem -> h1:mem -> Lemma + (collect_prefix r f bs h0 + /\ grows (HS.sel h0 r) (HS.sel h1 r) + ==> collect_prefix r f bs h1) = + fun h0 h1 -> + let s1 = HS.sel h0 r in + let s3 = HS.sel h1 r in + collect_grows f s1 s3 + in + forall_intro_2 aux + +let collect_has_at_index (#a:Type) (#b:Type) (#i:rid) + (r:m_rref i (seq a) grows) + (f:a -> Tot (seq b)) + (n:nat) (v:b) (h:mem) = + let s = HS.sel h r in + n < Seq.length (collect f s) + /\ Seq.index (collect f s) n == v + +let collect_has_at_index_stable (#a:Type) (#b:Type) (#i:rid) + (r:m_rref i (seq a) grows) + (f:a -> Tot (seq b)) (n:nat) (v:b) + : Lemma (stable_on_t r (collect_has_at_index r f n v)) + = reveal_opaque (`%grows) (grows #b); + Classical.forall_intro_2 (collect_grows f) + +//////////////////////////////////////////////////////////////////////////////// +//Monotonic sequence numbers, bounded by the length of a log +//////////////////////////////////////////////////////////////////////////////// +//17-01-05 the simpler variant, with an historic name; consider using uniform names below. +type log_t (i:rid) (a:Type) = m_rref i (seq a) grows + +let increases (x:int) (y:int) = b2t (x <= y) + +let at_most_log_len (#l:rid) (#a:Type) (x:nat) (log:log_t l a) + : mem -> GTot Type0 + = fun h -> x <= Seq.length (HS.sel h log) + +//Note: we may want int seqn, instead of nat seqn +//because the handshake uses an initial value of -1 +type seqn_val (#l:rid) (#a:Type) (i:rid) (log:log_t l a) (max:nat) = + (x:nat{x <= max /\ witnessed (at_most_log_len x log)}) //never more than the length of the log + +type seqn (#l:rid) (#a:Type) (i:rid) (log:log_t l a) (max:nat) = + m_rref i //counter in region i + (seqn_val i log max) //never more than the length of the log + increases //increasing + +let at_most_log_len_stable (#l:rid) (#a:Type) (x:nat) (log:log_t l a) + : Lemma (stable_on_t log (at_most_log_len x log)) + = reveal_opaque (`%grows) (grows #a) + +let new_seqn (#a:Type) (#l:rid) (#max:nat) + (i:rid) (init:nat) (log:log_t l a) + : ST (seqn i log max) + (requires (fun h -> + HST.witnessed (region_contains_pred i) /\ + init <= max /\ + init <= Seq.length (HS.sel h log))) + (ensures (fun h0 c h1 -> //17-01-05 unify with ralloc_post? 
+ modifies_one i h0 h1 /\ + modifies_ref i Set.empty h0 h1 /\ + fresh_ref c h0 h1 /\ + HS.sel h1 c = init /\ + FStar.Map.contains (HS.get_hmap h1) i)) + = reveal_opaque (`%grows) (grows #a); + recall log; recall_region i; + mr_witness log (at_most_log_len init log); + ralloc i init + +let increment_seqn (#a:Type) (#l:rid) (#max:nat) + (#i:rid) (#log:log_t l a) ($c:seqn i log max) + : ST unit + (requires (fun h -> + let log = HS.sel h log in + let n = HS.sel h c in + n < Seq.length log /\ + n + 1 <= max)) + (ensures (fun h0 _ h1 -> + modifies_one i h0 h1 /\ + modifies_ref i (Set.singleton (HS.as_addr c)) h0 h1 /\ + HS.sel h1 c = HS.sel h0 c + 1)) + = reveal_opaque (`%grows) (grows #a); + recall c; recall log; + let n = !c + 1 in + mr_witness log (at_most_log_len n log); + c := n + +let testify_seqn (#a:Type0) (#i:rid) (#l:rid) (#log:log_t l a) (#max:nat) (ctr:seqn i log max) + : ST unit + (requires (fun h -> True)) + (ensures (fun h0 _ h1 -> + h0==h1 /\ + at_most_log_len (HS.sel h1 ctr) log h1)) + = let n = !ctr in + testify (at_most_log_len n log) + +private let test (i:rid) (l:rid) (a:Type0) (log:log_t l a) //(p:(nat -> Type)) + (r:seqn i log 8) (h:mem) + = assert (HS.sel h r = Heap.sel (FStar.Map.sel (HS.get_hmap h) i) (HS.as_ref r)) diff --git a/stage0/ulib/FStar.Monotonic.Witnessed.fst b/stage0/ulib/FStar.Monotonic.Witnessed.fst new file mode 100644 index 00000000000..f354e0765b5 --- /dev/null +++ b/stage0/ulib/FStar.Monotonic.Witnessed.fst @@ -0,0 +1,338 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Monotonic.Witnessed + +open FStar.Preorder + +(* NOT EXPOSED BY THE INTERFACE [start] *) + +(* + A hybrid modal extension to F*'s classical reasoning logic: + - extends F*'s logic with two hybrid modal operators (get, set) + - extends F*'s logic with corresponding logical axioms, based + on the Kripke semantics of these two hybrid modal operators: + + w |= get p iff w |= p w + + w |= set p w' iff w' |= p + + We do not expose these modal operators to the users directly. + Instead we use them below to define the 'witnessed' modality + which is the basis of reasoning about monotonic state in F*, + as discussed in Ahman et al.'s POPL 2018 paper "Recalling a + Witness: Foundations and Applications of Monotonic State". +*) + +(* Hybrid modal operators *) +assume private type get : #world:Type -> (world -> Type0) -> Type0 +assume private type set : #world:Type -> Type0 -> world -> Type0 + +(* Weakening for the get operator *) +assume val get_weakening :#world:Type + -> p:(world -> Type0) + -> q:(world -> Type0) + -> Lemma (requires (forall w. 
p w ==> q w)) + (ensures (get p ==> get q)) + [SMTPat (get p); SMTPat (get q)] + +(* Interaction axioms between the get and set operators *) +assume val get_set_axiom :#world:Type + -> p:Type0 + -> Lemma (get (set #world p) <==> p) + [SMTPat (get (set #world p))] + +assume val set_get_axiom :#world:Type + -> w:world + -> p:(world -> Type0) + -> Lemma (set (get p) w <==> set (p w) w) + [SMTPat (set (get p) w)] + +assume val set_set_axiom :#world:Type + -> w:world + -> w':world + -> p:Type0 + -> Lemma (set (set p w') w <==> set p w') + [SMTPat (set (set p w') w)] + +(* Useful derivable get lemma *) + +private val get_constant_lemma :world:Type + -> p:Type0 + -> Lemma (get #world (fun _ -> p) <==> p) +let get_constant_lemma world p = get_set_axiom #world p + +(* Get and set commute with (non-modal) logical connectives *) + +private val get_true :world:Type + -> Lemma (get #world (fun _ -> True)) +let get_true world = get_constant_lemma world True + +assume private val set_true :#world:Type + -> w:world + -> Lemma (set True w) + +private val get_false :world:Type + -> Lemma (requires (get #world (fun _ -> False))) + (ensures (False)) +let get_false world = get_constant_lemma world False + +assume val set_false :#world:Type + -> w:world + -> Lemma (requires (set False w)) + (ensures (False)) + +private val get_and_1 :#world:Type + -> p:(world -> Type0) + -> q:(world -> Type0) + -> Lemma (requires (get (fun w -> p w /\ q w))) + (ensures (get p /\ get q)) +let get_and_1 #world p q = () + +assume private val set_and_1 :#world:Type + -> w:world + -> p:Type0 + -> q:Type0 + -> Lemma (requires (set (p /\ q) w)) + (ensures (set p w /\ set q w)) + +assume private val get_and_2 :#world:Type + -> p:(world -> Type0) + -> q:(world -> Type0) + -> Lemma (requires (get p /\ get q)) + (ensures (get (fun w -> p w /\ q w))) + + +assume private val set_and_2 :#world:Type + -> w:world + -> p:Type0 + -> q:Type0 + -> Lemma (requires (set p w /\ set q w)) + (ensures (set (p /\ q) w)) + +private val get_or_1 :#world:Type + -> p:(world -> Type0) + -> q:(world -> Type0) + -> Lemma (requires (get p \/ get q)) + (ensures (get (fun w -> p w \/ q w))) +let get_or_1 #world p q = () + +assume private val set_or_1 :#world:Type + -> w:world + -> p:Type0 + -> q:Type0 + -> Lemma (requires (set p w \/ set q w)) + (ensures (set (p \/ q) w)) + +assume private val get_or_2 :#world:Type + -> p:(world -> Type0) + -> q:(world -> Type0) + -> Lemma (requires (get (fun w -> p w \/ q w))) + (ensures (get p \/ get q)) + +assume private val set_or_2 :#world:Type + -> w:world + -> p:Type0 + -> q:Type0 + -> Lemma (requires (set (p \/ q) w)) + (ensures (set p w \/ set q w)) + +private val get_impl_1 :#world:Type + -> p:(world -> Type0) + -> q:(world -> Type0) + -> Lemma (requires (get (fun w -> p w ==> q w) /\ get p)) + (ensures (get q)) +let get_impl_1 #world p q = + get_and_2 (fun w -> p w ==> q w) p; + get_weakening (fun w -> (p w ==> q w) /\ p w) q + +assume private val set_impl_1 :#world:Type + -> w:world + -> p:Type0 + -> q:Type0 + -> Lemma (requires (set (p ==> q) w /\ set p w)) + (ensures (set q w)) + +assume private val get_impl_2 :#world:Type + -> p:(world -> Type0) + -> q:(world -> Type0) + -> Lemma (requires (get p ==> get q)) + (ensures (get (fun w -> p w ==> q w))) + +assume private val set_impl_2 :#world:Type + -> w:world + -> p:Type0 + -> q:Type0 + -> Lemma (requires (set p w ==> set q w)) + (ensures (set (p ==> q) w)) + +private val get_forall_1_aux :#world:Type + -> #t:Type + -> p:(t -> world -> Type0) + -> x:t + -> Lemma 
(requires (get (fun w -> forall x. p x w))) + (ensures (get (fun w -> p x w))) +let get_forall_1_aux #world #t p x = + get_weakening (fun w -> forall x. p x w) (fun w -> p x w) + +private val get_forall_1 :#world:Type + -> #t:Type + -> p:(t -> world -> Type0) + -> Lemma (requires (get (fun w -> forall x. p x w))) + (ensures (forall x. get (fun w -> p x w))) +let get_forall_1 #world #t p = () + +assume private val set_forall_1 :#world:Type + -> #t:Type + -> w:world + -> p:(t -> Type0) + -> Lemma (requires (set (forall x. p x) w)) + (ensures (forall x. set (p x) w)) + +assume private val get_forall_2 :#world:Type + -> #t:Type + -> p:(t -> world -> Type0) + -> Lemma (requires (forall x. get (fun w -> p x w))) + (ensures (get (fun w -> forall x. p x w))) + +assume private val set_forall_2 :#world:Type + -> #t:Type + -> w:world + -> p:(t -> Type0) + -> Lemma (requires (forall x. set (p x) w)) + (ensures (set (forall x. p x) w)) + +private val get_exists_1_aux :#world:Type + -> #t:Type + -> p:(t -> world -> Type0) + -> x:t + -> Lemma (requires (get (fun w -> p x w))) + (ensures (get (fun w -> exists x. p x w))) +let get_exists_1_aux #world #t p x = + get_weakening (fun w -> p x w) (fun w -> exists x . p x w) + +private val get_exists_1 :#world:Type + -> #t:Type + -> p:(t -> world -> Type0) + -> Lemma (requires (exists x. get (fun w -> p x w))) + (ensures (get (fun w -> exists x. p x w))) +let get_exists_1 #world #t p = () + +assume private val set_exists_1 :#world:Type + -> #t:Type + -> w:world + -> p:(t -> Type0) + -> Lemma (requires (exists x. set (p x) w)) + (ensures (set (exists x. p x) w)) + +assume private val get_exists_2 :#world:Type + -> #t:Type + -> p:(t -> world -> Type0) + -> Lemma (requires (get (fun w -> exists x. p x w))) + (ensures (exists x. get (fun w -> p x w))) + +assume private val set_exists_2 :#world:Type + -> #t:Type + -> w:world + -> p:(t -> Type0) + -> Lemma (requires (set (exists x. p x) w)) + (ensures (exists x. set (p x) w)) + +private val get_eq :#world:Type + -> #t:Type + -> v:t + -> v':t + -> Lemma (get #world (fun _ -> v == v') <==> v == v') +let get_eq #world #t v v' = + get_constant_lemma world (v == v') + +assume private val set_eq :#world:Type + -> #t:Type + -> w:world + -> v:t + -> v':t + -> Lemma (set (v == v') w <==> v == v') + +(* NOT EXPOSED BY THE INTERFACE [end] *) + + +(* EXPOSED BY THE INTERFACE [start] *) + +(* Witnessed modality *) + +let witnessed #state rel p = get (fun s -> forall s'. rel s s' ==> p s') + +(* Weakening for the witnessed modality *) + +let lemma_witnessed_weakening #state rel p q = () + +(* Some logical properties of the witnessed modality *) + +let lemma_witnessed_constant #state rel p = get_constant_lemma state p + +let lemma_witnessed_nested #state rel p = () + +let lemma_witnessed_and #state rel p q = + let aux () :Lemma (requires (witnessed rel p /\ witnessed rel q)) + (ensures (witnessed rel (fun s -> p s /\ q s))) + = get_and_2 (fun s -> forall s'. rel s s' ==> p s') (fun s -> forall s'. rel s s' ==> q s') + in + FStar.Classical.move_requires aux () + +let lemma_witnessed_or #state rel p q = () + +let lemma_witnessed_impl #state rel p q = + let aux () :Lemma (requires ((witnessed rel (fun s -> p s ==> q s) /\ witnessed rel p))) + (ensures (witnessed rel q)) + = get_and_2 (fun s -> forall s'. rel s s' ==> p s' ==> q s') (fun s -> forall s'. rel s s' ==> p s') + in + FStar.Classical.move_requires aux () + +let lemma_witnessed_forall #state #t rel p = + let aux () :Lemma (requires (forall x. 
witnessed rel (fun s -> p x s)))
+                     (ensures (witnessed rel (fun s -> forall x. p x s)))
+        = get_forall_2 #state #t (fun x s -> forall s'. rel s s' ==> p x s')
+  in
+  FStar.Classical.move_requires aux ()
+
+let lemma_witnessed_exists #state #t rel p = ()
+
+(* EXPOSED BY THE INTERFACE [end] *)
+
+
+(* NOT EXPOSED BY THE INTERFACE [start] *)
+
+(* An equivalent past-view of the witnessed modality *)
+
+let witnessed_past (#state:Type) (rel:preorder state) (p:(state -> Type0)) =
+  get (fun s -> exists s'. rel s' s /\ (forall s''. rel s' s'' ==> p s''))
+
+val witnessed_defs_equiv_1 :#state:Type
+                          -> rel:preorder state
+                          -> p:(state -> Type0)
+                          -> Lemma (requires (witnessed #state rel p))
+                                   (ensures (witnessed_past #state rel p))
+let witnessed_defs_equiv_1 #state rel p = ()
+
+val witnessed_defs_equiv_2 :#state:Type
+                          -> rel:preorder state
+                          -> p:(state -> Type0)
+                          -> Lemma (requires (witnessed_past #state rel p))
+                                   (ensures (witnessed #state rel p))
+let witnessed_defs_equiv_2 #state rel p =
+  get_weakening #state (fun s -> exists s'. rel s' s /\ (forall s''. rel s' s'' ==> p s''))
+                       (fun s -> forall s'. rel s s' ==> p s')
+
+(* NOT EXPOSED BY THE INTERFACE [end] *)
diff --git a/stage0/ulib/FStar.Monotonic.Witnessed.fsti b/stage0/ulib/FStar.Monotonic.Witnessed.fsti
new file mode 100644
index 00000000000..04ff041c7d7
--- /dev/null
+++ b/stage0/ulib/FStar.Monotonic.Witnessed.fsti
@@ -0,0 +1,86 @@
+(*
+   Copyright 2008-2018 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*)
+module FStar.Monotonic.Witnessed
+
+open FStar.Preorder
+
+(*
+   A module that defines the 'witnessed' logical capability/modality
+   that is the basis of reasoning about monotonic state in F*, as
+   discussed in Ahman et al.'s POPL 2018 paper "Recalling a Witness:
+   Foundations and Applications of Monotonic State". Compared to the
+   POPL paper, where 'witnessed' and 'witnessed_weakening' were
+   simply postulated as axioms, this module defines them on top of
+   a more basic hybrid modal extension of F*'s reasoning logic (see
+   the corresponding fst file). Also, compared to the POPL paper, this
+   module proves many additional logical properties of 'witnessed'.
+*)
+
+(* Witnessed modality *)
+
+[@@ remove_unused_type_parameters [0; 1; 2]]
+val witnessed : #state:Type -> rel:preorder state -> p:(state -> Type0) -> Type0
+
+(* Weakening for the witnessed modality *)
+
+val lemma_witnessed_weakening :#state:Type
+                            -> rel:preorder state
+                            -> p:(state -> Type0)
+                            -> q:(state -> Type0)
+                            -> Lemma (requires (forall s.
p s ==> q s)) + (ensures (witnessed rel p ==> witnessed rel q)) + +(* Some logical properties of the witnessed modality *) + +val lemma_witnessed_constant :#state:Type + -> rel:preorder state + -> p:Type0 + -> Lemma (witnessed rel (fun _ -> p) <==> p) + +val lemma_witnessed_nested :#state:Type + -> rel:preorder state + -> p:(state -> Type0) + -> Lemma (witnessed rel (fun _ -> witnessed rel p) <==> witnessed rel p) + +val lemma_witnessed_and :#state:Type + -> rel:preorder state + -> p:(state -> Type0) + -> q:(state -> Type0) + -> Lemma (witnessed rel (fun s -> p s /\ q s) <==> (witnessed rel p /\ witnessed rel q)) + +val lemma_witnessed_or :#state:Type + -> rel:preorder state + -> p:(state -> Type0) + -> q:(state -> Type0) + -> Lemma ((witnessed rel p \/ witnessed rel q) ==> witnessed rel (fun s -> p s \/ q s)) + +val lemma_witnessed_impl :#state:Type + -> rel:preorder state + -> p:(state -> Type0) + -> q:(state -> Type0) + -> Lemma ((witnessed rel (fun s -> p s ==> q s) /\ witnessed rel p) ==> witnessed rel q) + +val lemma_witnessed_forall :#state:Type + -> #t:Type + -> rel:preorder state + -> p:(t -> state -> Type0) + -> Lemma ((witnessed rel (fun s -> forall x. p x s)) <==> (forall x. witnessed rel (p x))) + +val lemma_witnessed_exists :#state:Type + -> #t:Type + -> rel:preorder state + -> p:(t -> state -> Type0) + -> Lemma ((exists x. witnessed rel (p x)) ==> witnessed rel (fun s -> exists x. p x s)) diff --git a/stage0/ulib/FStar.Mul.fst b/stage0/ulib/FStar.Mul.fst new file mode 100644 index 00000000000..9c456027c19 --- /dev/null +++ b/stage0/ulib/FStar.Mul.fst @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Mul +//If we're not doing anything with tuples, +//open this module to let '*' be multiplication +unfold let op_Star = Prims.op_Multiply diff --git a/stage0/ulib/FStar.Option.fst b/stage0/ulib/FStar.Option.fst new file mode 100644 index 00000000000..f0027c19bad --- /dev/null +++ b/stage0/ulib/FStar.Option.fst @@ -0,0 +1,62 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Option + +open FStar.All + +inline_for_extraction +val isNone: option 'a -> Tot bool +inline_for_extraction +let isNone = function + | None -> true + | Some _ -> false + +inline_for_extraction +val isSome: option 'a -> Tot bool +inline_for_extraction +let isSome = function + | Some _ -> true + | None -> false + +inline_for_extraction +val map: ('a -> ML 'b) -> option 'a -> ML (option 'b) +inline_for_extraction +let map f = function + | Some x -> Some (f x) + | None -> None + +inline_for_extraction +val mapTot: ('a -> Tot 'b) -> option 'a -> Tot (option 'b) +inline_for_extraction +let mapTot f = function + | Some x -> Some (f x) + | None -> None + +inline_for_extraction +val get: option 'a -> ML 'a +let get = function + | Some x -> x + | None -> failwith "empty option" + +let (let?) (x: option 'a) (f: 'a -> option 'b): option 'b + = match x with + | Some x -> f x + | None -> None + +let (and?) (x: option 'a) (y: option 'b): option ('a & 'b) + = match x, y with + | Some x, Some y -> Some (x, y) + | _ -> None diff --git a/stage0/ulib/FStar.OrdSetProps.fst b/stage0/ulib/FStar.OrdSetProps.fst new file mode 100644 index 00000000000..da66144787e --- /dev/null +++ b/stage0/ulib/FStar.OrdSetProps.fst @@ -0,0 +1,51 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.OrdSetProps + +open FStar.OrdSet + +val fold: #a:eqtype -> #b:Type -> #f:cmp a -> (a -> b -> Tot b) -> s:ordset a f -> b + -> Tot b (decreases (size s)) +let rec fold (#a:eqtype) (#b:Type) #f g s x = + if s = empty then x + else + let Some e = choose s in + let a_rest = fold g (remove e s) x in + g e a_rest + +(**********) + +let insert (#a:eqtype) (#f:cmp a) (x:a) (s:ordset a f) = union #a #f (singleton #a #f x) s + +val union':#a:eqtype -> #f:cmp a -> ordset a f -> ordset a f -> Tot (ordset a f) +let union' (#a:eqtype) #f s1 s2 = fold (fun e (s:ordset a f) -> insert e s) s1 s2 + +val union_lemma: #a:eqtype -> #f:cmp a -> s1:ordset a f -> s2:ordset a f + -> Lemma (requires (True)) + (ensures (forall x. mem x (union s1 s2) = mem x (union' s1 s2))) + (decreases (size s1)) +let rec union_lemma (#a:eqtype) #f s1 s2 = + if s1 = empty then () + else + union_lemma (remove (Some?.v (choose s1)) s1) s2 + +val union_lemma': #a:eqtype -> #f:cmp a -> s1:ordset a f -> s2:ordset a f + -> Lemma (requires (True)) + (ensures (union s1 s2 = union' s1 s2)) +let union_lemma' (#a:eqtype) #f s1 s2 = + union_lemma s1 s2; + eq_lemma (union s1 s2) (union' s1 s2) + diff --git a/stage0/ulib/FStar.Order.fst b/stage0/ulib/FStar.Order.fst new file mode 100644 index 00000000000..f157f6f04d3 --- /dev/null +++ b/stage0/ulib/FStar.Order.fst @@ -0,0 +1,86 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
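+
+   (* A quick sketch of how the [let?] operator from FStar.Option above is
+      meant to be used (hypothetical client code, not part of that module):
+      it threads [option] results without explicit matching, and [and?]
+      similarly pairs two options.
+
+        let add_opts (x y:option int) : option int =
+          let? a = x in
+          let? b = y in
+          Some (a + b)
+
+      [add_opts (Some 1) (Some 2)] yields [Some 3], and the computation
+      collapses to [None] as soon as either argument is [None]. *)
+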
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Order + +[@@plugin] +type order = | Lt | Eq | Gt + +// Some derived checks +val ge : order -> bool +let ge o = o <> Lt + +val le : order -> bool +let le o = o <> Gt + +val ne : order -> bool +let ne o = o <> Eq + +// Just for completeness and consistency... +val gt : order -> bool +let gt o = o = Gt + +val lt : order -> bool +let lt o = o = Lt + +val eq : order -> bool +let eq o = o = Eq + +// Lexicographical combination, thunked to be lazy +val lex : order -> (unit -> order) -> order +let lex o1 o2 = + match o1 with + | Lt -> Lt + | Eq -> o2 () + | Gt -> Gt + +val order_from_int : int -> order +let order_from_int i = + if i < 0 then Lt + else if i = 0 then Eq + else Gt + +val int_of_order : order -> int +let int_of_order = function + | Lt -> (-1) + | Eq -> 0 + | Gt -> 1 + +val compare_int : int -> int -> order +let compare_int i j = order_from_int (i - j) + +(* + * It promises to call the comparator in strictly smaller elements + * Useful when writing a comparator for an inductive type, + * that contains the list of itself as an argument to one of its + * data constructors + *) +let rec compare_list (#a:Type) + (l1 l2:list a) + (f:(x:a{x << l1} -> y:a{y << l2} -> order)) + : order + = match l1, l2 with + | [], [] -> Eq + | [], _ -> Lt + | _, [] -> Gt + | x::xs, y::ys -> lex (f x y) (fun _ -> compare_list xs ys f) + +val compare_option : ('a -> 'a -> order) -> option 'a -> option 'a -> order +let compare_option f x y = + match x, y with + | None , None -> Eq + | None , Some _ -> Lt + | Some _ , None -> Gt + | Some x , Some y -> f x y diff --git a/stage0/ulib/FStar.PCM.fst b/stage0/ulib/FStar.PCM.fst new file mode 100644 index 00000000000..6e5bd4cf7da --- /dev/null +++ b/stage0/ulib/FStar.PCM.fst @@ -0,0 +1,232 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.PCM + +/// This module defines the partial commutative monoid (PCM) algebraic structure, as well as helper +/// predicates and functions to manipulate PCMs. + +(**** Base definitions *) + +(** A symmetric relation *) +let symrel (a: Type u#a) = c:(a -> a -> prop) { (forall x y. 
c x y <==> c y x) } + +(** [pcm'] is a magma, the base for the partial commutative monoid *) +noeq +type pcm' (a:Type u#a) = { + composable: symrel a; + op: x:a -> y:a{composable x y} -> a; + one:a +} + +(** The type of a commutativity property *) +let lem_commutative (#a: Type u#a) (p:pcm' a) = + x:a -> + y:a{p.composable x y} -> + Lemma (p.op x y == p.op y x) + +(** The type of a left-associativity property *) +let lem_assoc_l (#a: Type u#a) (p:pcm' a) = + x:a -> + y:a -> + z:a{p.composable y z /\ p.composable x (p.op y z)} -> + Lemma (p.composable x y /\ + p.composable (p.op x y) z /\ + p.op x (p.op y z) == p.op (p.op x y) z) + + +(** The type of a right-associativity property *) +let lem_assoc_r (#a: Type u#a) (p:pcm' a) = + x:a -> + y:a -> + z:a {p.composable x y /\ + p.composable (p.op x y) z} -> + Lemma + (p.composable y z /\ + p.composable x (p.op y z) /\ + p.op x (p.op y z) == p.op (p.op x y) z) + +(** The type of the property characterizing the unit element of the monoid *) +let lem_is_unit (#a: Type u#a) (p:pcm' a) = + x:a -> + Lemma (p.composable x p.one /\ + p.op x p.one == x) + +(** Main type describing partial commutative monoids *) +noeq +type pcm (a:Type u#a) = { + p:pcm' a; + comm:lem_commutative p; + assoc: lem_assoc_l p; + assoc_r: lem_assoc_r p; + is_unit: lem_is_unit p; + refine: a -> prop +} + +(**** Derived predicates *) + + +(** Returns the composable predicate of the PCM *) +let composable (#a: Type u#a) (p:pcm a) (x y:a) = p.p.composable x y + +(** Calls the operation of the PCM *) +let op (#a: Type u#a) (p:pcm a) (x:a) (y:a{composable p x y}) = p.p.op x y + +(** + Two elements [x] and [y] are compatible with respect to a PCM if their subtraction + is well-defined, e.g. if there exists an element [frame] such that [x * z = y] +*) +let compatible (#a: Type u#a) (pcm:pcm a) (x y:a) = + (exists (frame:a). + composable pcm x frame /\ op pcm frame x == y + ) + +(** Compatibility is reflexive *) +let compatible_refl + (#a: Type u#a) (pcm:pcm a) (x:a) + : Lemma (compatible pcm x x) + = + pcm.is_unit x; + pcm.comm x pcm.p.one; + assert (op pcm pcm.p.one x == x) + +(** Compatibility is transitive *) +let compatible_trans + (#a: Type u#a) (pcm:pcm a) (x y z:a) + : Lemma (requires (compatible pcm x y /\ compatible pcm y z)) + (ensures (compatible pcm x z)) + = Classical.forall_intro_3 pcm.assoc + +(** + Helper function to get access to the existentially quantified frame between two compatible + elements +*) +let compatible_elim + (#a: Type u#a) (pcm:pcm a) (x y:a) + (goal: Type) + (lemma: (frame: a{composable pcm x frame /\ op pcm frame x == y}) -> + Lemma (goal) + ) + : Lemma (requires (compatible pcm x y)) (ensures (goal)) + = + Classical.exists_elim + goal #a #(fun frame -> composable pcm x frame /\ op pcm frame x == y) + () (fun frame -> lemma frame) + +let compatible_intro + (#a: Type u#a) (pcm:pcm a) (x y:a) + (frame: a) + : Lemma + (requires (composable pcm x frame /\ op pcm frame x == y)) + (ensures (compatible pcm x y)) + = () + +(** Two elements are joinable when they can evolve to a common point. *) +let joinable #a (p:pcm a) (x y : a) : prop = + exists z. compatible p x z /\ compatible p y z + +let frame_compatible #a (p:pcm a) (x:FStar.Ghost.erased a) (v y:a) = + (forall (frame:a). 
{:pattern (composable p x frame)} + composable p x frame /\ + v == op p x frame ==> + composable p y frame /\ + v == op p y frame) + +(* + * Frame preserving updates from x to y + * - should preserve all frames, + * - and a frame containing rest of the PCM value should continue to do so + *) + +type frame_preserving_upd (#a:Type u#a) (p:pcm a) (x y:a) = + v:a{ + p.refine v /\ + compatible p x v + } -> + v_new:a{ + p.refine v_new /\ + compatible p y v_new /\ + (forall (frame:a{composable p x frame}).{:pattern composable p x frame} + composable p y frame /\ + (op p x frame == v ==> op p y frame == v_new))} + + +(* + * A specific case of frame preserving updates when y is a refined value + * + * All the frames of x should compose with--and the composition should result in--y + *) +let frame_preserving (#a: Type u#a) (pcm:pcm a) (x y: a) = + (forall frame. composable pcm frame x ==> composable pcm frame y) /\ + (forall frame.{:pattern (composable pcm frame x)} composable pcm frame x ==> op pcm frame y == y) + +(* + * As expected, given frame_preserving, we can construct a frame_preserving_update + *) +let frame_preserving_val_to_fp_upd (#a:Type u#a) (p:pcm a) + (x:Ghost.erased a) (v:a{frame_preserving p x v /\ p.refine v}) + : frame_preserving_upd p x v + = Classical.forall_intro (p.comm v); + fun _ -> v + +(** The PCM [p] is exclusive to element [x] if the only element composable with [x] is [p.one] *) +let exclusive (#a:Type u#a) (p:pcm a) (x:a) = + forall (frame:a). composable p x frame ==> frame == p.p.one + +(** A mutation from [x] to [p.one] is frame preserving if [p] is exclusive to [x] *) +let exclusive_is_frame_preserving (#a: Type u#a) (p:pcm a) (x:a) + : Lemma (requires exclusive p x) + (ensures frame_preserving p x p.p.one) + = p.is_unit x; + p.is_unit p.p.one + +(* Some sanity checks on the definition of frame preserving updates *) + +let no_op_is_frame_preserving (#a:Type u#a) (p:pcm a) + (x:a) + : frame_preserving_upd p x x + = fun v -> v + +let compose_frame_preserving_updates (#a:Type u#a) (p:pcm a) + (x y z:a) + (f:frame_preserving_upd p x y) + (g:frame_preserving_upd p y z) + : frame_preserving_upd p x z + = fun v -> g (f v) + +let frame_preserving_subframe (#a:Type u#a) (p:pcm a) (x y:a) + (subframe:a{composable p x subframe /\ composable p y subframe}) + (f:frame_preserving_upd p x y) + : frame_preserving_upd p (op p x subframe) (op p y subframe) + = fun v -> + compatible_elim p (op p x subframe) v (compatible p x v) (fun frame -> + p.comm x subframe; + p.assoc frame subframe x); + let w = f v in + let aux (frame: a{composable p (op p x subframe) frame}): + Lemma (composable p (op p y subframe) frame /\ + (op p (op p x subframe) frame == v ==> op p (op p y subframe) frame == w)) + [SMTPat (composable p (op p y subframe) frame)] + = p.assoc_r x subframe frame; + assert (composable p x (op p subframe frame)); + assert (composable p y (op p subframe frame)); + p.assoc y subframe frame + in + compatible_elim p (op p x subframe) v (compatible p (op p y subframe) w) (fun frame -> + aux frame; + p.comm frame (op p x subframe); + p.comm (op p y subframe) frame); + w + diff --git a/stage0/ulib/FStar.Parse.fst b/stage0/ulib/FStar.Parse.fst new file mode 100644 index 00000000000..b3835bc746c --- /dev/null +++ b/stage0/ulib/FStar.Parse.fst @@ -0,0 +1,24 @@ +(* + Copyright 2008-2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
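+
+   (* A small worked instance of the [pcm] record from FStar.PCM above (an
+      illustrative sketch, not part of that module): natural-number addition,
+      where any two elements are composable and 0 is the unit.
+
+        let nat_plus : pcm nat = {
+          p = { composable = (fun _ _ -> True);
+                op = (fun x y -> x + y);
+                one = 0 };
+          comm = (fun _ _ -> ());
+          assoc = (fun _ _ _ -> ());
+          assoc_r = (fun _ _ _ -> ());
+          is_unit = (fun _ -> ());
+          refine = (fun _ -> True)
+        }
+
+      With this instance, [compatible nat_plus x y] coincides with [x <= y],
+      since the required frame is just [y - x]. *)
+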
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Parse + +(** A primitive parser for booleans *) +assume +val bool_of_string: string -> Tot (option bool) + +(** A primitive parser for [int] *) +assume +val int_of_string: string -> Tot (option int) diff --git a/stage0/ulib/FStar.PartialMap.fst b/stage0/ulib/FStar.PartialMap.fst new file mode 100644 index 00000000000..0fc813960b9 --- /dev/null +++ b/stage0/ulib/FStar.PartialMap.fst @@ -0,0 +1,40 @@ +(* + Copyright 2008-2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: Aseem Rastogi +*) + +module FStar.PartialMap + +open FStar.FunctionalExtensionality + +type t k v = k ^-> option v + +let empty _ _ = on_dom _ (fun _ -> None) +let literal f = on_dom _ (fun x -> f x) +let sel m x = m x +let upd m x y = on_dom _ (fun x1 -> if x1 = x then Some y else m x1) +let remove m x = on_dom _ (fun x1 -> if x1 = x then None else m x1) + +let sel_empty _ _ = () +let sel_literal _ _ = () +let sel_upd _ _ _ = () +let sel_upd_distinct_key _ _ _ _ = () +let sel_remove _ _ = () +let sel_remove_distinct_key _ _ _ = () + +let equal m1 m2 = feq m1 m2 /\ True +let eq_intro _ _ = () +let eq_elim _ _ = () diff --git a/stage0/ulib/FStar.PartialMap.fsti b/stage0/ulib/FStar.PartialMap.fsti new file mode 100644 index 00000000000..3795a2882d3 --- /dev/null +++ b/stage0/ulib/FStar.PartialMap.fsti @@ -0,0 +1,100 @@ +(* + Copyright 2008-2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + Author: Aseem Rastogi +*) + +/// A partial map, partial in the sense that selecting a key in the map may fail +/// (by returning None) + +module FStar.PartialMap + +/// The main map type + +val t (k:eqtype) ([@@@strictly_positive] v:Type u#a) : Type u#a + +/// An empty map + +val empty (k:eqtype) (v:Type) : t k v + +/// A constructor that constructs the map from a function + +val literal (#k:eqtype) (#v:Type) (f:k -> option v) : t k v + +/// Select a key from the map, may fail by returning None + +val sel (#k:eqtype) (#v:Type) (m:t k v) (x:k) : option v + +/// Updating a key in the map + +val upd (#k:eqtype) (#v:Type) (m:t k v) (x:k) (y:v) : t k v + +/// Removing a key from the map + +val remove (#k:eqtype) (#v:Type) (m:t k v) (x:k) : t k v + +/// Helper function to check if a key exists in the map + +let contains (#k:eqtype) (#v:Type) (m:t k v) (x:k) : bool = + Some? (sel m x) + +/// A constant map + +let const (k:eqtype) (#v:Type) (y:v) : t k v = + literal (fun x -> Some y) + +/// The reasoning principles provided by the map + +val sel_empty (#k:eqtype) (v:Type) (x:k) + : Lemma (ensures sel (empty k v) x == None) + [SMTPat (sel (empty k v) x)] + +val sel_literal (#k:eqtype) (#v:Type) (f:k -> option v) (x:k) + : Lemma (ensures sel (literal f) x == f x) + [SMTPat (sel (literal f) x)] + +val sel_upd (#k:eqtype) (#v:Type) (m:t k v) (x:k) (y:v) + : Lemma (ensures sel (upd m x y) x == Some y) + [SMTPat (sel (upd m x y) x)] + +val sel_upd_distinct_key (#k:eqtype) (#v:Type) (m:t k v) (x1 x2:k) (y:v) + : Lemma (requires x1 =!= x2) + (ensures sel (upd m x1 y) x2 == sel m x2) + [SMTPat (sel (upd m x1 y) x2)] + +val sel_remove (#k:eqtype) (#v:Type) (m:t k v) (x:k) + : Lemma (ensures sel (remove m x) x == None) + [SMTPat (sel (remove m x) x)] + +val sel_remove_distinct_key (#k:eqtype) (#v:Type) (m:t k v) (x1 x2:k) + : Lemma (requires x1 =!= x2) + (ensures sel (remove m x1) x2 == sel m x2) + [SMTPat (sel (remove m x1) x2)] + +/// The map type supports extensional equality +/// +/// Below are the intro and elim forms + +val equal (#k:eqtype) (#v:Type) (m1 m2:t k v) : prop + +val eq_intro (#k:eqtype) (#v:Type) (m1 m2:t k v) + : Lemma (requires forall (x:k). sel m1 x == sel m2 x) + (ensures equal m1 m2) + [SMTPat (equal m1 m2)] + +val eq_elim (#k:eqtype) (#v:Type) (m1 m2:t k v) + : Lemma (requires equal m1 m2) + (ensures m1 == m2) + [SMTPat (equal m1 m2)] diff --git a/stage0/ulib/FStar.Pervasives.Native.fst b/stage0/ulib/FStar.Pervasives.Native.fst new file mode 100644 index 00000000000..f6db654977b --- /dev/null +++ b/stage0/ulib/FStar.Pervasives.Native.fst @@ -0,0 +1,175 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Pervasives.Native + +(* This is a file from the core library, dependencies must be explicit *) +open Prims + +/// This module is implicitly opened in the scope of all other modules. +/// +/// It provides several basic types in F* that enjoy some special +/// status in extraction. 
For instance, the tuple type below is +/// compiled to OCaml's tuple type, rather than to a F*-defined +/// inductive type. See ulib/ml/FStar_Pervasives_Native.ml +/// + +(** [option a] represents either [Some a]-value or a non-informative [None]. *) +type option (a: Type) = + | None : option a + | Some : v: a -> option a + +(**** Tuples *) + +/// Aside from special support in extraction, the tuple types have +/// special syntax in F*. +/// +/// For instance, rather than [tupleN a1 ... aN], +/// we usually write [a1 & ... & aN] or [a1 * ... * aN]. +/// +/// The latter notation is more common for those coming to F* from +/// OCaml or F#. However, the [*] also clashes with the multiplication +/// operator on integers define in FStar.Mul. For this reason, we now +/// prefer to use the [&] notation, though there are still many uses +/// of [*] remaining. +/// +/// Tuple values are introduced using as [a1, ..., an], rather than +/// [MktupleN a1 ... aN]. +/// +/// We define tuples up to a fixed arity of 14. We have considered +/// splitting this module into 14 different modules, one for each +/// tuple type rather than eagerly including 14-tuples in the +/// dependence graph of all programs. + +(** Pairs: [tuple2 a b] is can be written either as [a * b], for + notation compatible with OCaml's. Or, better, as [a & b]. *) +type tuple2 'a 'b = | Mktuple2 : _1: 'a -> _2: 'b -> tuple2 'a 'b + +(** The fst and snd projections on pairs are very common *) +let fst (x: tuple2 'a 'b) : 'a = Mktuple2?._1 x +let snd (x: tuple2 'a 'b) : 'b = Mktuple2?._2 x + +type tuple3 'a 'b 'c = | Mktuple3 : _1: 'a -> _2: 'b -> _3: 'c -> tuple3 'a 'b 'c + +type tuple4 'a 'b 'c 'd = | Mktuple4 : _1: 'a -> _2: 'b -> _3: 'c -> _4: 'd -> tuple4 'a 'b 'c 'd + +type tuple5 'a 'b 'c 'd 'e = + | Mktuple5 : _1: 'a -> _2: 'b -> _3: 'c -> _4: 'd -> _5: 'e -> tuple5 'a 'b 'c 'd 'e + +type tuple6 'a 'b 'c 'd 'e 'f = + | Mktuple6 : _1: 'a -> _2: 'b -> _3: 'c -> _4: 'd -> _5: 'e -> _6: 'f -> tuple6 'a 'b 'c 'd 'e 'f + +type tuple7 'a 'b 'c 'd 'e 'f 'g = + | Mktuple7 : _1: 'a -> _2: 'b -> _3: 'c -> _4: 'd -> _5: 'e -> _6: 'f -> _7: 'g + -> tuple7 'a 'b 'c 'd 'e 'f 'g + +type tuple8 'a 'b 'c 'd 'e 'f 'g 'h = + | Mktuple8 : _1: 'a -> _2: 'b -> _3: 'c -> _4: 'd -> _5: 'e -> _6: 'f -> _7: 'g -> _8: 'h + -> tuple8 'a 'b 'c 'd 'e 'f 'g 'h + +type tuple9 'a 'b 'c 'd 'e 'f 'g 'h 'i = + | Mktuple9 : + _1: 'a -> + _2: 'b -> + _3: 'c -> + _4: 'd -> + _5: 'e -> + _6: 'f -> + _7: 'g -> + _8: 'h -> + _9: 'i + -> tuple9 'a 'b 'c 'd 'e 'f 'g 'h 'i + +type tuple10 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j = + | Mktuple10 : + _1: 'a -> + _2: 'b -> + _3: 'c -> + _4: 'd -> + _5: 'e -> + _6: 'f -> + _7: 'g -> + _8: 'h -> + _9: 'i -> + _10: 'j + -> tuple10 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j + +type tuple11 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k = + | Mktuple11 : + _1: 'a -> + _2: 'b -> + _3: 'c -> + _4: 'd -> + _5: 'e -> + _6: 'f -> + _7: 'g -> + _8: 'h -> + _9: 'i -> + _10: 'j -> + _11: 'k + -> tuple11 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k + +type tuple12 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k 'l = + | Mktuple12 : + _1: 'a -> + _2: 'b -> + _3: 'c -> + _4: 'd -> + _5: 'e -> + _6: 'f -> + _7: 'g -> + _8: 'h -> + _9: 'i -> + _10: 'j -> + _11: 'k -> + _12: 'l + -> tuple12 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k 'l + +type tuple13 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k 'l 'm = + | Mktuple13 : + _1: 'a -> + _2: 'b -> + _3: 'c -> + _4: 'd -> + _5: 'e -> + _6: 'f -> + _7: 'g -> + _8: 'h -> + _9: 'i -> + _10: 'j -> + _11: 'k -> + _12: 'l -> + _13: 'm + -> tuple13 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k 'l 'm + +type 
tuple14 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k 'l 'm 'n =
+  | Mktuple14 :
+      _1: 'a ->
+      _2: 'b ->
+      _3: 'c ->
+      _4: 'd ->
+      _5: 'e ->
+      _6: 'f ->
+      _7: 'g ->
+      _8: 'h ->
+      _9: 'i ->
+      _10: 'j ->
+      _11: 'k ->
+      _12: 'l ->
+      _13: 'm ->
+      _14: 'n
+    -> tuple14 'a 'b 'c 'd 'e 'f 'g 'h 'i 'j 'k 'l 'm 'n
+
diff --git a/stage0/ulib/FStar.Pervasives.fst b/stage0/ulib/FStar.Pervasives.fst
new file mode 100644
index 00000000000..42987e0cdc6
--- /dev/null
+++ b/stage0/ulib/FStar.Pervasives.fst
@@ -0,0 +1,206 @@
+(*
+   Copyright 2008-2018 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*)
+
+module FStar.Pervasives
+
+(* This is a file from the core library, dependencies must be explicit *)
+open Prims
+
+/// Implementation of FStar.Pervasives.fsti
+let remove_unused_type_parameters _ = ()
+
+let smt_pat #_ _ = ()
+
+let smt_pat_or _ = ()
+
+let spinoff p = p
+
+#push-options "--no_tactics"
+let spinoff_eq _ = ()
+let spinoff_equiv _ = ()
+#pop-options
+
+let assert_spinoff _ = ()
+
+let ambient #_ _ = True
+
+let intro_ambient #_ _ = ()
+
+let normalize_term #_ x = x
+
+let normalize a = a
+
+noeq
+type norm_step =
+  | Simpl // Logical simplification, e.g., [P /\ True ~> P]
+  | Weak // Weak reduction: Do not reduce under binders
+  | HNF // Head normal form
+  | Primops // Reduce primitive operators, e.g., [1 + 1 ~> 2]
+  | Delta // Unfold all non-recursive definitions
+  | Zeta // Unroll recursive calls
+  | ZetaFull // Unroll recursive calls fully
+  | Iota // Reduce case analysis (i.e., match)
+  | NBE // Use normalization-by-evaluation, instead of interpretation (experimental)
+  | Reify // Reify effectful definitions into their representations
+  | NormDebug // Turn on debugging for this call
+  | UnfoldOnly : list string -> norm_step // Unlike Delta, unfold definitions for only the given
+                                          // names, each string is a fully qualified name
+                                          // like `A.M.f`
+  // idem
+  | UnfoldFully : list string -> norm_step
+  | UnfoldAttr : list string -> norm_step // Unfold definitions marked with the given attributes
+  | UnfoldQual : list string -> norm_step
+  | UnfoldNamespace : list string -> norm_step
+  | Unmeta : norm_step
+  | Unascribe // Remove type ascriptions [t <: ty ~> t]
+
+irreducible
+let simplify = Simpl
+
+irreducible
+let weak = Weak
+
+irreducible
+let hnf = HNF
+
+irreducible
+let primops = Primops
+
+irreducible
+let delta = Delta
+
+irreducible
+let norm_debug = NormDebug
+
+irreducible
+let zeta = Zeta
+
+irreducible
+let zeta_full = ZetaFull
+
+irreducible
+let iota = Iota
+
+irreducible
+let nbe = NBE
+
+irreducible
+let reify_ = Reify
+
+irreducible
+let delta_only s = UnfoldOnly s
+
+irreducible
+let delta_fully s = UnfoldFully s
+
+irreducible
+let delta_attr s = UnfoldAttr s
+
+irreducible
+let delta_qualifier s = UnfoldQual s
+
+irreducible
+let delta_namespace s = UnfoldNamespace s
+
+irreducible
+let unmeta = Unmeta
+
+irreducible
+let unascribe = Unascribe
+
+let norm _ #_ x = x
+
+let assert_norm _ = ()
+
+let normalize_term_spec #_ _ = ()
+
+let normalize_spec _ = ()
+
+let norm_spec _ #_ _
= () + +let inversion _ = True + +let allow_inversion _ = () + +let invertOption _ = () + +let rec false_elim #_ _ = false_elim () + +let inline_let = () + +let rename_let _ = () + +let plugin _ = () + +let tcnorm = () + +let must_erase_for_extraction = () + +let dm4f_bind_range = () + +let expect_failure _ = () + +let expect_lax_failure _ = () + +let tcdecltime = () + +let unifier_hint_injective = () + +let strict_on_arguments _ = () + +let resolve_implicits = () + +let override_resolve_implicits_handler #a x l = () + +let handle_smt_goals = () + +let erasable = () + +let commute_nested_matches = () + +let noextract_to _ = () + +let normalize_for_extraction _ = () + +let ite_soundness_by _ = () + +let default_effect _ = () +let top_level_effect _ = () +let effect_param = () +let bind_has_range_args = () +let primitive_extraction = () + +let extract_as_impure_effect = () + +let strictly_positive = () + +let unused = () + +let no_auto_projectors = () + +let no_auto_projectors_decls = () + +let no_subtyping = () + +let admit_termination = () + +let singleton #_ x = x + +let coercion = () + +let desugar_of_variant_record _ = () + +let defer_to #a (_:a) = () diff --git a/stage0/ulib/FStar.Pervasives.fsti b/stage0/ulib/FStar.Pervasives.fsti new file mode 100644 index 00000000000..31dd09c6942 --- /dev/null +++ b/stage0/ulib/FStar.Pervasives.fsti @@ -0,0 +1,1231 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Pervasives + +(* This is a file from the core library, dependencies must be explicit *) +open Prims +include FStar.Pervasives.Native + +/// This module is implicitly opened in the scope of all other +/// modules. +/// +/// It provides several basic definitions in F* that are common to +/// most programs. Broadly, these include: +/// +/// - Utility types and functions, like [id], [either], dependent +/// tuples, etc. +/// +/// - Utility effect definitions, including [DIV] for divergence, +/// [EXN] of exceptions, [STATE_h] a template for state, and (the +/// poorly named) [ALL_h] which combines them all. +/// +/// - Some utilities to control proofs, e.g., inversion of inductive +/// type definitions. +/// +/// - Built-in attributes that can be used to decorate definitions and +/// trigger various kinds of special treatments for those +/// definitions. + +(** [remove_unused_type_parameters] + + This attribute is used to decorate signatures in interfaces for + type abbreviations, indicating that the 0-based positional + parameters are unused in the definition and should be eliminated + for extraction. + + This is important particularly for use with F# extraction, since + F# does not accept type abbreviations with unused type parameters. 
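+
+    For instance, an interface might (hypothetically; the names below are
+    illustrative, not from the F* sources) declare
+
+    {[
+      [@@ remove_unused_type_parameters [1]]
+      val tagged (a:Type) (phantom:Type) : Type
+    ]}
+
+    telling extraction that the parameter in position 1 (0-based) can be
+    dropped from the extracted abbreviation.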
+ + See tests/bug-reports/RemoveUnusedTyparsIFace.A.fsti + *) +val remove_unused_type_parameters : list int -> Tot unit + +(** Values of type [pattern] are used to tag [Lemma]s with SMT + quantifier triggers *) +type pattern : Type0 = unit + +(** The concrete syntax [SMTPat] desugars to [smt_pat] *) +val smt_pat (#a: Type) (x: a) : Tot pattern + +(** The concrete syntax [SMTPatOr] desugars to [smt_pat_or]. This is + used to represent a disjunction of conjunctions of patterns. + + Note, the typing discipline and syntax of patterns is laxer than + it should be. Patterns like [SMTPatOr [SMTPatOr [...]]] are + expressible, but unsupported by F* + + TODO: We should tighten this up, perhaps just reusing the + attribute mechanism for patterns. +*) +val smt_pat_or (x: list (list pattern)) : Tot pattern + +(** eqtype is defined in prims at universe 0 + + Although, usually, only universe 0 types have decidable equality, + sometimes it is possible to define a type in a higher univese also + with decidable equality (e.g., type t : Type u#1 = | Unit) + + Further, sometimes, as in Lemma below, we need to use a + universe-polymorphic equality type (although it is only ever + instantiated with `unit`) +*) +type eqtype_u = a:Type{hasEq a} + +(** [Lemma] is a very widely used effect abbreviation. + + It stands for a unit-returning [Ghost] computation, whose main + value is its logical payload in proving an implication between its + pre- and postcondition. + + [Lemma] is desugared specially. The valid forms are: + + Lemma (ensures post) + Lemma post [SMTPat ...] + Lemma (ensures post) [SMTPat ...] + Lemma (ensures post) (decreases d) + Lemma (ensures post) (decreases d) [SMTPat ...] + Lemma (requires pre) (ensures post) (decreases d) + Lemma (requires pre) (ensures post) [SMTPat ...] + Lemma (requires pre) (ensures post) (decreases d) [SMTPat ...] + + and + + Lemma post (== Lemma (ensures post)) + + the squash argument on the postcondition allows to assume the + precondition for the *well-formedness* of the postcondition. +*) +effect Lemma (a: eqtype_u) (pre: Type) (post: (squash pre -> Type)) (pats: list pattern) = + Pure a pre (fun r -> post ()) + +(** IN the default mode of operation, all proofs in a verification + condition are bundled into a single SMT query. Sub-terms marked + with the [spinoff] below are the exception: each of them is + spawned off into a separate SMT query *) +val spinoff (p: Type0) : Type0 + +val spinoff_eq (p:Type0) : Lemma (spinoff p == p) + +val spinoff_equiv (p:Type0) : Lemma (p <==> spinoff p) [SMTPat (spinoff p)] + +(** Logically equivalent to assert, but spins off separate query *) +val assert_spinoff (p: Type) : Pure unit (requires (spinoff (squash p))) (ensures (fun x -> p)) + +(** The polymorphic identity function *) +unfold +let id (#a: Type) (x: a) : a = x + +(** Trivial postconditions for the [PURE] effect *) +unfold +let trivial_pure_post (a: Type) : pure_post a = fun _ -> True + +(** Sometimes it is convenient to explicit introduce nullary symbols + into the ambient context, so that SMT can appeal to their definitions + even when they are no mentioned explicitly in the program, e.g., when + needed for triggers. + + Use [intro_ambient t] for that. + See, e.g., LowStar.Monotonic.Buffer.fst and its usage there for loc_none *) +[@@ remove_unused_type_parameters [0; 1;]] +val ambient (#a: Type) (x: a) : Type0 + +(** cf. 
[ambient], above *) +val intro_ambient (#a: Type) (x: a) : Tot (squash (ambient x)) + + +/// Controlling normalization + +(** In any invocation of the F* normalizer, every occurrence of + [normalize_term e] is reduced to the full normal for of [e]. *) +val normalize_term (#a: Type) (x: a) : Tot a + +(** In any invocation of the F* normalizer, every occurrence of + [normalize e] is reduced to the full normal for of [e]. *) +val normalize (a: Type0) : Type0 + +(** Value of [norm_step] are used to enable specific normalization + steps, controlling how the normalizer reduces terms. *) +val norm_step : Type0 + +(** Logical simplification, e.g., [P /\ True ~> P] *) +val simplify : norm_step + +(** Weak reduction: Do not reduce under binders *) +val weak : norm_step + +(** Head normal form: Do not reduce in function arguments or in binder types *) +val hnf : norm_step + +(** Reduce primitive operators, e.g., [1 + 1 ~> 2] *) +val primops : norm_step + +(** Unfold all non-recursive definitions *) +val delta : norm_step + +(** Turn on debugging for this specific call. *) +val norm_debug : norm_step + +(** Unroll recursive calls + + Note: Since F*'s termination check is semantic rather than + syntactically structural, recursive calls in inconsistent contexts, + or recursive evaluation of open terms can diverge. + + When asking for the [zeta] step, F* implements a heuristic to + disable [zeta] when reducing terms beneath a blocked match. This + helps prevent some trivial looping behavior. However, it also + means that with [zeta] alone, your term may not reduce as much as + you might want. See [zeta_full] for that. + *) +val zeta : norm_step + +(** Unroll recursive calls + + Unlike [zeta], [zeta_full] has no looping prevention + heuristics. F* will try to unroll recursive functions as much as + it can, potentially looping. Use with care. + + Note, [zeta_full] implies [zeta]. + See [tests/micro-benchmarks/ReduceRecUnderMatch.fst] for an example. + *) +val zeta_full : norm_step + +(** Reduce case analysis (i.e., match) *) +val iota : norm_step + +(** Use normalization-by-evaluation, instead of interpretation (experimental) *) +val nbe : norm_step + +(** Reify effectful definitions into their representations *) +val reify_ : norm_step + +(** Unlike [delta], unfold definitions for only the names in the given + list. Each string is a fully qualified name like [A.M.f] *) +val delta_only (s: list string) : Tot norm_step + +(** Unfold definitions for only the names in the given list, but + unfold each definition encountered after unfolding as well. + + For example, given + + {[ + let f0 = 0 + let f1 = f0 + 1 + ]} + + [norm [delta_only [`%f1]] f1] will reduce to [f0 + 1]. + [norm [delta_fully [`%f1]] f1] will reduce to [0 + 1]. + + Each string is a fully qualified name like [A.M.f], typically + constructed using a quotation, as in the example above. *) +val delta_fully (s: list string) : Tot norm_step + +(** Rather than mention a symbol to unfold by name, it can be + convenient to tag a collection of related symbols with a common + attribute and then to ask the normalizer to reduce them all. + + For example, given: + + {[ + irreducible let my_attr = () + + [@@my_attr] + let f0 = 0 + + [@@my_attr] + let f1 = f0 + 1 + ]} + + {[norm [delta_attr [`%my_attr]] f1]} + + will reduce to [0 + 1]. 
+ + *) +val delta_attr (s: list string) : Tot norm_step + +(** + For example, given: + + {[ + unfold + let f0 = 0 + + inline_for_extraction + let f1 = f0 + 1 + + ]} + + {[norm [delta_qualifier ["unfold"; "inline_for_extraction"]] f1]} + + will reduce to [0 + 1]. + + *) +val delta_qualifier (s: list string) : Tot norm_step + +val delta_namespace (s: list string) : Tot norm_step + +(** + This step removes the some internal meta nodes during normalization + + In most cases you shouldn't need to use this step explicitly + + *) +val unmeta : norm_step + +(** + This step removes ascriptions during normalization + + An ascription is a type or computation type annotation on + an expression, written as (e <: t) or (e <: C) + + normalize (e <: (t|C)) usually would normalize both the expression e + and the ascription + + However, with unascribe step on, it will drop the ascription + and return the result of (normalize e), + + Removing ascriptions may improve the performance, + as the normalization has less work to do + + However, ascriptions help in re-typechecking of the terms, + and in some cases, are necessary for doing so + + Use it with care + + *) +val unascribe : norm_step + +(** [norm s e] requests normalization of [e] with the reduction steps + [s]. *) +val norm (s: list norm_step) (#a: Type) (x: a) : Tot a + +(** [assert_norm p] reduces [p] as much as possible and then asks the + SMT solver to prove the reduct, concluding [p] *) +val assert_norm (p: Type) : Pure unit (requires (normalize p)) (ensures (fun _ -> p)) + +(** Sometimes it is convenient to introduce an equation between a term + and its normal form in the context. *) +val normalize_term_spec (#a: Type) (x: a) : Lemma (normalize_term #a x == x) + +(** Like [normalize_term_spec], but specialized to [Type0] *) +val normalize_spec (a: Type0) : Lemma (normalize a == a) + +(** Like [normalize_term_spec], but with specific normalization steps *) +val norm_spec (s: list norm_step) (#a: Type) (x: a) : Lemma (norm s #a x == x) + +(** Use the following to expose an ["opaque_to_smt"] definition to the + solver as: [reveal_opaque (`%defn) defn]. NB: zeta is needed in + the case where the definition is recursive. 
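+
+    As a purely illustrative sketch (the names below are not part of the
+    library), given
+
+    {[
+      [@@ "opaque_to_smt"]
+      let secret_bound = 42
+    ]}
+
+    a proof that needs the body of [secret_bound] can expose it with
+    [reveal_opaque (`%secret_bound) secret_bound], which in effect gives the
+    solver the equation [secret_bound == 42].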
*) +let reveal_opaque (s: string) = norm_spec [delta_only [s]; zeta] + +(** Wrappers over pure wp combinators that return a pure_wp type + (with monotonicity refinement) *) + +unfold +let pure_return (a:Type) (x:a) : pure_wp a = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_return0 a x + +unfold +let pure_bind_wp (a b:Type) (wp1:pure_wp a) (wp2:(a -> Tot (pure_wp b))) : Tot (pure_wp b) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_bind_wp0 a b wp1 wp2 + +unfold +let pure_if_then_else (a p:Type) (wp_then wp_else:pure_wp a) : Tot (pure_wp a) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_if_then_else0 a p wp_then wp_else + +unfold +let pure_ite_wp (a:Type) (wp:pure_wp a) : Tot (pure_wp a) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_ite_wp0 a wp + +unfold +let pure_close_wp (a b:Type) (wp:b -> Tot (pure_wp a)) : Tot (pure_wp a) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_close_wp0 a b wp + +unfold +let pure_null_wp (a:Type) : Tot (pure_wp a) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_null_wp0 a + +[@@ "opaque_to_smt"] +unfold +let pure_assert_wp (p:Type) : Tot (pure_wp unit) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_assert_wp0 p + +[@@ "opaque_to_smt"] +unfold +let pure_assume_wp (p:Type) : Tot (pure_wp unit) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + pure_assume_wp0 p + +/// The [DIV] effect for divergent computations +/// +/// The wp-calculus for [DIV] is same as that of [PURE] + + +(** The effect of divergence: from a specificational perspective it is + identical to PURE, however the specs are given a partial + correctness interpretation. Computations with the [DIV] effect may + not terminate. *) +new_effect { + DIV : a:Type -> wp:pure_wp a -> Effect + with + return_wp = pure_return + ; bind_wp = pure_bind_wp + ; if_then_else = pure_if_then_else + ; ite_wp = pure_ite_wp + ; stronger = pure_stronger + ; close_wp = pure_close_wp + ; trivial = pure_trivial +} + +(** [PURE] computations can be silently promoted for use in a [DIV] context *) +sub_effect PURE ~> DIV { lift_wp = purewp_id } + + +(** [Div] is the Hoare-style counterpart of the wp-indexed [DIV] *) +unfold +let div_hoare_to_wp (#a:Type) (#pre:pure_pre) (post:pure_post' a pre) : Tot (pure_wp a) = + reveal_opaque (`%pure_wp_monotonic) pure_wp_monotonic; + fun (p:pure_post a) -> pre /\ (forall a. post a ==> p a) + +effect Div (a: Type) (pre: pure_pre) (post: pure_post' a pre) = + DIV a (div_hoare_to_wp post) + + +(** [Dv] is the instance of [DIV] with trivial pre- and postconditions *) +effect Dv (a: Type) = DIV a (pure_null_wp a) + + +(** We use the [EXT] effect to underspecify external system calls + as being impure but having no observable effect on the state *) +effect EXT (a: Type) = Dv a + +/// The [STATE_h] effect template for stateful computations, generic +/// in the type of the state. +/// +/// Note, [STATE_h] is itself not a computation type in F*, since it +/// is parameterized by the type of heap. However, instantiations of +/// [STATE_h] with specific types of the heap are computation +/// types. See, e.g., [FStar.ST] for such instantiations. +/// +/// Weakest preconditions for stateful computations transform +/// [st_post_h] postconditions to [st_pre_h] preconditions. Both are +/// parametric in the type of the state, here denoted by the +/// [heap:Type] variable. 
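+///
+/// As a small, purely illustrative instance (not a definition of this
+/// module), for a given state type [heap], a "get"-like computation that
+/// returns the current state without modifying it would be specified by the
+/// transformer
+///
+/// {[
+///   fun (post: st_post_h heap heap) (h0: heap) -> post h0 h0
+/// ]}
+///
+/// which passes the initial heap to the postcondition both as the result and
+/// as the final state.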
+ +(** Preconditions are predicates on the [heap] *) +let st_pre_h (heap: Type) = heap -> GTot Type0 + +(** Postconditions relate [a]-typed results to the final [heap], here + refined by some pure proposition [pre], typically instantiated to + the precondition applied to the initial [heap] *) +let st_post_h' (heap a pre: Type) = a -> _: heap{pre} -> GTot Type0 + +(** Postconditions without refinements *) +let st_post_h (heap a: Type) = st_post_h' heap a True + +(** The type of the main WP-transformer for stateful computations *) +let st_wp_h (heap a: Type) = st_post_h heap a -> Tot (st_pre_h heap) + +(** Returning a value does not transform the state *) +unfold +let st_return (heap a: Type) (x: a) (p: st_post_h heap a) = p x + +(** Sequential composition of stateful WPs *) +unfold +let st_bind_wp + (heap: Type) + (a b: Type) + (wp1: st_wp_h heap a) + (wp2: (a -> GTot (st_wp_h heap b))) + (p: st_post_h heap b) + (h0: heap) + = wp1 (fun a h1 -> wp2 a p h1) h0 + +(** Branching for stateful WPs *) +unfold +let st_if_then_else + (heap a p: Type) + (wp_then wp_else: st_wp_h heap a) + (post: st_post_h heap a) + (h0: heap) + = wp_then post h0 /\ (~p ==> wp_else post h0) + +(** As with [PURE] the [wp] combinator names the postcondition as + [k] to avoid duplicating it. *) +unfold +let st_ite_wp (heap a: Type) (wp: st_wp_h heap a) (post: st_post_h heap a) (h0: heap) = + forall (k: st_post_h heap a). + (forall (x: a) (h: heap). {:pattern (guard_free (k x h))} post x h ==> k x h) ==> wp k h0 + +(** Subsumption for stateful WPs *) +unfold +let st_stronger (heap a: Type) (wp1 wp2: st_wp_h heap a) = + (forall (p: st_post_h heap a) (h: heap). wp1 p h ==> wp2 p h) + +(** Closing the scope of a binder within a stateful WP *) +unfold +let st_close_wp (heap a b: Type) (wp: (b -> GTot (st_wp_h heap a))) (p: st_post_h heap a) (h: heap) = + (forall (b: b). wp b p h) + +(** Applying a stateful WP to a trivial postcondition *) +unfold +let st_trivial (heap a: Type) (wp: st_wp_h heap a) = (forall h0. wp (fun r h1 -> True) h0) + +(** Introducing a new effect template [STATE_h] *) +new_effect { + STATE_h (heap: Type) : result: Type -> wp: st_wp_h heap result -> Effect + with + return_wp = st_return heap + ; bind_wp = st_bind_wp heap + ; if_then_else = st_if_then_else heap + ; ite_wp = st_ite_wp heap + ; stronger = st_stronger heap + ; close_wp = st_close_wp heap + ; trivial = st_trivial heap +} + +/// The [EXN] effect for computations that may raise exceptions or +/// fatal errors +/// +/// Weakest preconditions for stateful computations transform +/// [ex_post] postconditions (predicates on [result]s) to [ex_pre] +/// precondition propositions. + +(** Normal results are represented using [V x]. + Handleable exceptions are represented [E e]. + Fatal errors are [Err msg]. 
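+
+    For instance (a sketch only, not a definition of this module), integer
+    division with a fatal error on a zero divisor could be written as
+
+    {[
+      let checked_div (x y: int) : result int =
+        if y = 0 then Err "division by zero" else V (x / y)
+    ]}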
*) +noeq +type result (a: Type) = + | V : v: a -> result a + | E : e: exn -> result a + | Err : msg: string -> result a + +(** Exceptional preconditions are just propositions *) +let ex_pre = Type0 + +(** Postconditions on results refined by a precondition *) +let ex_post' (a pre: Type) = _: result a {pre} -> GTot Type0 + +(** Postconditions on results *) +let ex_post (a: Type) = ex_post' a True + +(** Exceptions WP-predicate transformers *) +let ex_wp (a: Type) = ex_post a -> GTot ex_pre + +(** Returning a value [x] normally promotes it to the [V x] result *) +unfold +let ex_return (a: Type) (x: a) (p: ex_post a) : GTot Type0 = p (V x) + +(** Sequential composition of exception-raising code requires case analysing + the result of the first computation before "running" the second one *) +unfold +let ex_bind_wp (a b: Type) (wp1: ex_wp a) (wp2: (a -> GTot (ex_wp b))) (p: ex_post b) + : GTot Type0 = + forall (k: ex_post b). + (forall (rb: result b). {:pattern (guard_free (k rb))} p rb ==> k rb) ==> + (wp1 (function + | V ra1 -> wp2 ra1 k + | E e -> k (E e) + | Err m -> k (Err m))) + +(** As for other effects, branching in [ex_wp] appears in two forms. + First, a simple case analysis on [p] *) +unfold +let ex_if_then_else (a p: Type) (wp_then wp_else: ex_wp a) (post: ex_post a) = + wp_then post /\ (~p ==> wp_else post) + +(** Naming continuations for use with branching *) +unfold +let ex_ite_wp (a: Type) (wp: ex_wp a) (post: ex_post a) = + forall (k: ex_post a). + (forall (rb: result a). {:pattern (guard_free (k rb))} post rb ==> k rb) ==> wp k + +(** Subsumption for exceptional WPs *) +unfold +let ex_stronger (a: Type) (wp1 wp2: ex_wp a) = (forall (p: ex_post a). wp1 p ==> wp2 p) + +(** Closing the scope of a binder for exceptional WPs *) +unfold +let ex_close_wp (a b: Type) (wp: (b -> GTot (ex_wp a))) (p: ex_post a) = (forall (b: b). wp b p) + +(** Applying a computation with a trivial postcondition *) +unfold +let ex_trivial (a: Type) (wp: ex_wp a) = wp (fun r -> True) + +(** Introduce a new effect for [EXN] *) +new_effect { + EXN : result: Type -> wp: ex_wp result -> Effect + with + return_wp = ex_return + ; bind_wp = ex_bind_wp + ; if_then_else = ex_if_then_else + ; ite_wp = ex_ite_wp + ; stronger = ex_stronger + ; close_wp = ex_close_wp + ; trivial = ex_trivial +} + +(** A Hoare-style abbreviation for EXN *) +effect Exn (a: Type) (pre: ex_pre) (post: ex_post' a pre) = + EXN a (fun (p: ex_post a) -> pre /\ (forall (r: result a). post r ==> p r)) + +(** We include divergence in exceptions. + + NOTE: BE WARNED, CODE IN THE [EXN] EFFECT IS ONLY CHECKED FOR + PARTIAL CORRECTNESS *) +unfold +let lift_div_exn (a: Type) (wp: pure_wp a) (p: ex_post a) = wp (fun a -> p (V a)) +sub_effect DIV ~> EXN { lift_wp = lift_div_exn } + +(** A variant of [Exn] with trivial pre- and postconditions *) +effect Ex (a: Type) = Exn a True (fun v -> True) + +/// The [ALL_h] effect template for computations that may diverge, +/// raise exceptions or fatal errors, and uses a generic state. +/// +/// Note, this effect is poorly named, particularly as F* has since +/// gained many more user-defined effect. We no longer have an effect +/// that includes all others. +/// +/// We might rename this in the future to something like [StExnDiv_h]. +/// +/// We layer state on top of exceptions, meaning that raising an +/// exception does not discard the state. +/// +/// As with [STATE_h], [ALL_h] is not a computation type, though its +/// instantiation with a specific type of [heap] (in FStar.All) is. 
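+///
+/// For intuition only (none of the names below are defined here), a
+/// computation that reads the state and may fail fatally could be given a
+/// transformer of the shape
+///
+/// {[
+///   fun (post: all_post_h heap int) (h0: heap) ->
+///     (ok h0 ==> post (V (read h0)) h0) /\
+///     (~(ok h0) ==> post (Err "bad state") h0)
+/// ]}
+///
+/// where [heap] is some state type and [ok] and [read] are hypothetical
+/// observations on it.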
+ +(** [all_pre_h] is a predicate on the initial state *) +let all_pre_h (h: Type) = h -> GTot Type0 + +(** Postconditions relate [result]s to final [heap]s refined by a precondition *) +let all_post_h' (h a pre: Type) = result a -> _: h{pre} -> GTot Type0 + +(** A variant of [all_post_h'] without the precondition refinement *) +let all_post_h (h a: Type) = all_post_h' h a True + +(** WP predicate transformers for the [All_h] effect template *) +let all_wp_h (h a: Type) = all_post_h h a -> Tot (all_pre_h h) + +(** Returning a value [x] normally promotes it to the [V x] result + without touching the [heap] *) +unfold +let all_return (heap a: Type) (x: a) (p: all_post_h heap a) = p (V x) + +(** Sequential composition for [ALL_h] is like [EXN]: case analysis of + the exceptional result before "running" the continuation *) +unfold +let all_bind_wp + (heap: Type) + (a b: Type) + (wp1: all_wp_h heap a) + (wp2: (a -> GTot (all_wp_h heap b))) + (p: all_post_h heap b) + (h0: heap) + : GTot Type0 = + wp1 (fun ra h1 -> + (match ra with + | V v -> wp2 v p h1 + | E e -> p (E e) h1 + | Err msg -> p (Err msg) h1)) + h0 + +(** Case analysis in [ALL_h] *) +unfold +let all_if_then_else + (heap a p: Type) + (wp_then wp_else: all_wp_h heap a) + (post: all_post_h heap a) + (h0: heap) + = wp_then post h0 /\ (~p ==> wp_else post h0) + +(** Naming postcondition for better sharing in [ALL_h] *) +unfold +let all_ite_wp (heap a: Type) (wp: all_wp_h heap a) (post: all_post_h heap a) (h0: heap) = + forall (k: all_post_h heap a). + (forall (x: result a) (h: heap). {:pattern (guard_free (k x h))} post x h ==> k x h) ==> wp k h0 + +(** Subsumption in [ALL_h] *) +unfold +let all_stronger (heap a: Type) (wp1 wp2: all_wp_h heap a) = + (forall (p: all_post_h heap a) (h: heap). wp1 p h ==> wp2 p h) + +(** Closing a binder in the scope of an [ALL_h] wp *) +unfold +let all_close_wp + (heap a b: Type) + (wp: (b -> GTot (all_wp_h heap a))) + (p: all_post_h heap a) + (h: heap) + = (forall (b: b). wp b p h) + +(** Applying an [ALL_h] wp to a trivial postcondition *) +unfold +let all_trivial (heap a: Type) (wp: all_wp_h heap a) = (forall (h0: heap). wp (fun r h1 -> True) h0) + +(** Introducing the [ALL_h] effect template *) +new_effect { + ALL_h (heap: Type) : a: Type -> wp: all_wp_h heap a -> Effect + with + return_wp = all_return heap + ; bind_wp = all_bind_wp heap + ; if_then_else = all_if_then_else heap + ; ite_wp = all_ite_wp heap + ; stronger = all_stronger heap + ; close_wp = all_close_wp heap + ; trivial = all_trivial heap +} + +(** + Controlling inversions of inductive type + + Given a value of an inductive type [v:t], where [t = A | B], the SMT + solver can only prove that [v=A \/ v=B] by _inverting_ [t]. This + inversion is controlled by the [ifuel] setting, which usually limits + the recursion depth of the number of such inversions that the solver + can perform. + + The [inversion] predicate below is a way to circumvent the + [ifuel]-based restrictions on inversion depth. In particular, if the + [inversion t] is available in the SMT solver's context, it is free to + invert [t] infinitely, regardless of the [ifuel] setting. + + Be careful using this, since it explicitly subverts the [ifuel] + setting. If used unwisely, this can lead to very poor SMT solver + performance. *) +[@@ remove_unused_type_parameters [0]] +val inversion (a: Type) : Type0 + +(** To introduce [inversion t] in the SMT solver's context, call + [allow_inversion t]. 
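+
+    For example (an illustrative sketch, not code from this module),
+
+    {[
+      type rgb = | R | G | B
+
+      let cases (c: rgb) : Lemma (c == R \/ c == G \/ c == B) =
+        allow_inversion rgb
+    ]}
+
+    lets the solver invert [rgb] in this proof irrespective of the [ifuel]
+    setting.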
*) +val allow_inversion (a: Type) : Pure unit (requires True) (ensures (fun x -> inversion a)) + +(** Since the [option] type is so common, we always allow inverting + options, regardless of [ifuel] *) +val invertOption (a: Type) + : Lemma (requires True) (ensures (forall (x: option a). None? x \/ Some? x)) [SMTPat (option a)] + +(** Values of type [a] or type [b] *) +type either a b = + | Inl : v: a -> either a b + | Inr : v: b -> either a b + +(** Projections for the components of a dependent pair *) +let dfst (#a: Type) (#b: a -> GTot Type) (t: dtuple2 a b) + : Tot a + = Mkdtuple2?._1 t + +let dsnd (#a: Type) (#b: a -> GTot Type) (t: dtuple2 a b) + : Tot (b (Mkdtuple2?._1 t)) + = Mkdtuple2?._2 t + +(** Dependent triples, with sugar [x:a & y:b x & c x y] *) +unopteq +type dtuple3 (a: Type) (b: (a -> GTot Type)) (c: (x: a -> b x -> GTot Type)) = + | Mkdtuple3 : _1: a -> _2: b _1 -> _3: c _1 _2 -> dtuple3 a b c + +(** Dependent quadruples, with sugar [x:a & y:b x & z:c x y & d x y z] *) +unopteq +type dtuple4 + (a: Type) (b: (x: a -> GTot Type)) (c: (x: a -> b x -> GTot Type)) + (d: (x: a -> y: b x -> z: c x y -> GTot Type)) + = | Mkdtuple4 : _1: a -> _2: b _1 -> _3: c _1 _2 -> _4: d _1 _2 _3 -> dtuple4 a b c d + +(** Dependent quadruples, with sugar [x:a & y:b x & z:c x y & d x y z] *) +unopteq +type dtuple5 + (a: Type) (b: (x: a -> GTot Type)) (c: (x: a -> b x -> GTot Type)) + (d: (x: a -> y: b x -> z: c x y -> GTot Type)) + (e: (x: a -> y: b x -> z: c x y -> w: d x y z -> GTot Type)) + = | Mkdtuple5 : _1: a -> _2: b _1 -> _3: c _1 _2 -> _4: d _1 _2 _3 -> _5: e _1 _2 _3 _4 -> dtuple5 a b c d e + +(** Explicitly discarding a value *) +let ignore (#a: Type) (x: a) : Tot unit = () + +(** In a context where [false] is provable, you can prove that any + type [a] is inhabited. + + There are many proofs of this fact in F*. Here, in the implementation, we build an + infinitely looping function, since the termination check succeeds + in a [False] context. *) +val false_elim (#a: Type) (u: unit{False}) : Tot a + +/// Attributes: +/// +/// An attribute is any F* term. +/// +/// Attributes are desugared and checked for being well-scoped. But, +/// they are not type-checked. +/// +/// It is associated with a definition using the [[@@attribute]] +/// notation, just preceding the definition. + +(** We collect several internal ocaml attributes into a single + inductive type. + + This may be unnecessary. In the future, we are likely to flatten + this definition into several definitions of abstract top-level + names. + + An example: + + {[ + [@@ CInline ] let f x = UInt32.(x +%^ 1) + ]} + + is extracted to C by KaRaMeL to a C definition tagged with the + [inline] qualifier. *) +type __internal_ocaml_attributes = + | PpxDerivingShow (* Generate [@@@ deriving show ] on the resulting OCaml type *) + | PpxDerivingShowConstant of string (* Similar, but for constant printers. *) + | PpxDerivingYoJson (* Generate [@@@ deriving yojson ] on the resulting OCaml type *) + | CInline + (* KaRaMeL-only: generates a C "inline" attribute on the resulting + * function declaration. When used on a local definition (i.e. a letbinding) + KaRaMeL will try to inline this binding in the extracted C code. *) + | Substitute + (* KaRaMeL-only: forces KaRaMeL to inline the function at call-site; this is + * deprecated and the recommended way is now to use F*'s + * [inline_for_extraction], which now also works for stateful functions. 
*) + | Gc + (* KaRaMeL-only: instructs KaRaMeL to heap-allocate any value of this + * data-type; this requires running with a conservative GC as the + * allocations are not freed. *) + | Comment of string + (* KaRaMeL-only: attach a comment to the declaration. Note that using F*-doc + * syntax automatically fills in this attribute. *) + | CPrologue of string + (* KaRaMeL-only: verbatim C code to be prepended to the declaration. + * Multiple attributes are valid and accumulate, separated by newlines. *) + | CEpilogue of string (* Ibid. *) + | CConst of string + (* KaRaMeL-only: indicates that the parameter with that name is to be marked + * as C const. This will be checked by the C compiler, not by KaRaMeL or F*. + * + * This is deprecated and doesn't work as intended. Use + * LowStar.ConstBuffer.fst instead! *) + | CCConv of string (* A calling convention for C, one of stdcall, cdecl, fastcall *) + | CAbstractStruct + (* KaRaMeL-only: for types that compile to struct types (records and + * inductives), indicate that the header file should only contain a forward + * declaration, which in turn forces the client to only ever use this type + * through a pointer. *) + | CIfDef (* KaRaMeL-only: on a given `val foo`, compile if foo with #ifdef. *) + | CMacro +(* KaRaMeL-only: for a top-level `let v = e`, compile as a macro *) + | CNoInline + (* For security-sensitive functions only: generate special attributes in C + to prevent inlining; if the function is subjected to a -static-header + option, the `inline` attribute will be removed, but the static will + remain. *) + +(** The [inline_let] attribute on a local let-binding, instructs the + extraction pipeline to inline the definition. This may be both to + avoid generating unnecessary intermediate variables, and also to + enable further partial evaluation. Note, use this with care, since + inlining all lets can lead to an exponential blowup in code + size. *) +val inline_let : unit + +(** The [rename_let] attribute support a form of metaprogramming for + the names of let-bound variables used in extracted code. + + This is useful, particularly in conjunction with partial + evaluation, to ensure that names reflect their usage context. + + See tests/micro-benchmarks/Renaming*.fst *) +val rename_let (new_name: string) : Tot unit + +(** The [plugin] attribute is used in conjunction with native + compilation of F* components, accelerating their reduction + relative to the default strategy of just interpreting them. + + See examples/native_tactics for several examples. *) +val plugin (x: int) : Tot unit + +(** An attribute to mark things that the typechecker should *first* + elaborate and typecheck, but unfold before verification. *) +val tcnorm : unit + +(** We erase all ghost functions and unit-returning pure functions to + [()] at extraction. This creates a small issue with abstract + types. Consider a module that defines an abstract type [t] whose + (internal) definition is [unit] and also defines [f: int -> t]. [f] + would be erased to be just [()] inside the module, while the + client calls to [f] would not, since [t] is abstract. To get + around this, when extracting interfaces, if we encounter an + abstract type, we tag it with this attribute, so that + extraction can treat it specially. + + Note, since the use of cross-module inlining (the [--cmi] option), + this attribute is no longer necessary. We retain it for legacy, + but will remove it in the future. 
*) +val must_erase_for_extraction : unit + +(** This attribute is used with the Dijkstra Monads for Free + construction to track position information in generated VCs *) +val dm4f_bind_range : unit + +(** When attached a top-level definition, the typechecker will succeed + if and only if checking the definition results in an error. The + error number list is actually OPTIONAL. If present, it will be + checked that the definition raises exactly those errors in the + specified multiplicity, but order does not matter. *) +val expect_failure (errs: list int) : Tot unit + +(** When --admit_smt_queries true is present, with the previous attribute since some + definitions only fail when verification is turned on. With this + attribute, one can ensure that a definition fails while lax-checking + too. Same semantics as above, but lax mode will be turned on for the + definition. *) +val expect_lax_failure (errs: list int) : Tot unit + +(** Print the time it took to typecheck a top-level definition *) +val tcdecltime : unit + +(** This attribute is to be used as a hint for the unifier. A + function-typed symbol `t` marked with this attribute will be treated + as being injective in all its arguments by the unifier. That is, + given a problem `t a1..an =?= t b1..bn` the unifier will solve it by + proving `ai =?= bi` for all `i`, without trying to unfold the + definition of `t`. *) +val unifier_hint_injective : unit + +(** + This attribute is used to control the evaluation order + and unfolding strategy for certain definitions. + + In particular, given + {[ + [@@(strict_on_arguments [1;2])] + let f x0 (x1:list x0) (x1:option x0) = e + ]} + + An application [f e0 e1 e2] is reduced by the normalizer by: + + 1. evaluating [e0 ~>* v0, e1 ~>* v1, e2 ~>* v2] + + 2 a. + If, according to the positional arguments [1;2], + if v1 and v2 have constant head symbols + (e.g., v1 = Cons _ _ _, and v2 = None _) + then [f] is unfolded to [e] and reduced as + {[e[v0/x0][v1/x1][v2/x2]]} + + 2 b. + + Otherwise, [f] is not unfolded and the term is [f e0 e1 e2] + reduces to [f v0 v1 v2]. *) +val strict_on_arguments (x: list int) : Tot unit + +(** + * An attribute to tag a tactic designated to solve any + * unsolved implicit arguments remaining at the end of type inference. + **) +val resolve_implicits : unit + +(** + * Implicit arguments can be tagged with an attribute [abc] to dispatch + * their solving to a user-defined tactic also tagged with the same + * attribute and resolve_implicits [@@abc; resolve_implicits]. + + * However, sometimes it is useful to have multiple such + * [abc]-tagged tactics in scope. In such a scenario, to choose among them, + * one can use the attribute as shown below to declare that [t] overrides + * all the tactics [t1...tn] and should be used to solve [abc]-tagged + * implicits, so long as [t] is not iself overridden by some other tactic. + + [@@resolve_implicits; abc; override_resolve_implicits_handler abc [`%t1; ... `%tn]] + let t = e + + **) +val override_resolve_implicits_handler : #a:Type -> a -> list string -> Tot unit + +(** A tactic registered to solve implicits with the (handle_smt_goals) + attribute will receive the SMT goal generated during typechecking + just before it is passed to the SMT solver. + *) +val handle_smt_goals : unit + +(** This attribute can be added to an inductive type definition, + indicating that it should be erased on extraction to `unit`. 
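+
+    For instance (an illustrative declaration, not part of this module),
+
+    {[
+      [@@ erasable]
+      noeq
+      type trace = | Trace : list string -> trace
+    ]}
+
+    extracts to `unit`, so carrying a [trace] imposes no runtime cost.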
+ + However, any pattern matching on the inductive type results + in a `Ghost` effect, ensuring that computationally relevant + code cannot rely on the values of the erasable type. + + See tests/micro-benchmarks/Erasable.fst, for examples. Also + see https://github.com/FStarLang/FStar/issues/1844 *) +val erasable : unit + +(** [commute_nested_matches] + This attribute can be used to decorate an inductive type [t] + + During normalization, if reduction is blocked on matching the + constructors of [t] in the following sense: + + [ + match (match e0 with | P1 -> e1 | ... | Pn -> en) with + | Q1 -> f1 ... | Qm -> fm + ] + + i.e., the outer match is stuck due to the inner match on [e0] + being stuck, and if the head constructor the outer [Qi] patterns + are the constructors of the decorated inductive type [t], then, + this is reduced to + + [ + match e0 with + | P1 -> (match e1 with | Q1 -> f1 ... | Qm -> fm) + | ... + | Pn -> (match en with | Q1 -> f1 ... | Qm -> fm) + ] + + This is sometimes useful when partially evaluating code before + extraction, particularly when aiming to obtain first-order code + for KaRaMeL. However, this attribute should be used with care, + since if after the rewriting the inner matches do not reduce, then + this can cause an explosion in code size. + + See tests/micro-benchmarks/CommuteNestedMatches.fst + and examples/layeredeffects/LowParseWriters.fsti + *) +val commute_nested_matches : unit + +(** This attribute controls extraction: it can be used to disable + extraction of a given top-level definition into a specific backend, + such as "OCaml". If any extracted code must call into an erased + function, an error will be raised (code 340). + *) +val noextract_to (backend:string) : Tot unit + + +(** This attribute decorates a let binding, e.g., + + [@@normalize_for_extraction steps] + let f = e + + The effect is that prior to extraction, F* will first reduce [e] + using the normalization [steps], and then proceed to extract it as + usual. + + Almost the same behavior can be achieved by using a + [postprocess_for_extraction_with t] attribute, which runs tactic + [t] on the goal [e == ?u] and extracts the solution to [?u] in + place of [e]. However, using a tactic to postprocess a term is + more general than needed for some cases. + + In particular, if we intend to only normalize [e] before + extraction (rather than applying some other form of equational + reasoning), then using [normalize_for_extraction] can be more + efficient, for the following reason: + + Since we are reducing [e] just before extraction, F* can enable an + otherwise non-user-facing normalization feature that allows all + arguments marked [@@@erasable] to be erased to [()]---these terms + will anyway be extracted to [()] so erasing them during + normalization is a useful optimization. 
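+
+    As a hypothetical use (nothing below is defined in this module), the
+    annotation
+
+    {[
+      [@@ normalize_for_extraction [delta; iota; zeta; primops]]
+      let lookup_table = init_table 256
+    ]}
+
+    asks F* to reduce [init_table 256] with the given steps before emitting
+    the extracted definition of [lookup_table].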
+ *) +val normalize_for_extraction (steps:list norm_step) : Tot unit + + +(** A layered effect definition may optionally be annotated with + (ite_soundness_by t) attribute, where t is another attribute + When so, the implicits and the smt guard generated when + checking the soundness of the if-then-else combinator, are + dispatched to the tactic in scope that has the t attribute (in addition + to the resolve_implicits attribute as usual) + + See examples/layeredeffects/IteSoundess.fst for a few examples + *) +val ite_soundness_by (attribute: unit): Tot unit + +(** By-default functions that have a layered effect, need to have a type + annotation for their bodies + However, a layered effect definition may contain the default_effect + attribute to indicate to the typechecker that for missing annotations, + use the default effect. + The default effect attribute takes as argument a string, that is the name + of the default effect, two caveats: + - The argument must be a string constant (not a name, for example) + - The argument should be the fully qualified name + For example, the TAC effect in FStar.Tactics.Effect.fsti specifies + its default effect as FStar.Tactics.Tac + F* will typecheck that the default effect only takes one argument, + the result type of the computation + *) +val default_effect (s:string) : Tot unit + +(** A layered effect may optionally be annotated with the + top_level_effect attribute so indicate that this effect may + appear at the top-level + (e.g., a top-level let x = e, where e has a layered effect type) + + The top_level_effect attribute takes (optional) string argument, that is the + name of the effect abbreviation that may constrain effect arguments + for the top-level effect + + As with default effect, the string argument must be a string constant, + and fully qualified + + E.g. a Hoare-style effect `M a pre post`, may have the attribute + `@@ top_level_effect "N"`, where the effect abbreviation `N` may be: + + effect N a post = M a True post + + i.e., enforcing a trivial precondition if `M` appears at the top-level + + If the argument to `top_level_effect` is absent, then the effect itself + is allowed at the top-level with any effect arguments + + See tests/micro-benchmarks/TopLevelIndexedEffects.fst for examples + + *) +val top_level_effect (s:string) : Tot unit + +(** This attribute can be annotated on the binders in an effect signature + to indicate that they are effect parameters. For example, for a + state effect that is parametric in the type of the state, the state + index may be marked as an effect parameter. + + Also see https://github.com/FStarLang/FStar/wiki/Indexed-effects + + *) +val effect_param : unit + +(** Bind definition for a layered effect may optionally contain range + arguments, that are provided by the typechecker during reification + This attribute on the effect definition indicates that the bind + has range arguments. + See for example the TAC effect in FStar.Tactics.Effect.fsti + *) +val bind_has_range_args : unit + + +(** An indexed effect definition may be annotated with + this attribute to indicate that the effect should be + extracted "natively". E.g., the `bind` of the effect is + extracted to primitive `let` bindings + + As an example, `Steel` effects (the effect for concurrent + separation logic) are marked as such + + *) +val primitive_extraction : unit + +(** A qualifier on a type definition which when used in co-domain position + on an arrow type will be extracted as if it were an impure effect type. 
+ + e.g., if you have + + [@@extract_as_impure_effect] + val stt (a:Type) (pre:_) (post:_) : Type + + then arrows of the form `a -> stt b p q` will be extracted + similarly to `a -> Dv b`. + *) +val extract_as_impure_effect : unit + + +(** A binder in a definition/declaration may optionally be annotated as strictly_positive + When the let definition is used in a data constructor type in an inductive + definition, this annotation is used to check the positivity of the inductive + + Further F* checks that the binder is actually positive in the let definition + + See tests/micro-benchmarks/Positivity.fst and NegativeTests.Positivity.fst for a few examples + *) +val strictly_positive : unit + +(** A binder in a definition/declaration may optionally be annotated as unused. + This is used in the strict positivity checker. E.g., a type such as the one + below is accepted + + let f ([@@@unused] a:Type) = unit + type t = | MkT: f t -> t + + F* checks that the binder is actually unused in the definition + + See tests/micro-benchmarks/Positivity.fst for a few examples + *) +val unused : unit + +(** This attribute may be added to an inductive type + to disable auto generated projectors + + Normally there should not be any need to use this unless: + for some reason F* cannot typecheck the auto-generated projectors. + + Another reason to use this attribute may be to avoid generating and + typechecking lot of projectors, most of which are not going to be used + in the rest of the program + *) +val no_auto_projectors : unit + +(** As [no_auto_projectors] but also do not even generate declarations + for them. *) +val no_auto_projectors_decls : unit + +(** This attribute can be added to a let definition + and indicates to the typechecker to typecheck the signature of the definition + without using subtyping. This is sometimes useful for indicating that a lemma + can be applied by the tactic engine without requiring to check additional + subtyping obligations +*) +val no_subtyping : unit + +val admit_termination : unit + +(** Pure and ghost inner let bindings are now always inlined during + the wp computation, if: the return type is not unit and the head + symbol is not marked irreducible. + + To circumvent this behavior, singleton can be used. + See the example usage in ulib/FStar.Algebra.Monoid.fst. *) +val singleton (#a: Type) (x: a) : Tot (y: a{y == x}) + +(** A weakening coercion from eqtype to Type. + + One of its uses is in types of layered effect combinators that + are subjected to stricter typing discipline (no subtyping) *) +unfold let eqtype_as_type (a:eqtype) : Type = a + +(** A coercion of the [x] from [a] to [b], when [a] is provably equal + to [b]. In most cases, F* will silently coerce from [a] to [b] + along a provable equality (as in the body of this + function). Occasionally, you may need to apply this explicitly *) +let coerce_eq (#a:Type) (#b:Type) (_:squash (a == b)) (x:a) : b = x + +val coercion : unit + +(** Marks a record type as being the result of an automatic desugar of + a constructor with a record payload. + For example, in a module `M`, `type foo = | A {x: int}` desugars + to the type `M.foo` and a type `M.foo__A__payload`. That latter + type `foo__A__payload` is decorated with an attribute + `desugar_of_variant_record ["M.A"]`. *) +val desugar_of_variant_record (type_name: string): unit + +(** Tag for implicits that are to be solved by a tactic. 
*)
+val defer_to (#a:Type) (tag : a) : unit
diff --git a/stage0/ulib/FStar.Pprint.fsti b/stage0/ulib/FStar.Pprint.fsti
new file mode 100644
index 00000000000..7ac5c135064
--- /dev/null
+++ b/stage0/ulib/FStar.Pprint.fsti
@@ -0,0 +1,392 @@
+(*
+   Copyright 2023 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*)
+module FStar.Pprint
+
+(* Unfortunate *)
+open FStar.Char
+open FStar.Float
+
+(* The rest of this file is taken almost verbatim from src/prettyprint/FStar.Pprint.fsti *)
+
+(** A pretty-printing engine and a set of basic document combinators. *)
+
+(** {1 Building documents} *)
+
+(** Documents must be built in memory before they are rendered. This may seem
+    costly, but it is a simple approach, and works well. *)
+
+(** The following operations form a set of basic (low-level) combinators for
+    building documents. On top of these combinators, higher-level combinators
+    can be defined: see {!PPrintCombinators}. *)
+
+(** This is the abstract type of documents. *)
+new
+val document : Type0
+
+(** The following basic (low-level) combinators allow constructing documents. *)
+
+(** [empty] is the empty document. *)
+val empty: document
+
+(** [doc_of_char c] is a document that consists of the single character [c]. This
+    character must not be a newline. *)
+val doc_of_char: char -> document
+
+(** [doc_of_string s] is a document that consists of the string [s]. This string must
+    not contain a newline. *)
+val doc_of_string: string -> document
+
+(** [doc_of_bool b] is a document that consists of the boolean [b]. *)
+val doc_of_bool: bool -> document
+
+(** [substring s ofs len] is a document that consists of the portion of the
+    string [s] delimited by the offset [ofs] and the length [len]. This
+    portion must not contain a newline. *)
+val substring: string -> int -> int -> document
+
+(** [fancystring s apparent_length] is a document that consists of the string
+    [s]. This string must not contain a newline. The string may contain fancy
+    characters: color escape characters, UTF-8 or multi-byte characters,
+    etc. Thus, its apparent length (which measures how many columns the text
+    will take up on screen) differs from its length in bytes. *)
+val fancystring: string -> int -> document
+
+(** [fancysubstring s ofs len apparent_length] is a document that consists of
+    the portion of the string [s] delimited by the offset [ofs] and the length
+    [len]. This portion must not contain a newline. The string may contain fancy
+    characters. *)
+val fancysubstring : string -> int -> int -> int -> document
+
+(** [utf8string s] is a document that consists of the UTF-8-encoded string [s].
+    This string must not contain a newline. *)
+val utf8string: string -> document
+
+(** [hardline] is a forced newline document. This document forces all enclosing
+    groups to be printed in non-flattening mode. In other words, any enclosing
+    groups are dissolved. *)
+val hardline: document
+
+(** [blank n] is a document that consists of [n] blank characters.
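+    Unlike [break_ n] below, [blank n] always renders as spaces and never
+    turns into a newline.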
*) +val blank: int -> document + +(** [break_ n] is a document which consists of either [n] blank characters, + when forced to display on a single line, or a single newline character, + otherwise. Note that there is no choice at this point: choices are encoded + by the [group] combinator. *) +val break_: int -> document + +(** [doc1 ^^ doc2] is the concatenation of the documents [doc1] and [doc2]. *) +val ( ^^ ) : document -> document -> document +(** [x ^/^ y] separates x and y with a breakable space. It is a short-hand for + [x ^^ break 1 ^^ y] *) +val ( ^/^ ) : document -> document -> document + +(** [nest j doc] is the document [doc], in which the indentation level has + been increased by [j], that is, in which [j] blanks have been inserted + after every newline character. Read this again: indentation is inserted + after every newline character. No indentation is inserted at the beginning + of the document. *) +val nest: int -> document -> document + +(** [group doc] encodes a choice. If possible, then the entire document [group + doc] is rendered on a single line. Otherwise, the group is dissolved, and + [doc] is rendered. There might be further groups within [doc], whose + presence will lead to further choices being explored. *) +val group: document -> document + +// (** [column f] is the document obtained by applying the function [f] to the +// current column number. This combinator allows making the construction of +// a document dependent on the current column number. *) +// val column: (int -> document) -> document + +// (** [nesting f] is the document obtained by applying the function [f] to the +// current indentation level, that is, the number of indentation (blank) +// characters that were inserted at the beginning of the current line. *) +// val nesting: (int -> document) -> document + +// (** [position f] is the document obtained by applying the function [f] +// to the current position in the rendered output. The position +// consists of [bol], which is the character-offset of the beginnig +// of the current line (starting at 0), [line], which is the current +// line (starting at 1), and [column], which is the current column +// (starting at 0). The current character-offset is always given by +// [bol + column]. *) +// val position : (int -> int -> int -> document) -> document + +(** [ifflat doc1 doc2] is rendered as [doc1] if part of a group that can be + successfully flattened, and is rendered as [doc2] otherwise. Use this + operation with caution. Because the pretty-printer is free to choose + between [doc1] and [doc2], these documents should be semantically + equivalent. *) +val ifflat: document -> document -> document + +// SI: purposely commented-out for now. +// (** {1 Rendering documents} *) +// +// (** This renderer sends its output into an output channel. *) +// module ToChannel : PPrintRenderer.RENDERER +// with type channel = out_channel +// and type document = document +// +// (** This renderer sends its output into a memory buffer. *) +// module ToBuffer : PPrintRenderer.RENDERER +// with type channel = Buffer.t +// and type document = document +// +// (** This renderer sends its output into a formatter channel. *) +// module ToFormatter : PPrintRenderer.RENDERER +// with type channel = Format.formatter +// and type document = document + + +(** A set of high-level combinators for building documents. *) + +(** {1 Single characters} *) + +(** The following constant documents consist of a single character. 
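+    They combine with the low-level operators above; for instance (an
+    illustrative rendering, not a definition from this interface),
+
+    {[
+      group (lbrace ^^ nest 2 (break_ 1 ^^ doc_of_string "x") ^^ break_ 1 ^^ rbrace)
+    ]}
+
+    prints as [{ x }] when it fits on the current line, and otherwise as a
+    three-line block with [x] indented by two spaces.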
*) + +val lparen: document +val rparen: document +val langle: document +val rangle: document +val lbrace: document +val rbrace: document +val lbracket: document +val rbracket: document +val squote: document +val dquote: document +val bquote: document +val semi: document +val colon: document +val comma: document +val space: document +val dot: document +val sharp: document +val slash: document +val backslash: document +val equals: document +val qmark: document +val tilde: document +val at: document +val percent: document +val dollar: document +val caret: document +val ampersand: document +val star: document +val plus: document +val minus: document +val underscore: document +val bang: document +val bar: document +val rarrow: document +val long_left_arrow: document +val larrow: document + +(** {1 Delimiters} *) + +(** [precede l x] is [l ^^ x]. *) +val precede: document -> document -> document + +(** [terminate r x] is [x ^^ r]. *) +val terminate: document -> document -> document + +(** [enclose l r x] is [l ^^ x ^^ r]. *) +val enclose: document -> document -> document -> document + +(** The following combinators enclose a document within a pair of delimiters. + They are partial applications of [enclose]. No whitespace or line break is + introduced. *) + +val squotes: document -> document +val dquotes: document -> document +val bquotes: document -> document +val braces: document -> document +val parens: document -> document +val angles: document -> document +val brackets: document -> document + +(** {1 Repetition} *) + +(** [twice doc] is the document obtained by concatenating two copies of + the document [doc]. *) +val twice: document -> document + +(** [repeat n doc] is the document obtained by concatenating [n] copies of + the document [doc]. *) +val repeat: int -> document -> document + +(** {1 Lists and options} *) + +(** [concat docs] is the concatenation of the documents in the list [docs] (with ^^). *) +val concat: list document -> document + +(** [separate sep docs] is the concatenation of the documents in the list + [docs]. The separator [sep] is inserted between every two adjacent + documents. *) +val separate: document -> list document -> document + +(** [concat_map f xs] is equivalent to [concat (List.map f xs)]. *) +val concat_map: ('a -> document) -> list 'a -> document + +(** [separate_map sep f xs] is equivalent to [separate sep (List.map f xs)]. *) +val separate_map: document -> ('a -> document) -> list 'a -> document + +(** [separate2 sep last_sep docs] is the concatenation of the documents in the + list [docs]. The separator [sep] is inserted between every two adjacent + documents, except between the last two documents, where the separator + [last_sep] is used instead. *) +val separate2: document -> document -> list document -> document + +(** [optional f None] is the empty document. [optional f (Some x)] is + the document [f x]. *) +val optional: ('a -> document) -> option 'a -> document + +(** {1 Text} *) + +(** [lines s] is the list of documents obtained by splitting [s] at newline + characters, and turning each line into a document via [substring]. This + code is not UTF-8 aware. *) +val lines: string -> list document + +(** [arbitrary_string s] is equivalent to [separate (break 1) (lines s)]. + It is analogous to [string s], but is valid even if the string [s] + contains newline characters. 
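+    As an illustrative sketch (assuming the equivalence stated above),
+    {[
+      arbitrary_string "a\nb"
+    ]}
+    behaves like [separate (break_ 1) [doc_of_string "a"; doc_of_string "b"]]:
+    the two lines become documents joined by a breakable space.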
*) +val arbitrary_string: string -> document + +(** [words s] is the list of documents obtained by splitting [s] at whitespace + characters, and turning each word into a document via [substring]. All + whitespace is discarded. This code is not UTF-8 aware. *) +val words: string -> list document + +(** [split ok s] splits the string [s] before and after every occurrence of a + character that satisfies the predicate [ok]. The substrings thus obtained + are turned into documents, and a list of documents is returned. No + information is lost: the concatenation of the documents yields the + original string. This code is not UTF-8 aware. *) +val split: (char -> bool) -> string -> list document + +(** [flow sep docs] separates the documents in the list [docs] with the + separator [sep] and arranges for a new line to begin whenever a document + does not fit on the current line. This is useful for typesetting + free-flowing, ragged-right text. A typical choice of [sep] is [break b], + where [b] is the number of spaces that must be inserted between two + consecutive words (when displayed on the same line). *) +val flow: document -> list document -> document + +(** [flow_map sep f docs] is equivalent to [flow sep (List.map f docs)]. *) +val flow_map: document -> ('a -> document) -> list 'a -> document + +(** [url s] is a possible way of displaying the URL [s]. A potential line + break is inserted immediately before and immediately after every slash + and dot character. *) +val url: string -> document + +(** {1 Alignment and indentation} *) + +(** [align doc] increases the indentation level to reach the current + column. Thus, this document will be rendered within a box whose + upper left corner is the current position. *) +val align: document -> document + +(* [hang n doc] is analogous to [align], but additionally indents + all lines, except the first one, by [n]. Thus, the text in the + box forms a hanging indent. *) +val hang: int -> document -> document + +(** [prefix n b left right] has the following flat layout: {[ +left right +]} +and the following non-flat layout: +{[ +left + right +]} +The parameter [n] controls the nesting of [right] (when not flat). +The parameter [b] controls the number of spaces between [left] and [right] +(when flat). + *) +val prefix: int -> int -> document -> document -> document + +(** [jump n b right] is equivalent to [prefix n b empty right]. *) +val jump: int -> int -> document -> document + +(** [infix n b middle left right] has the following flat layout: {[ +left middle right +]} +and the following non-flat layout: {[ +left middle + right +]} +The parameter [n] controls the nesting of [right] (when not flat). +The parameter [b] controls the number of spaces between [left] and [middle] +(always) and between [middle] and [right] (when flat). +*) +val infix: int -> int -> document -> document -> document -> document + +(** [surround n b opening contents closing] has the following flat layout: {[ +opening contents closing +]} +and the following non-flat layout: {[ +opening + contents +closing +]} +The parameter [n] controls the nesting of [contents] (when not flat). +The parameter [b] controls the number of spaces between [opening] and [contents] +and between [contents] and [closing] (when flat). 
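+As an illustrative sketch (assuming the two layouts described above and
+the [lbrace]/[rbrace] constants declared earlier), the document
+{[
+surround 2 1 lbrace (doc_of_string "body") rbrace
+]}
+is expected to render either as [{ body }], when it fits on the current
+line, or as the opening brace, [body] indented by two columns on its own
+line, and the closing brace back at the original column.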
+*) +val surround: int -> int -> document -> document -> document -> document + +(** [soft_surround] is analogous to [surround], but involves more than one + group, so it offers possibilities other than the completely flat layout + (where [opening], [contents], and [closing] appear on a single line) and + the completely developed layout (where [opening], [contents], and + [closing] appear on separate lines). It tries to place the beginning of + [contents] on the same line as [opening], and to place [closing] on the + same line as the end of [contents], if possible. +*) +val soft_surround: int -> int -> document -> document -> document -> document + +(** [surround_separate n b void opening sep closing docs] is equivalent to + [surround n b opening (separate sep docs) closing], except when the + list [docs] is empty, in which case it reduces to [void]. *) +val surround_separate: int -> int -> document -> document -> document -> document -> list document -> document + +(** [surround_separate_map n b void opening sep closing f xs] is equivalent to + [surround_separate n b void opening sep closing (List.map f xs)]. *) +val surround_separate_map: int -> int -> document -> document -> document -> document -> ('a -> document) -> list 'a -> document + +(** {1 Short-hands} *) + + +//(** [!^s] is a short-hand for [string s]. *) +// val ( !^ ) : string -> document + +(** [x ^/^ y] separates [x] and [y] with a breakable space. + It is a short-hand for [x ^^ break 1 ^^ y]. *) + +(** [x ^//^ y] is a short-hand for [prefix 2 1 x y]. *) +// val ( ^//^ ) : document -> document -> document + +// Expose underlying Renderer.pretty implementations (avoid inner modules). +// [pretty_string] uses ToBuffer:RENDERER implementation; +// [print_out_channel] uses the ToChannel:RENDERER one. +(** Note: this exists in the underlying module, but userspace cannot really +call it since we have no support for floats. See [render] below. *) +val pretty_string : float -> int -> document -> string + +(** Render a document. Equivalent to [pretty_string 1.0 80]. *) +val render : document -> string diff --git a/stage0/ulib/FStar.PredicateExtensionality.fst b/stage0/ulib/FStar.PredicateExtensionality.fst new file mode 100644 index 00000000000..0571d1d9cf9 --- /dev/null +++ b/stage0/ulib/FStar.PredicateExtensionality.fst @@ -0,0 +1,30 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.PredicateExtensionality +module F = FStar.FunctionalExtensionality +module P = FStar.PropositionalExtensionality + +let predicate (a:Type) = a -> Tot prop + +let peq (#a:Type) (p1:predicate a) (p2:predicate a) = + forall x. 
(p1 x <==> p2 x) + +let predicateExtensionality (a:Type) (p1 p2:predicate a) + : Lemma (requires (peq #a p1 p2)) + (ensures (F.on_domain a p1==F.on_domain a p2)) + = P.axiom(); + assert (F.feq p1 p2) + diff --git a/stage0/ulib/FStar.Preorder.fst b/stage0/ulib/FStar.Preorder.fst new file mode 100644 index 00000000000..9591cdd3fc6 --- /dev/null +++ b/stage0/ulib/FStar.Preorder.fst @@ -0,0 +1,36 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Preorder + +(* Preordered relations and stable predicates *) + +type relation (a:Type) = a -> a -> Type0 + +type predicate (a:Type) = a -> Type0 + +let reflexive (#a:Type) (rel:relation a) = + forall (x:a). rel x x + +let transitive (#a:Type) (rel:relation a) = + forall (x:a) (y:a) (z:a). (rel x y /\ rel y z) ==> rel x z + +let preorder_rel (#a:Type) (rel:relation a) = + reflexive rel /\ transitive rel + +type preorder (a:Type) = rel:relation a{preorder_rel rel} + +let stable (#a:Type) (p:predicate a) (rel:relation a{preorder_rel rel}) = + forall (x:a) (y:a). (p x /\ rel x y) ==> p y diff --git a/stage0/ulib/FStar.Printf.fst b/stage0/ulib/FStar.Printf.fst new file mode 100644 index 00000000000..6c647d8a79d --- /dev/null +++ b/stage0/ulib/FStar.Printf.fst @@ -0,0 +1,209 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Printf + +(* + * A variable arity C-style printf + * See tests/micro-benchmarks/Test.Printf.fst for example usage + *) + +open FStar.Char +open FStar.String +module I = FStar.Integers + +noeq +type extension = + | MkExtension : #a:Type0 -> $f:(a -> Tot string) -> extension + +/// `arg`: The format specifiers supported +/// %b : bool +/// %d : int +/// %c : char +/// %s : string +/// %uy : U8.t +/// %us : U16.t +/// %ul : U32.t +/// %uL : U64.t +/// %y : Int8.t +/// %i : Int16.t +/// %l : Int32.t +/// %L : Int64.t +noeq +type arg = + | Bool + | Int + | Char + | String + | U8 + | U16 + | U32 + | U64 + | I8 + | I16 + | I32 + | I64 + | Extension of extension + +/// `arg_type`: Interpreting a `arg` tag as a type +let arg_type (a:arg) : Tot Type0 = + match a with + | Bool -> bool + | Int -> int + | Char -> char + | String -> string + | U8 -> FStar.UInt8.t + | U16 -> FStar.UInt16.t + | U32 -> FStar.UInt32.t + | U64 -> FStar.UInt64.t + | I8 -> FStar.Int8.t + | I16 -> FStar.Int16.t + | I32 -> FStar.Int32.t + | I64 -> FStar.Int64.t + | Extension (MkExtension #t _) -> t + +let string_of_arg (#a:arg) (x:arg_type a) : string = + match a with + | Bool -> string_of_bool x + | Int -> string_of_int x + | Char -> string_of_char x + | String -> x + | U8 -> FStar.UInt8.to_string x + | U16 -> FStar.UInt16.to_string x + | U32 -> FStar.UInt32.to_string x + | U64 -> FStar.UInt64.to_string x + | I8 -> FStar.Int8.to_string x + | I16 -> FStar.Int16.to_string x + | I32 -> FStar.Int32.to_string x + | I64 -> FStar.Int64.to_string x + | Extension (MkExtension f) -> f x + +/// `dir`: Internal to this module +/// A 'directive"; used when parsing a format specifier +noeq +type dir = + | Lit of char + | Arg of arg + +/// `dir_type ds`: Interpreting a list directives as a pure function type +let rec dir_type (ds:list dir) : Tot Type0 = + match ds with + | [] -> string + | Lit c :: ds' -> dir_type ds' + | Arg a :: ds' -> arg_type a -> dir_type ds' + +/// `string_of_dirs ds`: +/// Interpreting a list of directives as its function, +/// in a continuation-passing style +let rec string_of_dirs + (ds:list dir) + (k:string -> string) + : dir_type ds + = match ds with + | [] -> k "" + | Lit c :: ds' -> + coerce_eq () ( + string_of_dirs ds' (fun res -> k (string_of_char c ^ res)) + ) + | Arg a :: ds' -> + fun (x : arg_type a) -> + string_of_dirs ds' (fun res -> ((k "") + ^ string_of_arg x + ^ res)) + +type extension_parser = i:list char -> option (extension & o:list char{o << i}) + +/// `parse_format s`: +/// Parses a list of characters into a list of directives +/// Or None, in case the format string is invalid +let rec parse_format + (s:list char) + (parse_ext: extension_parser) + : option (list dir) + = let add_dir (d:dir) (ods : option (list dir)) + : option (list dir) + = match ods with + | None -> None + | Some ds -> Some (d::ds) + in + match s with + | [] -> Some [] + | ['%'] -> None + + //Unsigned integers beging with '%u' + | '%' :: 'u' :: s' -> begin + match s' with + | 'y' :: s'' -> add_dir (Arg U8) (parse_format s'' parse_ext) + | 's' :: s'' -> add_dir (Arg U16) (parse_format s'' parse_ext) + | 'l' :: s'' -> add_dir (Arg U32) (parse_format s'' parse_ext) + | 'L' :: s'' -> add_dir (Arg U64) (parse_format s'' parse_ext) + | _ -> None + end + + //User extensions begin with '%X' + | '%' :: 'X' :: s' -> begin + match parse_ext s' with + | Some (ext, rest) -> add_dir (Arg (Extension ext)) (parse_format rest parse_ext) + | _ -> None + end + + | '%' :: c :: s' -> begin + match c with + | '%' -> add_dir (Lit '%') 
(parse_format s' parse_ext) + | 'b' -> add_dir (Arg Bool) (parse_format s' parse_ext) + | 'd' -> add_dir (Arg Int) (parse_format s' parse_ext) + | 'c' -> add_dir (Arg Char) (parse_format s' parse_ext) + | 's' -> add_dir (Arg String) (parse_format s' parse_ext) + | 'y' -> add_dir (Arg I8) (parse_format s' parse_ext) + | 'i' -> add_dir (Arg I16) (parse_format s' parse_ext) + | 'l' -> add_dir (Arg I32) (parse_format s' parse_ext) + | 'L' -> add_dir (Arg I64) (parse_format s' parse_ext) + | _ -> None + end + | c :: s' -> + add_dir (Lit c) (parse_format s' parse_ext) + +/// `parse_format_string`: parses a format `string` into a list of directives +let parse_format_string + (s:string) + (parse_ext:extension_parser) + : option (list dir) + = parse_format (list_of_string s) parse_ext + +let no_extensions : extension_parser = fun s -> None + +/// `sprintf`: The main function of this module +/// A variable arity string formatter +/// Used as: `sprintf "format string" v1 ... vn` +/// +/// It's marked `inline_for_extraction`, meaning that we don't need +/// any special support in our compilation targets to support sprintf +/// +/// `sprintf "Hello %s" "world"` +/// will just extract to `"Hello " ^ "world"` +inline_for_extraction +let sprintf + (s:string{normalize_term (b2t (Some? (parse_format_string s no_extensions)))}) + : norm [unascribe; delta; iota; zeta; primops] (dir_type (Some?.v (parse_format_string s no_extensions))) + = norm [unascribe; delta; iota; zeta; primops] (string_of_dirs (Some?.v (parse_format_string s no_extensions)) (fun s -> s)) + + +/// `ext_sprintf`: An extensible version of sprintf +inline_for_extraction +let ext_sprintf + (parse_ext: extension_parser) + (s:string{normalize_term (b2t (Some? (parse_format_string s parse_ext)))}) + : norm [unascribe; delta; iota; zeta; primops] (dir_type (Some?.v (parse_format_string s parse_ext))) + = norm [unascribe; delta; iota; zeta; primops] (string_of_dirs (Some?.v (parse_format_string s parse_ext)) (fun s -> s)) diff --git a/stage0/ulib/FStar.PropositionalExtensionality.fst b/stage0/ulib/FStar.PropositionalExtensionality.fst new file mode 100644 index 00000000000..567fd35ec62 --- /dev/null +++ b/stage0/ulib/FStar.PropositionalExtensionality.fst @@ -0,0 +1,42 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.PropositionalExtensionality +(* + * + * The propositional extensionality axiom asserts the equality of + * equisatisfiable propositions. + * + * The formulation of this axiom is (clearly) tied closely to the + * precise definition of `prop`. + * + * `Prims.prop` is defined as the type of all subtypes of `unit`, + * including, e.g., `squash t`, for all `t`. + * + * Note, we have considered several other plausible definitions of + * `prop`, but some of which are actually inconsistent with this + * axiom. See, for instance, + * examples/paradoxes/PropositionalExtensionalityInconsistent.fst. + * + *) + +assume +val axiom (_:unit) + : Lemma (forall (p1 p2:prop). 
(p1 <==> p2) <==> (p1 == p2)) + +let apply (p1 p2:prop) + : Lemma (ensures ((p1 <==> p2) <==> (p1 == p2))) + = axiom () diff --git a/stage0/ulib/FStar.PtrdiffT.fst b/stage0/ulib/FStar.PtrdiffT.fst new file mode 100644 index 00000000000..e589a04bb3a --- /dev/null +++ b/stage0/ulib/FStar.PtrdiffT.fst @@ -0,0 +1,56 @@ +module FStar.PtrdiffT + +module Cast = FStar.Int.Cast +module I64 = FStar.Int64 + +open FStar.Ghost + +friend FStar.SizeT + +(** We assume the existence of lower and upper bounds corresponding to PTRDIFF_MIN + and PTRDIFF_MAX, which ensure that a ptrdiff_t has at least width 16 according to + the C standard *) +assume +val max_bound : x:erased int { x >= pow2 15 - 1 } +let min_bound : erased int = hide (- (reveal max_bound + 1)) + +(** We also assume that size_t is wider than ptrdiff_t *) +assume +val bounds_lemma (_:unit) : Lemma (SizeT.bound > max_bound) + +let t = x:I64.t{I64.v x >= min_bound /\ I64.v x <= max_bound } + +let fits x = + FStar.Int.fits x I64.n == true /\ + x <= max_bound /\ x >= min_bound + +let fits_lt x y = () + +let v x = + I64.v x + +let int_to_t (x: int) : Pure t + (requires (fits x)) + (ensures (fun y -> v y == x)) + = I64.int_to_t x + +let ptrdiff_v_inj x = () +let ptrdiff_int_to_t_inj x = () + +let mk x = int_to_t (I16.v x) + +let ptrdifft_to_sizet x = + bounds_lemma (); + SizeT.Sz <| Cast.int64_to_uint64 x + +let add x y = I64.add x y + +let div x y = + FStar.Math.Lib.slash_decr_axiom (v x) (v y); + I64.div x y + +let rem x y = I64.rem x y +let gt x y = I64.gt x y +let gte x y = I64.gte x y +let lt x y = I64.lt x y +let lte x y = I64.lte x y diff --git a/stage0/ulib/FStar.PtrdiffT.fsti b/stage0/ulib/FStar.PtrdiffT.fsti new file mode 100644 index 00000000000..be203a848b9 --- /dev/null +++ b/stage0/ulib/FStar.PtrdiffT.fsti @@ -0,0 +1,114 @@ +module FStar.PtrdiffT + +module I16 = FStar.Int16 +module US = FStar.SizeT + +val t : eqtype + +val fits (x: int) : Tot prop + +val fits_lt (x y: int) : Lemma + (requires (abs x < abs y /\ fits y)) + (ensures (fits x)) + [SMTPat (fits x); SMTPat (fits y)] + +[@@noextract_to "krml"] +val v (x: t) : Pure int + (requires True) + (ensures (fun y -> fits y)) + +[@@noextract_to "krml"] +val int_to_t (x: int) : Pure t + (requires (fits x)) + (ensures (fun y -> v y == x)) + +/// v and int_to_t are inverses +val ptrdiff_v_inj (x: t) + : Lemma + (ensures int_to_t (v x) == x) + [SMTPat (v x)] + +val ptrdiff_int_to_t_inj (x: int) + : Lemma + (requires fits x) + (ensures v (int_to_t x) == x) + [SMTPat (int_to_t x)] + +/// According to the C standard, "the bit width of ptrdiff_t is not less than 17 since c99, +/// 16 since C23" +/// (https://en.cppreference.com/w/c/types/ptrdiff_t) +/// We therefore only offer a function to create a ptrdiff_t when we are sure it fits +noextract inline_for_extraction +val mk (x: I16.t) : Pure t + (requires True) + (ensures (fun y -> v y == I16.v x)) + +noextract inline_for_extraction +let zero : (zero_ptrdiff: t { v zero_ptrdiff == 0 }) = + mk 0s + +(** Cast from ptrdiff_to to size_t. 
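+    (Illustrative usage note: a caller holding an arbitrary [d:t] would
+    typically branch on [d `gte` zero], using [gte] and [zero] declared
+    in this interface, before applying this cast, since only
+    non-negative differences are accepted here.)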
+ We restrict the cast to positive integers to avoid reasoning about modular arithmetic *) +val ptrdifft_to_sizet (x:t{v x >= 0}) : Pure US.t + (requires True) + (ensures fun c -> v x == US.v c) + +val add (x y: t) : Pure t + (requires (fits (v x + v y))) + (ensures (fun z -> v z == v x + v y)) + +(** Division primitive + + As for rem below, we only provide division on positive signed + integers, to avoid having to reason about possible overflows *) +val div (a:t{v a >= 0}) (b:t{v b > 0}) : Pure t + (requires True) + (ensures fun c -> v a / v b == v c) + +(** Modulo specification, similar to FStar.Int.mod *) + +let mod_spec (a:int{fits a}) (b:int{fits b /\ b <> 0}) : GTot (n:int{fits n}) = + let open FStar.Mul in + let res = a - ((a/b) * b) in + fits_lt res b; + res + +(** Euclidean remainder + + The result is the modulus of [a] with respect to a non-zero [b]. + Note, according to the C standard, this operation is only defined + if a/b is representable. + To avoid requiring the precondition `fits (v a / v b)`, we instead + restrict this primitive to positive integers only. + *) +val rem (a:t{v a >= 0}) (b:t{v b > 0}) : Pure t + (requires True) + (ensures (fun c -> mod_spec (v a) (v b) = v c)) + +(** Greater than *) +val gt (x y:t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x > v y))) + +(** Greater than or equal *) +val gte (x y:t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x >= v y))) + +(** Less than *) +val lt (x y:t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x < v y))) + +(** Less than or equal *) +val lte (x y: t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x <= v y))) + +(** Infix notations *) + +unfold let op_Plus_Hat = add +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte diff --git a/stage0/ulib/FStar.Pure.BreakVC.fst b/stage0/ulib/FStar.Pure.BreakVC.fst new file mode 100644 index 00000000000..804666d3240 --- /dev/null +++ b/stage0/ulib/FStar.Pure.BreakVC.fst @@ -0,0 +1,34 @@ +module FStar.Pure.BreakVC + +open FStar.Tactics.V2 + +let mono_lem () : Lemma (pure_wp_monotonic unit break_wp') = + assert (pure_wp_monotonic unit break_wp') by begin + norm [delta]; + l_to_r [`spinoff_eq] + end + +let squash_p_impl_p (p:pure_post unit) : squash (squash (p ()) ==> p ()) = () + +#push-options "--no_tactics" // don't process `with_tactic` markers + +let (==>>) = (==>) // Working around #3173 and #3175 + +let aux2 (p:pure_post unit) +: Lemma (break_wp p ==> pure_return unit () p) += calc (==>>) { + break_wp p; + == {} + spinoff (squash (p ())); + ==> { spinoff_equiv (squash (p ())) } + squash (p ()); + ==>> { squash_p_impl_p p } + p (); + ==> { () } + pure_return unit () p; + } + +let break_vc () : PURE unit break_wp = + Classical.forall_intro aux2; + () +#pop-options diff --git a/stage0/ulib/FStar.Pure.BreakVC.fsti b/stage0/ulib/FStar.Pure.BreakVC.fsti new file mode 100644 index 00000000000..b564e6411e1 --- /dev/null +++ b/stage0/ulib/FStar.Pure.BreakVC.fsti @@ -0,0 +1,21 @@ +module FStar.Pure.BreakVC + +open FStar.Tactics + +let break_wp' : pure_wp' unit = + fun p -> spinoff (squash (p ())) + +val mono_lem () : Lemma (pure_wp_monotonic unit break_wp') + +private +let post () : Tac unit = + norm [delta_fully [`%mono_lem; `%break_wp']]; + trefl() + +[@@postprocess_with post] +unfold +let break_wp : pure_wp unit = + let _ = mono_lem () in + break_wp' + +val break_vc () : PURE unit break_wp diff --git a/stage0/ulib/FStar.Range.fsti 
b/stage0/ulib/FStar.Range.fsti new file mode 100644 index 00000000000..b8b8bdfda64 --- /dev/null +++ b/stage0/ulib/FStar.Range.fsti @@ -0,0 +1,59 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Range + +open FStar.Sealed + +(** [__range] is a type for the internal representations of source + ranges. Internally, it includes a "definition" range and a "use" range, each + of which has a filename, a start position (line+col), and an end position. + + We do not fully expose this type, but explode below allows to inspect it, + and __mk_range to construct it. + + [range] is a sealed version of [__range], meaning it does not provide + any facts about its values. We use this type since we have *total* functions + inspecting terms and returning their range metadada (like the range_of constant). + Given that range is sealed, it is sound to make range_of total. +*) + +assume new type __range + +type range = sealed __range + +(** A dummy range constant *) +val __range_0 : __range +let range_0 : range = seal __range_0 + +(** Building a range constant *) +val __mk_range (file: string) (from_line from_col to_line to_col: int) : Tot __range + +val mk_range (file: string) (from_line from_col to_line to_col: int) : Tot range +(* This is essentially +unfold +let mk_range (file: string) (from_line from_col to_line to_col: int) : Tot range = + seal (__mk_range file from_line from_col to_line to_col) +but the extra indirection breaks the custom errors messages in QuickCode. +Just retaining this as a primop for now (Guido 30/Aug/2024) *) + +val join_range (r1 r2 : range) : Tot range + +(** [labeled] is used internally to the SMT encoding to associate a + source-code location with an assertion. *) +irreducible +let labeled (r : range) (msg: string) (b: Type) : Type = b + +val explode (r : __range) : Tot (string * int * int * int * int) diff --git a/stage0/ulib/FStar.Real.Old.fst b/stage0/ulib/FStar.Real.Old.fst new file mode 100644 index 00000000000..f1837bce014 --- /dev/null +++ b/stage0/ulib/FStar.Real.Old.fst @@ -0,0 +1,70 @@ +(* This module is DEPRECATED. Use FStar.Real instead. *) +module FStar.Real.Old + +module SEM = FStar.StrongExcludedMiddle + +let of_string = admit() +// We cannot really implement this. The old implementation +// had no execution behavior for it nor assumed any facts. + +let (=.) x y = SEM.strong_excluded_middle (x == y) +let (<>.) x y = SEM.strong_excluded_middle (x =!= y) +let (>.) x y = SEM.strong_excluded_middle (x >. y) +let (>=.) x y = SEM.strong_excluded_middle (x >=. y) +let (<.) x y = SEM.strong_excluded_middle (x <. y) +let (<=.) x y = SEM.strong_excluded_middle (x <=. y) + +#reset-options "--smtencoding.elim_box true --smtencoding.l_arith_repr native --smtencoding.nl_arith_repr native" + +let n_over_n2 (n:real{n <>. 0.0R /\ n*.n <>. 0.0R}) = + assert (n /. (n *. n) == 1.0R /. n) + +let test = assert (two >. one) +let test1 = assert (one =. 1.0R) + +let test_lt1 = assert (1.0R <. 2.0R) +let test_lt2 = assert (~ (1.0R <. 
1.0R)) +let test_lt3 = assert (~ (2.0R <. 1.0R)) + +let test_le1 = assert (1.0R <=. 2.0R) +let test_le2 = assert (1.0R <=. 1.0R) +let test_le3 = assert (~ (2.0R <=. 1.0R)) + +let test_gt1 = assert (~ (1.0R >. 2.0R)) +let test_gt2 = assert (~ (1.0R >. 1.0R)) +let test_gt3 = assert (2.0R >. 1.0R) + +let test_ge1 = assert (~ (1.0R >=. 2.0R)) +let test_ge2 = assert (1.0R >=. 1.0R) +let test_ge3 = assert (2.0R >=. 1.0R) + +let test_add_eq = assert (1.0R +. 1.0R =. 2.0R) +let test_add_eq' = assert (1.0R +. 3.0R =. 4.0R) +let test_add_lt = assert (1.0R +. 1.0R <. 3.0R) + +let test_mul_eq = assert (2.0R *. 2.0R =. 4.0R) +let test_mul_lt = assert (2.0R *. 2.0R <. 5.0R) + +let test_div_eq = assert (8.0R /. 2.0R =. 4.0R) +let test_div_lt = assert (8.0R /. 2.0R <. 5.0R) + +let test_sqrt_2_mul = assert (sqrt_2 *. sqrt_2 == 2.0R) +//let test_sqrt_2_add = assert (sqrt_2 +. sqrt_2 >. 2.0R) // Fails +let test_sqrt_2_scale = assert (1.0R /. sqrt_2 =. sqrt_2 /. 2.0R) + +// Common identities +let add_id_l = assert (forall n. 0.0R +. n =. n) +let add_id_r = assert (forall n. n +. 0.0R =. n) + +let mul_nil_l = assert (forall n. 0.0R *. n =. 0.0R) +let mul_nil_r = assert (forall n. n *. 0.0R =. 0.0R) + +let mul_id_l = assert (forall n. 1.0R *. n =. n) +let mul_id_r = assert (forall n. n *. 1.0R =. n) + +let add_comm = assert (forall x y. x +. y =. y +.x) +let add_assoc = assert (forall x y z. ((x +. y) +.z) =. (x +. (y +. z))) + +let mul_comm = assert (forall x y. x *. y =. y *.x) +let mul_assoc = assert (forall x y z. ((x *. y) *.z) =. (x *. (y *. z))) +let mul_dist = assert (forall x y z. x *. (y +. z) =. (x *. y) +. (x *.z)) diff --git a/stage0/ulib/FStar.Real.Old.fsti b/stage0/ulib/FStar.Real.Old.fsti new file mode 100644 index 00000000000..d7d924a4d17 --- /dev/null +++ b/stage0/ulib/FStar.Real.Old.fsti @@ -0,0 +1,48 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Real.Old + +(* This module is DEPRECATED. Use FStar.Real instead. *) + +open FStar.Real +module R = FStar.Real + +let real = R.real +let of_int = R.of_int + +(** + Used to extract real constants; this function is + uninterpreted logically. i.e., 1.1R is extracted to + [of_string "1.1"] + *) +val of_string: string -> Tot real + +unfold let ( +. ) : real -> real -> Tot real = R.op_Plus_Dot +unfold let ( -. ) : real -> real -> Tot real = R.op_Subtraction_Dot +unfold let ( *. ) : real -> real -> Tot real = R.op_Star_Dot +unfold let ( /. ) : real -> d:real{d <> 0.0R} -> Tot real = R.op_Slash_Dot + +(* Assuming ghost decidable equality, but it is not eqtype. *) +val ( =. ) (x y : real) : GTot (b:bool{b <==> x == y}) +val ( >. ) (x y : real) : GTot (b:bool{b <==> R.op_Greater_Dot x y}) +val ( >=. ) (x y : real) : GTot (b:bool{b <==> R.op_Greater_Equals_Dot x y}) +val ( <. ) (x y : real) : GTot (b:bool{b <==> R.op_Less_Dot x y}) +val ( <=. 
) (x y : real) : GTot (b:bool{b <==> R.op_Less_Equals_Dot x y}) + +unfold let zero = R.zero +unfold let one = R.one +unfold let two = R.two +unfold let sqrt_2 = R.sqrt_2 diff --git a/stage0/ulib/FStar.Real.fsti b/stage0/ulib/FStar.Real.fsti new file mode 100644 index 00000000000..f4f265aeb6a --- /dev/null +++ b/stage0/ulib/FStar.Real.fsti @@ -0,0 +1,50 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Real +(* + This module provides a signature for real arithmetic. + + Real number constants can be specific in floating point format with + an 'R' suffix, e.g., 1.0R + + All these operations are mapped to the corresponding primitives + in Z3's theory of real arithmetic. + + This is only a logical model of the reals. There is no extraction + for them, as they are an erasable type. Any operation that can observe + a real (comparisons, etc) must be Ghost or a proposition. +*) + +[@@erasable] +val real : Type0 + +val of_int : int -> Tot real + +val ( +. ) : real -> real -> Tot real +val ( -. ) : real -> real -> Tot real +val ( *. ) : real -> real -> Tot real +val ( /. ) : real -> d:real{d =!= 0.0R} -> Tot real + +val ( >. ) : real -> real -> prop +val ( >=. ) : real -> real -> prop + +val ( <. ) : real -> real -> prop +val ( <=. ) : real -> real -> prop + +let zero : real = of_int 0 +let one : real = of_int 1 +let two : real = of_int 2 +val sqrt_2 : r:real{r >=. 0.0R /\ r *. r == two} diff --git a/stage0/ulib/FStar.Ref.fst b/stage0/ulib/FStar.Ref.fst new file mode 100644 index 00000000000..e597f5a7e83 --- /dev/null +++ b/stage0/ulib/FStar.Ref.fst @@ -0,0 +1,74 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Ref + +(* wrapper over FStar.ST to provide operations over refs with default preorder *) + +include FStar.Heap +include FStar.ST + +open FStar.Heap +open FStar.ST + +unfold +let sel (#a:Type0) (h:heap) (r:ref a) : GTot a + = Heap.sel h r + +unfold +let upd (#a:Type0) (h:heap) (r:ref a) (v:a) :GTot heap + = Heap.upd h r v + +unfold +let addr_of (#a:Type0) (r:ref a) : GTot nat = addr_of r + +unfold +let contains (#a:Type0) (h:heap) (r:ref a) :GTot Type0 + = Heap.contains h r + +unfold +let unused_in (#a:Type0) (r:ref a) (h:heap) :GTot Type0 + = Heap.unused_in r h + +unfold +let fresh (#a:Type0) (r:ref a) (h0:heap) (h1:heap) : Type0 + = Heap.fresh r h0 h1 + +unfold +let only (#a:Type0) (r:ref a) :GTot (Set.set nat) + = Heap.only r + +val recall (#a:Type0) (r:ref a) : STATE unit (fun p h -> h `contains` r ==> p () h) +let recall #_ r = recall r + +val alloc (#a:Type0) (init:a) + :ST (ref a) + (fun _ -> True) + (fun h0 r h1 -> fresh r h0 h1 /\ modifies Set.empty h0 h1 /\ sel h1 r == init) +let alloc #_ init = alloc init + +val read (#a:Type0) (r:ref a) :STATE a (fun p h -> p (sel h r) h) +let read #_ r = read r + +val write (#a:Type0) (r:ref a) (v:a) + :ST unit (fun _ -> True) (fun h0 _ h1 -> h0 `contains` r /\ modifies (only r) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v) +let write #_ r v = write r v + +val op_Bang (#a:Type0) (r:ref a) :STATE a (fun p h -> p (sel h r) h) +let op_Bang #_ r = read r + +val op_Colon_Equals (#a:Type0) (r:ref a) (v:a) + :ST unit (fun _ -> True) (fun h0 _ h1 -> h0 `contains` r /\ modifies (only r) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v) +let op_Colon_Equals #_ r v = write r v diff --git a/stage0/ulib/FStar.RefinementExtensionality.fst b/stage0/ulib/FStar.RefinementExtensionality.fst new file mode 100644 index 00000000000..6ac69db4317 --- /dev/null +++ b/stage0/ulib/FStar.RefinementExtensionality.fst @@ -0,0 +1,33 @@ +module FStar.RefinementExtensionality + +open FStar.FunctionalExtensionality +open FStar.PredicateExtensionality + +let refext0 (t:Type) (r1 : t -> prop) (r2 : t -> prop) : + Lemma (requires (r1 == r2)) + (ensures (x:t{r1 x} == x:t{r2 x})) = () + +let refext_on_domain (t:Type) (r1 : t -> prop) (r2 : t -> prop) : + Lemma (requires (forall x. r1 x <==> r2 x)) + (ensures (x:t{on t r1 x} == x:t{on t r2 x})) = + PredicateExtensionality.predicateExtensionality _ r1 r2; + refext0 t (on t r1) (on t r2) + +let refext (t:Type) (r1 : t -> prop) (r2 : t -> prop) : + Lemma (requires (forall x. r1 x <==> r2 x)) + (ensures (x:t{r1 x} == x:t{r2 x})) = + assert (x:t{on t r1 x} == x:t{r1 x}); + assert (x:t{on t r2 x} == x:t{r2 x}); + refext_on_domain t r1 r2; + () + +(* Small test. Use names to avoid hash-consing mismatches. *) +let ref1 (x:int) : prop = b2t (x >= 0) +let ref2 (x:int) : prop = x >= 0 \/ x >= 1 + +let ty1 = x:int{ref1 x} +let ty2 = x:int{ref2 x} + +let _ = + refext int ref1 ref2; + assert (ty1 == ty2) diff --git a/stage0/ulib/FStar.RefinementExtensionality.fsti b/stage0/ulib/FStar.RefinementExtensionality.fsti new file mode 100644 index 00000000000..5fb90dd1fee --- /dev/null +++ b/stage0/ulib/FStar.RefinementExtensionality.fsti @@ -0,0 +1,5 @@ +module FStar.RefinementExtensionality + +val refext (t:Type) (r1 : t -> prop) (r2 : t -> prop) : + Lemma (requires (forall x. 
r1 x <==> r2 x)) + (ensures (x:t{r1 x} == x:t{r2 x})) diff --git a/stage0/ulib/FStar.Reflection.Const.fst b/stage0/ulib/FStar.Reflection.Const.fst new file mode 100644 index 00000000000..678695745f7 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.Const.fst @@ -0,0 +1,79 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.Const + +(* Common lids *) + +// TODO: these are awful names +// TODO: _qn vs _lid + +let imp_qn = ["Prims"; "l_imp"] +let and_qn = ["Prims"; "l_and"] +let or_qn = ["Prims"; "l_or"] +let not_qn = ["Prims"; "l_not"] +let iff_qn = ["Prims"; "l_iff"] +let eq2_qn = ["Prims"; "eq2"] +let eq1_qn = ["Prims"; "eq"] +let true_qn = ["Prims"; "l_True"] +let false_qn = ["Prims"; "l_False"] +let b2t_qn = ["Prims"; "b2t"] +let forall_qn = ["Prims"; "l_Forall"] +let exists_qn = ["Prims"; "l_Exists"] +let squash_qn = ["Prims"; "squash"] +let prop_qn = ["Prims"; "prop"] + +let bool_true_qn = ["Prims"; "true"] +let bool_false_qn = ["Prims"; "false"] + +let int_lid = ["Prims"; "int"] +let bool_lid = ["Prims"; "bool"] +let unit_lid = ["Prims"; "unit"] +let string_lid = ["Prims"; "string"] + +let add_qn = ["Prims"; "op_Addition"] +let neg_qn = ["Prims"; "op_Minus"] +let minus_qn = ["Prims"; "op_Subtraction"] +let mult_qn = ["Prims"; "op_Multiply"] +let mult'_qn = ["FStar"; "Mul"; "op_Star"] +let div_qn = ["Prims"; "op_Division"] +let lt_qn = ["Prims"; "op_LessThan"] +let lte_qn = ["Prims"; "op_LessThanOrEqual"] +let gt_qn = ["Prims"; "op_GreaterThan"] +let gte_qn = ["Prims"; "op_GreaterThanOrEqual"] +let mod_qn = ["Prims"; "op_Modulus"] + +let nil_qn = ["Prims"; "Nil"] +let cons_qn = ["Prims"; "Cons"] + +let mktuple2_qn = ["FStar"; "Pervasives"; "Native"; "Mktuple2"] +let mktuple3_qn = ["FStar"; "Pervasives"; "Native"; "Mktuple3"] +let mktuple4_qn = ["FStar"; "Pervasives"; "Native"; "Mktuple4"] +let mktuple5_qn = ["FStar"; "Pervasives"; "Native"; "Mktuple5"] +let mktuple6_qn = ["FStar"; "Pervasives"; "Native"; "Mktuple6"] +let mktuple7_qn = ["FStar"; "Pervasives"; "Native"; "Mktuple7"] +let mktuple8_qn = ["FStar"; "Pervasives"; "Native"; "Mktuple8"] + +let land_qn = ["FStar" ; "UInt" ; "logand"] +let lxor_qn = ["FStar" ; "UInt" ; "logxor"] +let lor_qn = ["FStar" ; "UInt" ; "logor"] +let ladd_qn = ["FStar" ; "UInt" ; "add_mod"] +let lsub_qn = ["FStar" ; "UInt" ; "sub_mod"] +let shiftl_qn = ["FStar" ; "UInt" ; "shift_left"] +let shiftr_qn = ["FStar" ; "UInt" ; "shift_right"] +let udiv_qn = ["FStar" ; "UInt" ; "udiv"] +let umod_qn = ["FStar" ; "UInt" ; "mod"] +let mul_mod_qn = ["FStar" ; "UInt" ; "mul_mod"] +let nat_bv_qn = ["FStar" ; "BV" ; "int2bv"] diff --git a/stage0/ulib/FStar.Reflection.Formula.fst b/stage0/ulib/FStar.Reflection.Formula.fst new file mode 100644 index 00000000000..3e8ff064485 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.Formula.fst @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with 
the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.Formula + +(* This module is a temporary for Meta-F* migration *) +include FStar.Reflection.V1.Formula diff --git a/stage0/ulib/FStar.Reflection.TermEq.Simple.fst b/stage0/ulib/FStar.Reflection.TermEq.Simple.fst new file mode 100644 index 00000000000..9ea6044b6af --- /dev/null +++ b/stage0/ulib/FStar.Reflection.TermEq.Simple.fst @@ -0,0 +1,10 @@ +module FStar.Reflection.TermEq.Simple + +open FStar.Stubs.Reflection.Types +open FStar.Reflection.TermEq + +let term_eq = term_eq +let term_eq_ok _ _ = () + +let univ_eq = univ_eq +let univ_eq_ok _ _ = () diff --git a/stage0/ulib/FStar.Reflection.TermEq.Simple.fsti b/stage0/ulib/FStar.Reflection.TermEq.Simple.fsti new file mode 100644 index 00000000000..b8575ebc9dc --- /dev/null +++ b/stage0/ulib/FStar.Reflection.TermEq.Simple.fsti @@ -0,0 +1,31 @@ +module FStar.Reflection.TermEq.Simple + +(* This is just a wrapper over FStar.Reflection.TermEq. + +For clients who just want to use term_eq, and not term_eq_dec, this +interface brings in less dependencies. + +Returning just a boolean and providing the SMTPat lemma makes it a bit +more convenient to use: one can write `if term_eq (foo _) _` for an +effectful foo without running into a variable escaping its scope. *) + +open FStar.Stubs.Reflection.Types + +(* A conservative version: works on all terms, returns `true` if they +are guaranteed to be equal. *) +[@@plugin] +val term_eq (t1 t2 : term) : bool + +val term_eq_ok (t1 t2 : term) + : Lemma (requires term_eq t1 t2) + (ensures t1 == t2) + [SMTPat (term_eq t1 t2)] + +(* Idem for universes *) +[@@plugin] +val univ_eq (u1 u2 : universe) : bool + +val univ_eq_ok (u1 u2 : universe) + : Lemma (requires univ_eq u1 u2) + (ensures u1 == u2) + [SMTPat (univ_eq u1 u2)] diff --git a/stage0/ulib/FStar.Reflection.TermEq.fst b/stage0/ulib/FStar.Reflection.TermEq.fst new file mode 100644 index 00000000000..0b38743a70a --- /dev/null +++ b/stage0/ulib/FStar.Reflection.TermEq.fst @@ -0,0 +1,703 @@ +module FStar.Reflection.TermEq + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data +open FStar.Stubs.Reflection.V2.Builtins +module L = FStar.List.Tot + + +(* "SMT may not be able to prove the types of ... and ... to be equal, +if the proof fails, try annotating these with the same type" *) +#set-options "--warn_error -290" + +(* +This file raised several limitations of the SMT encoding. Lines marked +with #2908 are seemingly consequences of issue #2908 and should be removed. +In fact many auxiliary lemmas should be removed as they should become +obvious. Lines marked with *** should not be needed. 
+*) + +let rec memP_allP #a #b (top:b) (pred : (x:a{x << top}) -> Type) (x : a) (l : list a{l << top}) + : Lemma (requires allP top pred l /\ L.memP x l) + (ensures x << top /\ pred x) + [SMTPat (allP top pred l); SMTPat (L.memP x l)] + = match l with + | [] -> () + | y::ys -> + if StrongExcludedMiddle.strong_excluded_middle (x == y) then () else memP_allP top pred x ys + +let rec memP_allP0 #a (pred : a -> Type) (x : a) (l : list a) + : Lemma (requires allP0 pred l /\ L.memP x l) + (ensures pred x) + [SMTPat (allP0 pred l); SMTPat (L.memP x l)] + = match l with + | [] -> () + | y::ys -> + if StrongExcludedMiddle.strong_excluded_middle (x == y) then () else memP_allP0 pred x ys + +let rec memP_dec #a (x : a) (l : list a) + : Lemma (requires L.memP x l) + (ensures x << l) + [SMTPat (L.memP x l)] + = match l with + | [] -> () + | y::ys -> + if StrongExcludedMiddle.strong_excluded_middle (x == y) then () else memP_dec x ys + +(* FIXME: the only reason these are not exposed is to not contaminate +the namespace for most users, especially as `Eq` is already used in +Reflection.Formula. But there should not be a problem with using this +name fully-qualified. *) +private +type _cmpres = + | Eq + | Neq + | Unknown + +// Would it be easier for the proofs to be embedded in the _cmpres? +let valid #t (c:_cmpres) (x y : t) = + match c with + | Eq -> x == y + | Neq -> x =!= y + | Unknown -> True + +type cmpres #t (x y : t) = c:_cmpres{valid c x y} + +type comparator_for (t:Type) = x:t -> y:t -> cmpres x y + +let (&&&) (#s : Type u#ss) (#t : Type u#tt) (#x #y : s) (#w #z : t) + ($c1 : cmpres x y) + ($c2 : cmpres w z) + : cmpres (x,w) (y,z) = + match c1, c2 with + | Eq, Eq -> Eq + | Neq, _ + | _, Neq -> Neq + | _ -> Unknown + +val bv_cmp : comparator_for bv +let bv_cmp x1 x2 = + let v1 = inspect_bv x1 in + let v2 = inspect_bv x2 in + pack_inspect_bv x1; + pack_inspect_bv x2; + sealed_singl v1.sort v2.sort; + if v1.index = v2.index then Eq else Neq + +val namedv_cmp : comparator_for namedv +let namedv_cmp x1 x2 = + let v1 = inspect_namedv x1 in + let v2 = inspect_namedv x2 in + pack_inspect_namedv x1; + pack_inspect_namedv x2; + sealed_singl v1.sort v2.sort; + if v1.uniq = v2.uniq then Eq else Neq + +val fv_cmp : comparator_for fv +let fv_cmp f1 f2 = + pack_inspect_fv f1; + pack_inspect_fv f2; + let n1 = inspect_fv f1 in + let n2 = inspect_fv f2 in + if n1 = n2 then Eq else Neq + +val opt_cmp : #a:Type -> comparator_for a -> comparator_for (option a) +let opt_cmp cmp o1 o2 = + match o1, o2 with + | None, None -> Eq + | Some x, Some y -> cmp x y + | _ -> Neq + +val either_cmp : #a:Type -> #b:Type -> comparator_for a -> comparator_for b -> comparator_for (either a b) +let either_cmp cmpa cmpb e1 e2 = + match e1, e2 with + | Inl x, Inl y -> cmpa x y + | Inr x, Inr y -> cmpb x y + | _ -> Neq + +val pair_cmp : #a:Type -> #b:Type -> comparator_for a -> comparator_for b -> comparator_for (a & b) +let pair_cmp cmpa cmpb (a1, b1) (a2, b2) = + cmpa a1 a2 &&& cmpb b1 b2 + +val list_cmp : #a:Type u#aa -> comparator_for a -> comparator_for (list a) +let rec list_cmp #a cmp l1 l2 = + match l1, l2 with + | [], [] -> Eq + | x::xs, y::ys -> cmp x y &&& list_cmp cmp xs ys + | _ -> Neq + +val list_dec_cmp : +#a:Type u#aa -> #b:Type u#bb -> top1:b -> top2:b -> + f:(x:a -> y:a{x << top1 /\ y << top2} -> cmpres x y) -> + l1:(list a) -> l2:(list a){l1 << top1 /\ l2 << top2} -> + cmpres l1 l2 +let rec list_dec_cmp #a top1 top2 cmp l1 l2 = + match l1, l2 with + | [], [] -> Eq + | x::xs, y::ys -> cmp x y &&& list_dec_cmp top1 top2 
cmp xs ys + | _ -> Neq + +val opt_dec_cmp : #a:Type -> #b:Type -> top1:b -> top2:b -> + (f : (x:a -> y:a{x << top1 /\ y << top2} -> cmpres x y)) -> + o1:(option a){o1 << top1} -> o2:(option a){o2 << top2} -> + cmpres o1 o2 +let opt_dec_cmp top1 top2 cmp o1 o2 = + match o1, o2 with + | None, None -> Eq + | Some x, Some y -> cmp x y + | _ -> Neq + +val either_dec_cmp : #a:Type -> #b:Type -> #c:Type -> top1:c -> top2:c -> + (x:a -> y:a{x< cmpres x y) -> + (x:b -> y:b{x< cmpres x y) -> + e1 :(either a b){e1 << top1} -> + e2 :(either a b){e2 << top2} -> + cmpres e1 e2 +let either_dec_cmp top1 top2 cmpa cmpb e1 e2 = + match e1, e2 with + | Inl x, Inl y -> cmpa x y + | Inr x, Inr y -> cmpb x y + | _ -> Neq + +val eq_cmp : #a:eqtype -> comparator_for a +let eq_cmp x y = if x = y then Eq else Neq + +val range_cmp : comparator_for range +let range_cmp r1 r2 = + Sealed.sealed_singl r1 r2; + Eq + +val ident_cmp : comparator_for ident +let ident_cmp i1 i2 = + let iv1 = inspect_ident i1 in + let iv2 = inspect_ident i2 in + pack_inspect_ident i1; + pack_inspect_ident i2; + Sealed.sealed_singl (snd iv1) (snd iv2); + eq_cmp (fst iv1) (fst iv2) + +val univ_cmp : comparator_for universe +let rec univ_cmp (u1 u2 : universe) = + pack_inspect_universe u1; + pack_inspect_universe u2; + let uv1 = inspect_universe u1 in + let uv2 = inspect_universe u2 in + match uv1, uv2 with + | Uv_Zero, Uv_Zero -> Eq + | Uv_Succ u1, Uv_Succ u2 -> univ_cmp u1 u2 + | Uv_Max us1, Uv_Max us2 -> list_dec_cmp u1 u2 univ_cmp us1 us2 + | Uv_BVar v1, Uv_BVar v2 -> eq_cmp v1 v2 + | Uv_Name n1, Uv_Name n2 -> ident_cmp n1 n2 + | Uv_Unif u1, Uv_Unif u2 -> Unknown + | Uv_Unk, Uv_Unk -> Eq + | _ -> Neq + +val const_cmp : comparator_for vconst +let const_cmp c1 c2 = + match c1, c2 with + | C_Unit, C_Unit -> Eq + | C_Int i1, C_Int i2 -> eq_cmp i1 i2 + | C_True, C_True -> Eq + | C_False, C_False -> Eq + | C_String s1, C_String s2 -> eq_cmp s1 s2 + | C_Range r1, C_Range r2 -> range_cmp r1 r2 + | C_Reify, C_Reify -> Eq + | C_Reflect n1, C_Reflect n2 -> eq_cmp n1 n2 + | C_Real s1, C_Real s2 -> eq_cmp s1 s2 + | _ -> Neq + +(* TODO. Or seal...? 
*) +val ctxu_cmp : comparator_for ctx_uvar_and_subst +let ctxu_cmp _ _ = Unknown + +val term_cmp : comparator_for term +val binder_cmp : comparator_for binder +val aqual_cmp : comparator_for aqualv +val arg_cmp : comparator_for argv +val comp_cmp : comparator_for comp +val pat_cmp : comparator_for pattern +val pat_arg_cmp : comparator_for (pattern & bool) +val br_cmp : comparator_for branch +val match_returns_ascription_cmp : comparator_for match_returns_ascription + +let rec term_cmp t1 t2 = + pack_inspect_inv t1; + pack_inspect_inv t2; + let tv1 = inspect_ln t1 in + let tv2 = inspect_ln t2 in + match tv1, tv2 with + | Tv_Unsupp, _ + | _, Tv_Unsupp -> Unknown + | Tv_Var v1, Tv_Var v2 -> namedv_cmp v1 v2 + | Tv_BVar v1, Tv_BVar v2 -> bv_cmp v1 v2 + | Tv_FVar f1, Tv_FVar f2 -> fv_cmp f1 f2 + | Tv_UInst f1 u1, Tv_UInst f2 u2 -> + fv_cmp f1 f2 &&& list_dec_cmp t1 t2 univ_cmp u1 u2 + + | Tv_App h1 a1, Tv_App h2 a2 -> + term_cmp h1 h2 &&& arg_cmp a1 a2 + + | Tv_Abs b1 e1, Tv_Abs b2 e2 -> + binder_cmp b1 b2 + &&& term_cmp e1 e2 + + | Tv_Arrow b1 c1, Tv_Arrow b2 c2 -> + binder_cmp b1 b2 + &&& comp_cmp c1 c2 + + | Tv_Type u1, Tv_Type u2 -> + univ_cmp u1 u2 + + | Tv_Refine sb1 r1, Tv_Refine sb2 r2 -> + binder_cmp sb1 sb2 + &&& term_cmp r1 r2 + + | Tv_Const c1, Tv_Const c2 -> + const_cmp c1 c2 + + | Tv_Uvar n1 u1, Tv_Uvar n2 u2 -> + eq_cmp n1 n2 &&& ctxu_cmp u1 u2 + + | Tv_Let r1 attrs1 sb1 e1 b1, Tv_Let r2 attrs2 sb2 e2 b2 -> + eq_cmp r1 r2 + &&& list_dec_cmp t1 t2 term_cmp attrs1 attrs2 + &&& binder_cmp sb1 sb2 + &&& term_cmp e1 e2 + &&& term_cmp b1 b2 + + | Tv_Match sc1 o1 brs1, Tv_Match sc2 o2 brs2 -> + term_cmp sc1 sc2 + &&& opt_dec_cmp t1 t2 match_returns_ascription_cmp o1 o2 + &&& list_dec_cmp t1 t2 br_cmp brs1 brs2 + + | Tv_AscribedT e1 ta1 tacopt1 eq1, Tv_AscribedT e2 ta2 tacopt2 eq2 -> + term_cmp e1 e2 + &&& term_cmp ta1 ta2 + &&& opt_dec_cmp t1 t2 term_cmp tacopt1 tacopt2 + &&& eq_cmp eq1 eq2 + + | Tv_AscribedC e1 c1 tacopt1 eq1, Tv_AscribedC e2 c2 tacopt2 eq2 -> + term_cmp e1 e2 + &&& comp_cmp c1 c2 + &&& opt_dec_cmp t1 t2 term_cmp tacopt1 tacopt2 + &&& eq_cmp eq1 eq2 + + | Tv_Unknown, Tv_Unknown -> Eq + + | _ -> Neq + +and arg_cmp (a1, q1) (a2, q2) = + term_cmp a1 a2 &&& aqual_cmp q1 q2 + +and aqual_cmp a1 a2 = + match a1, a2 with + | Q_Implicit, Q_Implicit -> Eq + | Q_Explicit, Q_Explicit -> Eq + | Q_Equality, Q_Equality -> Eq + | Q_Meta m1, Q_Meta m2 -> term_cmp m1 m2 + | _ -> Neq + +and match_returns_ascription_cmp asc1 asc2 = + let (b1, (tc1, tacopt1, eq1)) = asc1 in + let (b2, (tc2, tacopt2, eq2)) = asc2 in + binder_cmp b1 b2 + &&& either_dec_cmp asc1 asc2 term_cmp comp_cmp tc1 tc2 + &&& opt_dec_cmp asc1 asc2 term_cmp tacopt1 tacopt2 + &&& eq_cmp eq1 eq2 + +and binder_cmp b1 b2 = + let bv1 = inspect_binder b1 in + let bv2 = inspect_binder b2 in + pack_inspect_binder b1; + pack_inspect_binder b2; + term_cmp bv1.sort bv2.sort + &&& aqual_cmp bv1.qual bv2.qual + &&& list_dec_cmp b1 b2 term_cmp bv1.attrs bv2.attrs + +and comp_cmp c1 c2 = + let cv1 = inspect_comp c1 in + let cv2 = inspect_comp c2 in + pack_inspect_comp_inv c1; + pack_inspect_comp_inv c2; + match cv1, cv2 with + | C_Total t1, C_Total t2 + | C_GTotal t1, C_GTotal t2 -> + term_cmp t1 t2 + + | C_Lemma pre1 post1 pat1, C_Lemma pre2 post2 pat2 -> + term_cmp pre1 pre2 + &&& term_cmp post1 post2 + &&& term_cmp pat1 pat2 + + | C_Eff us1 ef1 t1 args1 dec1, C_Eff us2 ef2 t2 args2 dec2 -> + list_dec_cmp c1 c2 univ_cmp us1 us2 + &&& eq_cmp ef1 ef2 + &&& term_cmp t1 t2 + &&& list_dec_cmp c1 c2 arg_cmp args1 args2 + &&& list_dec_cmp 
c1 c2 term_cmp dec1 dec2 + + | _ -> Neq + +and br_cmp br1 br2 = + //pair_cmp pat_cmp term_cmp br1 br2 + pat_cmp (fst br1) (fst br2) &&& term_cmp (snd br1) (snd br2) + +and pat_cmp p1 p2 = + match p1, p2 with + | Pat_Var x1 s1, Pat_Var x2 s2 -> + sealed_singl x1 x2; + sealed_singl s1 s2; + Eq + | Pat_Constant x1, Pat_Constant x2 -> const_cmp x1 x2 + | Pat_Dot_Term x1, Pat_Dot_Term x2 -> opt_dec_cmp p1 p2 term_cmp x1 x2 + | Pat_Cons head1 us1 subpats1, Pat_Cons head2 us2 subpats2 -> + fv_cmp head1 head2 + &&& opt_dec_cmp p1 p2 (list_dec_cmp p1 p2 univ_cmp) us1 us2 + &&& list_dec_cmp p1 p2 pat_arg_cmp subpats1 subpats2 + + | _ -> Neq + +and pat_arg_cmp (p1, b1) (p2, b2) = + pat_cmp p1 p2 &&& eq_cmp b1 b2 + +let defined r = ~(Unknown? r) + +let def2 f l1 l2 =(forall x y. L.memP x l1 /\ L.memP y l2 ==> defined (f x y)) + +let rec defined_list #a (f : comparator_for a) (l1 l2 : list a) + : Lemma (requires (def2 f l1 l2)) (ensures defined (list_cmp f l1 l2)) + = match l1, l2 with + | [], [] -> () + | x::xs, y::ys -> defined_list f xs ys + | _ -> () + +let rec defined_list_dec #a #b (t1 t2 : b) (f : comparator_for a) + (l1 : list a{l1 << t1}) + (l2 : list a{l2 << t2}) + : Lemma (requires (def2 f l1 l2)) (ensures defined (list_dec_cmp t1 t2 f l1 l2)) + = match l1, l2 with + | [], [] -> () + | x::xs, y::ys -> defined_list_dec t1 t2 f xs ys + | _ -> () + +let faithful_univ_UvMax (u : universe) (us : list universe) + : Lemma (requires inspect_universe u == Uv_Max us /\ faithful_univ u) + (ensures allP u faithful_univ us) + = assert_norm (faithful_univ u <==> allP u faithful_univ us) // #2908 + +let univ_eq_UvMax (u1 u2 : universe) (us1 us2 : list universe) + : Lemma (requires inspect_universe u1 == Uv_Max us1 /\ + inspect_universe u2 == Uv_Max us2) + (ensures univ_cmp u1 u2 == list_dec_cmp u1 u2 univ_cmp us1 us2) + = assert_norm (univ_cmp u1 u2 == list_dec_cmp u1 u2 univ_cmp us1 us2) // #2908 + +val univ_faithful_lemma (u1 u2 : universe) : Lemma (requires faithful_univ u1 /\ faithful_univ u2) (ensures defined (univ_cmp u1 u2)) +let rec univ_faithful_lemma (u1 u2 : universe) = + match inspect_universe u1, inspect_universe u2 with + | Uv_Zero, Uv_Zero -> () + | Uv_Succ u1, Uv_Succ u2 -> univ_faithful_lemma u1 u2 + | Uv_Max us1, Uv_Max us2 -> + (****)faithful_univ_UvMax u1 us1; + (***)faithful_univ_UvMax u2 us2; + univ_faithful_lemma_list u1 u2 us1 us2; + (***)univ_eq_UvMax u1 u2 us1 us2; + () + | Uv_BVar _, Uv_BVar _ -> () + | Uv_Name _, Uv_Name _ -> () + | _ -> () + +and univ_faithful_lemma_list #b (u1 u2 : b) (us1 : list universe{us1 << u1}) (us2 : list universe{us2 << u2}) + : Lemma (requires allP u1 faithful_univ us1 /\ allP u2 faithful_univ us2) + (ensures defined (list_dec_cmp u1 u2 univ_cmp us1 us2)) + (decreases us1) + = + introduce forall x y. L.memP x us1 /\ L.memP y us2 ==> defined (univ_cmp x y) with + (introduce forall y. L.memP x us1 /\ L.memP y us2 ==> defined (univ_cmp x y) with + (introduce (L.memP x us1 /\ L.memP y us2) ==> (defined (univ_cmp x y)) with h. 
( + univ_faithful_lemma x y + ) + ) + ) + ; + defined_list_dec u1 u2 univ_cmp us1 us2 + +(* Just a placeholder for now *) +val faithful_lemma (t1:term) (t2:term) : Lemma (requires faithful t1 /\ faithful t2) (ensures defined (term_cmp t1 t2)) + +#push-options "--z3rlimit 40" + +let faithful_Tv_UInst (t : term) (f : fv) (us : list universe) + : Lemma (requires inspect_ln t == Tv_UInst f us + /\ faithful t) + (ensures allP t faithful_univ us) + = () + +let faithful_Tv_Let (t : term) (recf : bool) (attrs : list term) (b:simple_binder) (ty:typ) (def body : term) + : Lemma (requires inspect_ln t == Tv_Let recf attrs b def body + /\ faithful t) + (ensures faithful_attrs attrs) + = () + +let term_eq_Tv_Let (t1 t2 : term) (recf1 recf2 : bool) (attrs1 attrs2 : list term) (b1 b2:simple_binder) (def1 def2 body1 body2: term) + : Lemma (requires inspect_ln t1 == Tv_Let recf1 attrs1 b1 def1 body1 + /\ inspect_ln t2 == Tv_Let recf2 attrs2 b2 def2 body2) + (ensures term_cmp t1 t2 == (eq_cmp recf1 recf2 &&& list_dec_cmp t1 t2 term_cmp attrs1 attrs2 &&& binder_cmp b1 b2 &&& term_cmp def1 def2 &&& term_cmp body1 body2)) + = assume (term_cmp t1 t2 == (eq_cmp recf1 recf2 &&& list_dec_cmp t1 t2 term_cmp attrs1 attrs2 &&& binder_cmp b1 b2 &&& term_cmp def1 def2 &&& term_cmp body1 body2)) // #2908, somehow assert_norm also does not work + +let faithful_Tv_Match (t : term) (sc : term) (o : option match_returns_ascription) (brs : list branch) + : Lemma (requires inspect_ln t == Tv_Match sc o brs /\ faithful t) + (ensures allP t faithful_branch brs) + = assert_norm (faithful t ==> allP t faithful_branch brs) + +let term_eq_Tv_Match (t1 t2 : term) (sc1 sc2 : term) (o1 o2 : option match_returns_ascription) (brs1 brs2 : list branch) + : Lemma (requires inspect_ln t1 == Tv_Match sc1 o1 brs1 + /\ inspect_ln t2 == Tv_Match sc2 o2 brs2) + (ensures term_cmp t1 t2 == (term_cmp sc1 sc2 + &&& opt_dec_cmp t1 t2 match_returns_ascription_cmp o1 o2 + &&& list_dec_cmp t1 t2 br_cmp brs1 brs2)) + = assume (term_cmp t1 t2 == (term_cmp sc1 sc2 + &&& opt_dec_cmp t1 t2 match_returns_ascription_cmp o1 o2 + &&& list_dec_cmp t1 t2 br_cmp brs1 brs2)) // #2908, somehow assert_norm also does not work + +let faithful_Pat_Cons (p : pattern) (f:fv) (ous : option universes) (subpats : list (pattern & bool)) + : Lemma (requires p == Pat_Cons f ous subpats /\ faithful_pattern p) + (ensures allP p faithful_pattern_arg subpats) + = assert_norm (faithful_pattern p ==> allP p faithful_pattern_arg subpats) // #2908 + +let pat_eq_Pat_Cons (p1 p2 : pattern) (f1 f2 : fv) (ous1 ous2 : option universes) (args1 args2 : list (pattern & bool)) + : Lemma (requires p1 == Pat_Cons f1 ous1 args1 /\ p2 == Pat_Cons f2 ous2 args2) + (ensures pat_cmp p1 p2 == (fv_cmp f1 f2 + &&& opt_dec_cmp p1 p2 (list_dec_cmp p1 p2 univ_cmp) ous1 ous2 + &&& list_dec_cmp p1 p2 pat_arg_cmp args1 args2)) + = assert_norm (pat_cmp p1 p2 == (fv_cmp f1 f2 + &&& opt_dec_cmp p1 p2 (list_dec_cmp p1 p2 univ_cmp) ous1 ous2 + &&& list_dec_cmp p1 p2 pat_arg_cmp args1 args2)) + +let comp_eq_C_Eff (c1 c2 : comp) (us1 us2 : universes) (ef1 ef2 : name) (t1 t2 : typ) (args1 args2 : list argv) (dec1 dec2 : list term) + : Lemma (requires inspect_comp c1 == C_Eff us1 ef1 t1 args1 dec1 + /\ inspect_comp c2 == C_Eff us2 ef2 t2 args2 dec2) + (ensures comp_cmp c1 c2 == (list_dec_cmp c1 c2 univ_cmp us1 us2 + &&& eq_cmp ef1 ef2 + &&& term_cmp t1 t2 + &&& list_dec_cmp c1 c2 arg_cmp args1 args2 + &&& list_dec_cmp c1 c2 term_cmp dec1 dec2)) + = assume (comp_cmp c1 c2 == (list_dec_cmp c1 c2 univ_cmp us1 us2 + &&& 
eq_cmp ef1 ef2 + &&& term_cmp t1 t2 + &&& list_dec_cmp c1 c2 arg_cmp args1 args2 + &&& list_dec_cmp c1 c2 term_cmp dec1 dec2)) // #2908, assert_norm doesn't work + +let rec faithful_lemma (t1 t2 : term) = + match inspect_ln t1, inspect_ln t2 with + | Tv_Var _, Tv_Var _ + | Tv_BVar _, Tv_BVar _ + | Tv_FVar _, Tv_FVar _ -> () + | Tv_UInst f1 us1, Tv_UInst f2 us2 -> + let tv1 = inspect_ln t1 in + let tv2 = inspect_ln t2 in + univ_faithful_lemma_list t1 t2 us1 us2; + () + + | Tv_Const c1, Tv_Const c2 -> () + | Tv_App h1 a1, Tv_App h2 a2 -> + faithful_lemma h1 h2; + faithful_lemma_arg a1 a2 + | Tv_Abs b1 t1, Tv_Abs b2 t2 -> + faithful_lemma_binder b1 b2; + faithful_lemma t1 t2 + | Tv_Arrow b1 c1, Tv_Arrow b2 c2 -> + faithful_lemma_binder b1 b2; + faithful_lemma_comp c1 c2 + | Tv_Type u1, Tv_Type u2 -> + univ_faithful_lemma u1 u2 + | Tv_Refine b1 t1, Tv_Refine b2 t2 -> + faithful_lemma_binder b1 b2; + faithful_lemma t1 t2 + + | Tv_Let r1 ats1 x1 e1 b1, Tv_Let r2 ats2 x2 e2 b2 -> + faithful_lemma_attrs_dec t1 t2 ats1 ats2; + faithful_lemma_binder x1 x2; + faithful_lemma e1 e2; + faithful_lemma b1 b2; + (***)term_eq_Tv_Let t1 t2 r1 r2 ats1 ats2 x1 x2 e1 e2 b1 b2; + () + + | Tv_Match sc1 o1 brs1, Tv_Match sc2 o2 brs2 -> + (***)faithful_Tv_Match t1 sc1 o1 brs1; + (***)faithful_Tv_Match t2 sc2 o2 brs2; + faithful_lemma sc1 sc2; + faithful_lemma_branches t1 t2 brs1 brs2; + (***)term_eq_Tv_Match t1 t2 sc1 sc2 o1 o2 brs1 brs2; + () + + | Tv_AscribedT e1 t1 tacopt1 eq1, Tv_AscribedT e2 t2 tacopt2 eq2 -> + faithful_lemma e1 e2; + faithful_lemma t1 t2; + (match tacopt1, tacopt2 with | Some t1, Some t2 -> faithful_lemma t1 t2 | _ -> ()); + () + + | Tv_AscribedC e1 c1 tacopt1 eq1, Tv_AscribedC e2 c2 tacopt2 eq2 -> + faithful_lemma e1 e2; + faithful_lemma_comp c1 c2; + (match tacopt1, tacopt2 with | Some t1, Some t2 -> faithful_lemma t1 t2 | _ -> ()); + () + + | Tv_Unknown, Tv_Unknown -> () + + | _ -> assert (defined (term_cmp t1 t2)) (* rest of the cases trivial *) + +and faithful_lemma_pattern (p1 p2 : pattern) : Lemma (requires faithful_pattern p1 /\ faithful_pattern p2) (ensures defined (pat_cmp p1 p2)) = + match p1, p2 with + | Pat_Var _ _, Pat_Var _ _ -> () + | Pat_Constant _, Pat_Constant _ -> () + | Pat_Dot_Term (Some t1), Pat_Dot_Term (Some t2) -> + faithful_lemma t1 t2 + | Pat_Cons head1 univs1 subpats1, Pat_Cons head2 univs2 subpats2 -> + (***)faithful_Pat_Cons p1 head1 univs1 subpats1; + (***)faithful_Pat_Cons p2 head2 univs2 subpats2; + let aux : squash (defined (opt_dec_cmp p1 p2 (list_dec_cmp p1 p2 univ_cmp) univs1 univs2)) = + match univs1, univs2 with + | Some us1, Some us2 -> + univ_faithful_lemma_list p1 p2 us1 us2 + | _ -> () + in + faithful_lemma_pattern_args p1 p2 subpats1 subpats2; + (***)pat_eq_Pat_Cons p1 p2 head1 head2 univs1 univs2 subpats1 subpats2; + () + + | _ -> () + +and faithful_lemma_pattern_arg (pb1 pb2 : pattern & bool) + : Lemma (requires faithful_pattern_arg pb1 /\ faithful_pattern_arg pb2) + (ensures defined (pat_arg_cmp pb1 pb2)) + = + let (p1, _) = pb1 in + let (p2, _) = pb2 in + faithful_lemma_pattern p1 p2 + +and faithful_lemma_pattern_args #b + (top1 top2 : b) + (pats1 : list (pattern & bool){pats1 << top1}) + (pats2 : list (pattern & bool){pats2 << top2}) + : Lemma (requires allP top1 faithful_pattern_arg pats1 /\ allP top2 faithful_pattern_arg pats2) + (ensures defined (list_dec_cmp top1 top2 pat_arg_cmp pats1 pats2)) + (decreases pats1) + = + introduce forall x y. L.memP x pats1 /\ L.memP y pats2 ==> defined (pat_arg_cmp x y) with + (introduce forall y. 
L.memP x pats1 /\ L.memP y pats2 ==> defined (pat_arg_cmp x y) with + (introduce (L.memP x pats1 /\ L.memP y pats2) ==> (defined (pat_arg_cmp x y)) with h. ( + faithful_lemma_pattern_arg x y + ) + ) + ) + ; + defined_list_dec top1 top2 pat_arg_cmp pats1 pats2 + +and faithful_lemma_branch (br1 br2 : branch) : Lemma (requires faithful_branch br1 /\ faithful_branch br2) (ensures defined (br_cmp br1 br2)) = + faithful_lemma_pattern (fst br1) (fst br2); + faithful_lemma (snd br1) (snd br2) + +and faithful_lemma_branches #b (top1 top2 : b) + (brs1 : list branch{brs1 << top1}) + (brs2 : list branch{brs2 << top2}) + : Lemma (requires allP top1 faithful_branch brs1 /\ allP top2 faithful_branch brs2) + (ensures defined (list_dec_cmp top1 top2 br_cmp brs1 brs2)) + (decreases brs1) + = + introduce forall x y. L.memP x brs1 /\ L.memP y brs2 ==> defined (br_cmp x y) with + (introduce forall y. L.memP x brs1 /\ L.memP y brs2 ==> defined (br_cmp x y) with + (introduce (L.memP x brs1 /\ L.memP y brs2) ==> (defined (br_cmp x y)) with h. ( + faithful_lemma_branch x y + ) + ) + ) + ; + defined_list_dec top1 top2 br_cmp brs1 brs2 + +and faithful_lemma_arg (a1 a2 : argv) : Lemma (requires faithful_arg a1 /\ faithful_arg a2) (ensures defined (arg_cmp a1 a2)) = + faithful_lemma (fst a1) (fst a2); + (match snd a1, snd a2 with | Q_Meta t1, Q_Meta t2 -> faithful_lemma t1 t2 | _ -> ()) + +and faithful_lemma_binder (b1 b2 : binder) : Lemma (requires faithful_binder b1 /\ faithful_binder b2) (ensures defined (binder_cmp b1 b2)) = + let bv1 = inspect_binder b1 in + let bv2 = inspect_binder b2 in + faithful_lemma_qual bv1.qual bv2.qual; + faithful_lemma bv1.sort bv2.sort; + faithful_lemma_attrs_dec b1 b2 bv1.attrs bv2.attrs; + assert_norm ( + (term_cmp bv1.sort bv2.sort + &&& aqual_cmp bv1.qual bv2.qual + &&& list_dec_cmp b1 b2 term_cmp bv1.attrs bv2.attrs) == binder_cmp b1 b2); + () + +and faithful_lemma_qual (q1 q2 : aqualv) : Lemma (requires faithful_qual q1 /\ faithful_qual q2) (ensures defined (aqual_cmp q1 q2)) = + match q1, q2 with + | Q_Meta t1, Q_Meta t2 -> faithful_lemma t1 t2 + | _ -> () + +and faithful_lemma_attrs_dec #b (top1 top2 : b) + (at1 : list term{at1 << top1}) + (at2 : list term{at2 << top2}) + : Lemma (requires faithful_attrs at1 /\ faithful_attrs at2) + (ensures defined (list_dec_cmp top1 top2 term_cmp at1 at2)) + (decreases at1) + = + // TODO: factor out + introduce forall x y. L.memP x at1 /\ L.memP y at2 ==> defined (term_cmp x y) with + (introduce forall y. L.memP x at1 /\ L.memP y at2 ==> defined (term_cmp x y) with + (introduce (L.memP x at1 /\ L.memP y at2) ==> (defined (term_cmp x y)) with h. ( + faithful_lemma x y + ) + ) + ) + ; + defined_list_dec top1 top2 term_cmp at1 at2 + +and faithful_lemma_comp (c1 c2 : comp) : Lemma (requires faithful_comp c1 /\ faithful_comp c2) (ensures defined (comp_cmp c1 c2)) = + match inspect_comp c1, inspect_comp c2 with + | C_Total t1, C_Total t2 -> faithful_lemma t1 t2 + | C_GTotal t1, C_GTotal t2 -> faithful_lemma t1 t2 + | C_Lemma pre1 post1 pat1, C_Lemma pre2 post2 pat2 -> + faithful_lemma pre1 pre2; + faithful_lemma post1 post2; + faithful_lemma pat1 pat2 + | C_Eff us1 e1 r1 args1 dec1, C_Eff us2 e2 r2 args2 dec2 -> + univ_faithful_lemma_list c1 c2 us1 us2; + faithful_lemma r1 r2; + introduce forall x y. L.memP x args1 /\ L.memP y args2 ==> defined (arg_cmp x y) with + (introduce forall y. L.memP x args1 /\ L.memP y args2 ==> defined (arg_cmp x y) with + (introduce (L.memP x args1 /\ L.memP y args2) ==> (defined (arg_cmp x y)) with h. 
( + faithful_lemma_arg x y + ) + ) + ) + ; + defined_list_dec c1 c2 arg_cmp args1 args2; + introduce forall x y. L.memP x dec1 /\ L.memP y dec2 ==> defined (term_cmp x y) with + (introduce forall y. L.memP x dec1 /\ L.memP y dec2 ==> defined (term_cmp x y) with + (introduce (L.memP x dec1 /\ L.memP y dec2) ==> (defined (term_cmp x y)) with h. ( + faithful_lemma x y + ) + ) + ) + ; + defined_list_dec c1 c2 term_cmp dec1 dec2; + (***)comp_eq_C_Eff c1 c2 us1 us2 e1 e2 r1 r2 args1 args2 dec1 dec2; + () + | _ -> () +#pop-options + + +let term_eq (t1 t2 : term) : (b:bool{b ==> t1 == t2}) = + Eq? (term_cmp t1 t2) + +let term_eq_dec (t1 t2 : faithful_term) : (b:bool{b <==> t1 == t2}) = + faithful_lemma t1 t2; + Eq? (term_cmp t1 t2) + +let univ_eq (u1 u2 : universe) : (b:bool{b ==> u1 == u2}) = + Eq? (univ_cmp u1 u2) + +let univ_eq_dec (u1 u2 : faithful_universe) : (b:bool{b <==> u1 == u2}) = + univ_faithful_lemma u1 u2; + Eq? (univ_cmp u1 u2) diff --git a/stage0/ulib/FStar.Reflection.TermEq.fsti b/stage0/ulib/FStar.Reflection.TermEq.fsti new file mode 100644 index 00000000000..f8856cbff68 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.TermEq.fsti @@ -0,0 +1,159 @@ +module FStar.Reflection.TermEq + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data +open FStar.Stubs.Reflection.V2.Builtins +module L = FStar.List.Tot + +(* Auxiliary... would be good to move. *) +let rec allP0 #a (pred : a -> Type0) (l : list a) : Type0 = + match l with + | [] -> True + | x::xs -> pred x /\ allP0 pred xs + +let rec allP #a #b (top:b) (pred : (x:a{x << top}) -> Type0) (l : list a{l << top \/ l === top}) : Type0 = + match l with + | [] -> True + | x::xs -> pred x /\ allP top pred xs + +let optP0 #a (pred : a -> Type0) (o : option a) : Type0 = + match o with + | None -> True + | Some x -> pred x + +let optP #a #b (top:b) (pred : (x:a{x << top}) -> Type0) (o : option a{o << top}) : Type0 = + match o with + | None -> True + | Some x -> pred x +(* /Aux *) + +let rec faithful_univ (u : universe) : Type0 = + match inspect_universe u with + | Uv_Unif _ -> False (* We just forbid this *) + + | Uv_Unk + | Uv_Zero + | Uv_BVar _ + | Uv_Name _ -> True + + | Uv_Succ u -> faithful_univ u + | Uv_Max us -> allP u faithful_univ us + +(* Just a placeholder for now *) +let faithful_const (c:vconst) : Type0 = True + +let rec faithful (t:term) : Type0 = + match inspect_ln t with + | Tv_Var _ + | Tv_BVar _ + | Tv_FVar _ + | Tv_Unknown -> + True + + | Tv_Const c -> + faithful_const c + + | Tv_UInst f us -> + allP t faithful_univ us + + | Tv_Unsupp -> False + | Tv_App h a -> + faithful h /\ faithful_arg a + | Tv_Abs b t -> + faithful_binder b /\ faithful t + | Tv_Arrow b c -> + faithful_binder b /\ faithful_comp c + | Tv_Type u -> + faithful_univ u + | Tv_Refine b phi -> + faithful_binder b + /\ faithful phi + + | Tv_Uvar n u -> False + | Tv_Let r ats x e b -> + faithful_attrs ats + /\ faithful_binder x + /\ faithful e + /\ faithful b + + | Tv_Match sc o brs -> + faithful sc + /\ None? 
o // stopgap + /\ allP t faithful_branch brs + + | Tv_AscribedT e ta tacopt eq -> + faithful e + /\ faithful ta + /\ optP t faithful tacopt + + | Tv_AscribedC e c tacopt eq -> + faithful e + /\ faithful_comp c + /\ optP t faithful tacopt + +and faithful_arg (a : argv) : Type0 = + faithful (fst a) /\ faithful_qual (snd a) + +and faithful_qual (q:aqualv) : Type0 = + match q with + | Q_Implicit -> True + | Q_Explicit -> True + | Q_Equality -> True + | Q_Meta m -> faithful m + +and faithful_binder (b:binder) : Type0 = + match inspect_binder b with + | {sort=sort; qual=q; attrs=attrs} -> + faithful sort /\ faithful_qual q /\ faithful_attrs attrs + +and faithful_branch (b : branch) : Type0 = + let (p, t) = b in + faithful_pattern p /\ faithful t + +and faithful_pattern (p : pattern) : Type0 = + match p with + | Pat_Constant c -> faithful_const c + | Pat_Cons head univs subpats -> + optP p (allP p faithful_univ) univs + /\ allP p faithful_pattern_arg subpats + + (* non-binding bvs are always OK *) + | Pat_Var _ _ -> True + | Pat_Dot_Term None -> True + | Pat_Dot_Term (Some t) -> faithful t + +and faithful_pattern_arg (pb : pattern & bool) : Type0 = + faithful_pattern (fst pb) + +and faithful_attrs ats : Type0 = + allP ats faithful ats + +and faithful_comp c = + match inspect_comp c with + | C_Total t -> faithful t + | C_GTotal t -> faithful t + | C_Lemma pre post pats -> faithful pre /\ faithful post /\ faithful pats + | C_Eff us ef r args decs -> + allP c faithful_univ us + /\ faithful r + /\ allP c faithful_arg args + /\ allP c faithful decs + +let faithful_term = t:term{faithful t} +let faithful_universe = u:universe{faithful_univ u} + +(* A conservative version: works on all terms, returns `true` if they +are guaranteed to be equal. *) +[@@plugin] +val term_eq (t1 t2 : term) : (b:bool{b ==> t1 == t2}) + +(* A fully decidable version, for faithful terms. *) +[@@plugin] +val term_eq_dec (t1 t2 : faithful_term) : (b:bool{b <==> t1 == t2}) + +(* Idem for universes *) +[@@plugin] +val univ_eq (u1 u2 : universe) : (b:bool{b ==> u1 == u2}) + +[@@plugin] +val univ_eq_dec (u1 u2 : faithful_universe) : (b:bool{b <==> u1 == u2}) diff --git a/stage0/ulib/FStar.Reflection.V1.Compare.fsti b/stage0/ulib/FStar.Reflection.V1.Compare.fsti new file mode 100644 index 00000000000..5d4409d6df3 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V1.Compare.fsti @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V1.Compare + +(* The V2 module is generic already, and does not make any V1/V2 distinction. 
*) +include FStar.Reflection.V2.Compare diff --git a/stage0/ulib/FStar.Reflection.V1.Derived.Lemmas.fst b/stage0/ulib/FStar.Reflection.V1.Derived.Lemmas.fst new file mode 100644 index 00000000000..91cd00d9e98 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V1.Derived.Lemmas.fst @@ -0,0 +1,132 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V1.Derived.Lemmas + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V1.Builtins +open FStar.Stubs.Reflection.V1.Data +open FStar.Reflection.V1.Derived +open FStar.List.Tot + +let rec forall_list (p:'a -> Type) (l:list 'a) : Type = + match l with + | [] -> True + | x::xs -> p x /\ forall_list p xs + +let forallP (p: 'a -> Type) (l: list 'a): Type + = forall (x: 'a). memP x l ==> p x +// Precedence relation on the element of a list +unfold let (<<:) (l: list 'a) (r: 'r) + = forallP (fun x -> x << r) l + +// A glorified `id` +val list_ref : (#a:Type) -> (#p:(a -> Type)) -> (l:list a) -> + Pure (list (x:a{p x})) + (requires (forallP p l)) + (ensures (fun _ -> True)) +let rec list_ref #a #p l = + match l with + | [] -> [] + | x::xs -> x :: list_ref #a #p xs + +val collect_app_order' : (args:list argv) -> (tt:term) -> (t:term) -> + Lemma (requires args <<: tt /\ t << tt) + (ensures (let fn, args' = collect_app_ln' args t in + args' <<: tt /\ fn << tt)) + (decreases t) +let rec collect_app_order' args tt t = + match inspect_ln_unascribe t with + | Tv_App l r -> collect_app_order' (r::args) tt l + | _ -> () + +val collect_app_order : (t:term) -> + Lemma (ensures (forall (f:term). forall (s:list argv). (f,s) == collect_app_ln t ==> + (f << t /\ s <<: t) + \/ (f == t /\ s == []))) +let collect_app_order t = + match inspect_ln_unascribe t with + | Tv_App l r -> collect_app_order' [r] t l + | _ -> () + +val collect_app_ref : (t:term) -> (h:term{h == t \/ h << t}) & list (a:argv{fst a << t}) +let collect_app_ref t = + let h, a = collect_app_ln t in + collect_app_order t; + h, list_ref a + +(**** [collect_abs_ln t] is smaller than [t] *) +let rec collect_abs_order' (bds: binders) (tt t: term) + : Lemma (requires t << tt /\ bds <<: tt) + (ensures (let bds', body = collect_abs' bds t in + (bds' <<: tt /\ body << tt))) + (decreases t) + = match inspect_ln_unascribe t with + | Tv_Abs b body -> collect_abs_order' (b::bds) tt body + | _ -> () + +val collect_abs_ln_order : (t:term) -> + Lemma (ensures forall bds body. 
+ (bds, body) == collect_abs_ln t ==> + (body << t /\ bds <<: t) + \/ (body == t /\ bds == []) + ) +let collect_abs_ln_order t = + match inspect_ln_unascribe t with + | Tv_Abs b body -> collect_abs_order' [b] t body; + let bds, body = collect_abs' [] t in + Classical.forall_intro (rev_memP bds) + | _ -> () + +val collect_abs_ln_ref : (t:term) -> list (bd:binder{bd << t}) & (body:term{body == t \/ body << t}) +let collect_abs_ln_ref t = + let bds, body = collect_abs_ln t in + collect_abs_ln_order t; + list_ref bds, body + + + +(**** [collect_arr_ln_bs t] is smaller than [t] *) +let rec collect_arr_order' (bds: binders) (tt: term) (c: comp) + : Lemma (requires c << tt /\ bds <<: tt) + (ensures (let bds', c' = collect_arr' bds c in + bds' <<: tt /\ c' << tt)) + (decreases c) + = match inspect_comp c with + | C_Total ret -> + ( match inspect_ln_unascribe ret with + | Tv_Arrow b c -> collect_arr_order' (b::bds) tt c + | _ -> ()) + | _ -> () + +val collect_arr_ln_bs_order : (t:term) -> + Lemma (ensures forall bds c. + (bds, c) == collect_arr_ln_bs t ==> + (c << t /\ bds <<: t) + \/ (c == pack_comp (C_Total t) /\ bds == []) + ) +let collect_arr_ln_bs_order t = + match inspect_ln_unascribe t with + | Tv_Arrow b c -> collect_arr_order' [b] t c; + Classical.forall_intro_2 (rev_memP #binder); + inspect_pack_comp_inv (C_Total t) + | _ -> inspect_pack_comp_inv (C_Total t) + +val collect_arr_ln_bs_ref : (t:term) -> list (bd:binder{bd << t}) + & (c:comp{ c == pack_comp (C_Total t) \/ c << t}) +let collect_arr_ln_bs_ref t = + let bds, c = collect_arr_ln_bs t in + collect_arr_ln_bs_order t; + list_ref bds, c diff --git a/stage0/ulib/FStar.Reflection.V1.Derived.fst b/stage0/ulib/FStar.Reflection.V1.Derived.fst new file mode 100644 index 00000000000..e45206d1dba --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V1.Derived.fst @@ -0,0 +1,262 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V1.Derived + +open FStar.Stubs.Reflection.Types +open FStar.Reflection.Const +open FStar.Stubs.Reflection.V1.Builtins +open FStar.Stubs.Reflection.V1.Data +open FStar.Order +open FStar.Stubs.VConfig + +let bv_of_binder (b : binder) : bv = (inspect_binder b).binder_bv + +let rec inspect_ln_unascribe (t:term) : tv:term_view{tv << t /\ notAscription tv} = + match inspect_ln t with + | Tv_AscribedT t' _ _ _ + | Tv_AscribedC t' _ _ _ -> inspect_ln_unascribe t' + | tv -> tv + +(* + * AR: add versions that take attributes as arguments? + *) +let mk_binder (bv : bv) (sort : typ) : binder = + pack_binder { + binder_bv=bv; + binder_qual=Q_Explicit; + binder_attrs=[]; + binder_sort = sort; + } + +let mk_implicit_binder (bv : bv) (sort : typ) : binder = + pack_binder { + binder_bv=bv; + binder_qual=Q_Implicit; + binder_attrs=[]; + binder_sort = sort; + } + +let type_of_binder (b : binder) : typ = + (inspect_binder b).binder_sort + +val flatten_name : name -> Tot string +let rec flatten_name ns = + match ns with + | [] -> "" + | [n] -> n + | n::ns -> n ^ "." 
^ flatten_name ns + +(* Helpers for dealing with nested applications and arrows *) +let rec collect_app_ln' (args : list argv) (t : term) : Tot (term & list argv) (decreases t) = + match inspect_ln_unascribe t with + | Tv_App l r -> + collect_app_ln' (r::args) l + | _ -> (t, args) + +val collect_app_ln : term -> term & list argv +let collect_app_ln = collect_app_ln' [] + +let rec mk_app (t : term) (args : list argv) : Tot term (decreases args) = + match args with + | [] -> t + | (x::xs) -> mk_app (pack_ln (Tv_App t x)) xs + +// Helper for when all arguments are explicit +let mk_e_app (t : term) (args : list term) : Tot term = + let e t = (t, Q_Explicit) in + mk_app t (List.Tot.Base.map e args) + +let u_unk : universe = pack_universe Uv_Unk + +let rec mk_tot_arr_ln (bs: list binder) (cod : term) : Tot term (decreases bs) = + match bs with + | [] -> cod + | (b::bs) -> pack_ln (Tv_Arrow b (pack_comp (C_Total (mk_tot_arr_ln bs cod)))) + +let rec collect_arr' (bs : list binder) (c : comp) : Tot (list binder & comp) (decreases c) = + begin match inspect_comp c with + | C_Total t -> + begin match inspect_ln_unascribe t with + | Tv_Arrow b c -> + collect_arr' (b::bs) c + | _ -> + (bs, c) + end + | _ -> (bs, c) + end + +val collect_arr_ln_bs : typ -> list binder & comp +let collect_arr_ln_bs t = + let (bs, c) = collect_arr' [] (pack_comp (C_Total t)) in + (List.Tot.Base.rev bs, c) + +val collect_arr_ln : typ -> list typ & comp +let collect_arr_ln t = + let bs, c = collect_arr_ln_bs t in + List.Tot.Base.map type_of_binder bs, c + +let rec collect_abs' (bs : list binder) (t : term) : Tot (list binder & term) (decreases t) = + match inspect_ln_unascribe t with + | Tv_Abs b t' -> + collect_abs' (b::bs) t' + | _ -> (bs, t) + +val collect_abs_ln : term -> list binder & term +let collect_abs_ln t = + let (bs, t') = collect_abs' [] t in + (List.Tot.Base.rev bs, t') + +let fv_to_string (fv:fv) : string = implode_qn (inspect_fv fv) + +let mk_stringlit (s : string) : term = + pack_ln (Tv_Const (C_String s)) + +let mk_strcat (t1 t2 : term) : term = + mk_e_app (pack_ln (Tv_FVar (pack_fv ["Prims"; "strcat"]))) [t1; t2] + +let mk_cons (h t : term) : term = + mk_e_app (pack_ln (Tv_FVar (pack_fv cons_qn))) [h; t] + +let mk_cons_t (ty h t : term) : term = + mk_app (pack_ln (Tv_FVar (pack_fv cons_qn))) [(ty, Q_Implicit); (h, Q_Explicit); (t, Q_Explicit)] + +let rec mk_list (ts : list term) : term = + match ts with + | [] -> pack_ln (Tv_FVar (pack_fv nil_qn)) + | t::ts -> mk_cons t (mk_list ts) + +let mktuple_n (ts : list term{List.Tot.Base.length ts <= 8}) : term = + match List.Tot.Base.length ts with + | 0 -> pack_ln (Tv_Const C_Unit) + | 1 -> let [x] = ts in x + | n -> begin + let qn = match n with + | 2 -> mktuple2_qn + | 3 -> mktuple3_qn + | 4 -> mktuple4_qn + | 5 -> mktuple5_qn + | 6 -> mktuple6_qn + | 7 -> mktuple7_qn + | 8 -> mktuple8_qn + in mk_e_app (pack_ln (Tv_FVar (pack_fv qn))) ts + end + +let destruct_tuple (t : term) : option (list term) = + let head, args = collect_app_ln t in + match inspect_ln head with + | Tv_FVar fv -> + if List.Tot.Base.mem + (inspect_fv fv) [mktuple2_qn; mktuple3_qn; mktuple4_qn; mktuple5_qn; + mktuple6_qn; mktuple7_qn; mktuple8_qn] + then Some (List.Tot.Base.concatMap (fun (t, q) -> + match q with + | Q_Explicit -> [t] + | _ -> []) args) + else None + | _ -> None + +let mkpair (t1 t2 : term) : term = + mktuple_n [t1;t2] + +let rec head (t : term) : term = + match inspect_ln t with + | Tv_Match t _ _ + | Tv_Let _ _ _ _ t _ + | Tv_Abs _ t + | Tv_Refine _ _ t + | Tv_App t _ + | 
Tv_AscribedT t _ _ _ + | Tv_AscribedC t _ _ _ -> head t + + | Tv_Unknown + | Tv_Uvar _ _ + | Tv_Const _ + | Tv_Type _ + | Tv_Var _ + | Tv_BVar _ + | Tv_FVar _ + | Tv_UInst _ _ + | Tv_Arrow _ _ + | Tv_Unsupp -> t + +(** Checks if a term `t` is equal to some FV (a top level name). +Ignores universes and ascriptions. *) +let is_fvar (t : term) (nm:string) : bool = + match inspect_ln_unascribe t with + | Tv_FVar fv + | Tv_UInst fv _ -> implode_qn (inspect_fv fv) = nm + | _ -> false + +(** Checks if a term `t` is equal to any FV (a top level name) from +those given in the list. Ignores universes and ascriptions. *) +let rec is_any_fvar (t : term) (nms:list string) : bool = + match nms with + | [] -> false + | v::vs -> is_fvar t v || is_any_fvar t vs + +let is_uvar (t : term) : bool = + match inspect_ln (head t) with + | Tv_Uvar _ _ -> true + | _ -> false + +let binder_set_qual (q:aqualv) (b:binder) : Tot binder = + let bview = inspect_binder b in + pack_binder {bview with binder_qual=q} + +(** Set a vconfig for a sigelt *) +val add_check_with : vconfig -> sigelt -> Tot sigelt +let add_check_with vcfg se = + let attrs = sigelt_attrs se in + let vcfg_t = embed_vconfig vcfg in + let t = `(check_with (`#vcfg_t)) in + set_sigelt_attrs (t :: attrs) se + +let un_uinst (t:term) : term = + match inspect_ln t with + | Tv_UInst fv _ -> pack_ln (Tv_FVar fv) + | _ -> t + +(* Returns [true] iff the term [t] is just the name [nm], though +possibly universe-instantiated and applied to some implicit arguments. +*) +let rec is_name_imp (nm : name) (t : term) : bool = + begin match inspect_ln_unascribe t with + | Tv_FVar fv + | Tv_UInst fv _ -> + if inspect_fv fv = nm + then true + else false + | Tv_App l (_, Q_Implicit) -> + is_name_imp nm l + | _ -> false + end + +(* If t is of the shape [squash t'], return [Some t'], +otherwise [None]. *) +let unsquash_term (t : term) : option term = + match inspect_ln_unascribe t with + | Tv_App l (r, Q_Explicit) -> + if is_name_imp squash_qn l + then Some r + else None + | _ -> None + +(* As [unsquash_term], but returns the original term if +[t] is not a squash. *) +let maybe_unsquash_term (t : term) : term = + match unsquash_term t with + | Some t' -> t' + | None -> t diff --git a/stage0/ulib/FStar.Reflection.V1.Formula.fst b/stage0/ulib/FStar.Reflection.V1.Formula.fst new file mode 100644 index 00000000000..282257db474 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V1.Formula.fst @@ -0,0 +1,236 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Reflection.V1.Formula + +open FStar.List.Tot.Base +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V1.Builtins +open FStar.Stubs.Reflection.Types +open FStar.Reflection.Const +open FStar.Stubs.Reflection.V1.Builtins +open FStar.Reflection.V1.Derived +open FStar.Stubs.Reflection.V1.Data + +///// Cannot open FStar.Tactics.Derived here ///// +private let bv_to_string (bv : bv) : Tac string = + let bvv = inspect_bv bv in + unseal (bvv.bv_ppname) +private let rec inspect_unascribe (t:term) : Tac (tv:term_view{notAscription tv}) = + match inspect t with + | Tv_AscribedT t _ _ _ + | Tv_AscribedC t _ _ _ -> + inspect_unascribe t + | tv -> tv +private let rec collect_app' (args : list argv) (t : term) + : Tac (term & list argv) = + match inspect_unascribe t with + | Tv_App l r -> + collect_app' (r::args) l + | _ -> (t, args) +private let collect_app = collect_app' [] +///// + +noeq type comparison = + | Eq of option typ (* Propositional equality (eq2), maybe annotated *) + | BoolEq of option typ (* Decidable, boolean equality (eq), maybe annotated *) + | Lt | Le | Gt | Ge (* Orderings, at type `int` (and subtypes) *) + +noeq type formula = + | True_ : formula + | False_ : formula + | Comp : comparison -> term -> term -> formula + | And : term -> term -> formula + | Or : term -> term -> formula + | Not : term -> formula + | Implies: term -> term -> formula + | Iff : term -> term -> formula + | Forall : bv -> typ -> term -> formula + | Exists : bv -> typ -> term -> formula + | App : term -> term -> formula + | Name : bv -> formula + | FV : fv -> formula + | IntLit : int -> formula + | F_Unknown : formula // Also a baked-in "None" + +let mk_Forall (typ : term) (pred : term) : Tac formula = + let b = pack_bv ({ bv_ppname = as_ppname "x"; + bv_index = 0; }) in + Forall b typ (pack_ln (Tv_App pred (pack_ln (Tv_BVar b), Q_Explicit))) + +let mk_Exists (typ : term) (pred : term) : Tac formula = + let b = pack_bv ({ bv_ppname = as_ppname "x"; + bv_index = 0; }) in + Exists b typ (pack_ln (Tv_App pred (pack_ln (Tv_BVar b), Q_Explicit))) + +let term_as_formula' (t:term) : Tac formula = + match inspect_unascribe t with + | Tv_Var n -> + Name n + + | Tv_FVar fv + | Tv_UInst fv _ -> + // Cannot use `when` clauses when verifying! + let qn = inspect_fv fv in + if qn = true_qn then True_ + else if qn = false_qn then False_ + else FV fv + + // TODO: l_Forall + // ...or should we just try to drop all squashes? + // TODO: b2t at this point ? 
+ | Tv_App h0 t -> begin + let (h, ts) = collect_app h0 in + let h = un_uinst h in + match inspect_ln h, ts@[t] with + | Tv_FVar fv, [(a1, Q_Implicit); (a2, Q_Explicit); (a3, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = eq2_qn then Comp (Eq (Some a1)) a2 a3 + else if qn = eq1_qn then Comp (BoolEq (Some a1)) a2 a3 + else if qn = lt_qn then Comp Lt a2 a3 + else if qn = lte_qn then Comp Le a2 a3 + else if qn = gt_qn then Comp Gt a2 a3 + else if qn = gte_qn then Comp Ge a2 a3 + else App h0 (fst t) + | Tv_FVar fv, [(a1, Q_Explicit); (a2, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = imp_qn then Implies a1 a2 + else if qn = and_qn then And a1 a2 + else if qn = iff_qn then Iff a1 a2 + else if qn = or_qn then Or a1 a2 + // Non-annotated comparisons + else if qn = eq2_qn then Comp (Eq None) a1 a2 + else if qn = eq1_qn then Comp (BoolEq None) a1 a2 + else App h0 (fst t) + + | Tv_FVar fv, [(a1, Q_Implicit); (a2, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = forall_qn then mk_Forall a1 a2 + else if qn = exists_qn then mk_Exists a1 a2 + else App h0 (fst t) + | Tv_FVar fv, [(a, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = not_qn then Not a + else if qn = b2t_qn then begin + if term_eq a (`false) then False_ + else if term_eq a (`true) then True_ + else App h0 (fst t) + end + else App h0 (fst t) + | _ -> + App h0 (fst t) + end + + | Tv_Const (C_Int i) -> + IntLit i + + (* Not formulas. *) + | Tv_Let _ _ _ _ _ _ + | Tv_Match _ _ _ + | Tv_Type _ + | Tv_Abs _ _ + | Tv_Arrow _ _ + | Tv_Uvar _ _ + | Tv_Unknown + | Tv_Unsupp + | Tv_Refine _ _ _ -> F_Unknown + + (* Other constants? *) + | Tv_Const _ -> F_Unknown + + (* Should not occur, we're using inspect_ln *) + | Tv_BVar _ -> F_Unknown + +// Unsquashing +let term_as_formula (t:term) : Tac formula = + match unsquash_term t with + | None -> F_Unknown + | Some t -> + term_as_formula' t + +let term_as_formula_total (t:term) : Tac formula = + term_as_formula' (maybe_unsquash_term t) + +let formula_as_term_view (f:formula) : Tot term_view = + let mk_app' tv args = List.Tot.Base.fold_left (fun tv a -> Tv_App (pack_ln tv) a) tv args in + let e = Q_Explicit in + let i = Q_Implicit in + match f with + | True_ -> Tv_FVar (pack_fv true_qn) + | False_ -> Tv_FVar (pack_fv false_qn) + | Comp (Eq None) l r -> mk_app' (Tv_FVar (pack_fv eq2_qn)) [(l,e);(r,e)] + | Comp (Eq (Some t)) l r -> mk_app' (Tv_FVar (pack_fv eq2_qn)) [(t,i);(l,e);(r,e)] + | Comp (BoolEq None) l r -> mk_app' (Tv_FVar (pack_fv eq1_qn)) [(l,e);(r,e)] + | Comp (BoolEq (Some t)) l r -> mk_app' (Tv_FVar (pack_fv eq1_qn)) [(t,i);(l,e);(r,e)] + | Comp Lt l r -> mk_app' (Tv_FVar (pack_fv lt_qn)) [(l,e);(r,e)] + | Comp Le l r -> mk_app' (Tv_FVar (pack_fv lte_qn)) [(l,e);(r,e)] + | Comp Gt l r -> mk_app' (Tv_FVar (pack_fv gt_qn)) [(l,e);(r,e)] + | Comp Ge l r -> mk_app' (Tv_FVar (pack_fv gte_qn)) [(l,e);(r,e)] + | And p q -> mk_app' (Tv_FVar (pack_fv and_qn)) [(p,e);(q,e)] + | Or p q -> mk_app' (Tv_FVar (pack_fv or_qn)) [(p,e);(q,e)] + | Implies p q -> mk_app' (Tv_FVar (pack_fv imp_qn)) [(p,e);(q,e)] + | Not p -> mk_app' (Tv_FVar (pack_fv not_qn)) [(p,e)] + | Iff p q -> mk_app' (Tv_FVar (pack_fv iff_qn)) [(p,e);(q,e)] + | Forall b sort t -> Tv_Unknown // TODO: decide on meaning of this + | Exists b sort t -> Tv_Unknown // TODO: ^ + + | App p q -> + Tv_App p (q, Q_Explicit) + + | Name b -> + Tv_Var b + + | FV fv -> + Tv_FVar fv + + | IntLit i -> + Tv_Const (C_Int i) + + | F_Unknown -> + Tv_Unknown + +let formula_as_term (f:formula) : Tot term = + pack_ln (formula_as_term_view f) + 
+let formula_to_string (f:formula) : Tac string = + match f with + | True_ -> "True_" + | False_ -> "False_" + | Comp (Eq mt) l r -> "Eq" ^ + (match mt with + | None -> "" + | Some t -> " (" ^ term_to_string t ^ ")") ^ + " (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp (BoolEq mt) l r -> "BoolEq" ^ + (match mt with + | None -> "" + | Some t -> " (" ^ term_to_string t ^ ")") ^ + " (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Lt l r -> "Lt (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Le l r -> "Le (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Gt l r -> "Gt (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Ge l r -> "Ge (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | And p q -> "And (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Or p q -> "Or (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Implies p q -> "Implies (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Not p -> "Not (" ^ term_to_string p ^ ")" + | Iff p q -> "Iff (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Forall bs _sort t -> "Forall (" ^ term_to_string t ^ ")" + | Exists bs _sort t -> "Exists (" ^ term_to_string t ^ ")" + | App p q -> "App (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Name bv -> "Name (" ^ bv_to_string bv ^ ")" + | FV fv -> "FV (" ^ flatten_name (inspect_fv fv) ^ ")" + | IntLit i -> "Int " ^ string_of_int i + | F_Unknown -> "?" diff --git a/stage0/ulib/FStar.Reflection.V1.fst b/stage0/ulib/FStar.Reflection.V1.fst new file mode 100644 index 00000000000..f4cad669531 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V1.fst @@ -0,0 +1,24 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V1 + +include FStar.Stubs.Reflection.Types +include FStar.Stubs.Reflection.V1.Data +include FStar.Stubs.Reflection.V1.Builtins +include FStar.Reflection.V1.Derived +include FStar.Reflection.V1.Derived.Lemmas +include FStar.Reflection.Const +include FStar.Reflection.V1.Compare diff --git a/stage0/ulib/FStar.Reflection.V2.Arith.fst b/stage0/ulib/FStar.Reflection.V2.Arith.fst new file mode 100644 index 00000000000..b7ab5b71c85 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.Arith.fst @@ -0,0 +1,249 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Reflection.V2.Arith + +open FStar.Tactics.V2.Bare +open FStar.Reflection.V2 +module O = FStar.Order + +(* + * Simple decision procedure to decide if a term is an "arithmetic + * proposition", by which we mean a simple relation between two + * arithmetic expressions (each representing integers or naturals) + * + * Main use case: deciding, in a tactic, if a goal is an arithmetic + * expression and applying a custom decision procedure there (instead of + * feeding to the SMT solver) + *) + +noeq +type expr = + | Lit : int -> expr + // atom, contains both a numerical ID and the actual term encountered + | Atom : nat -> term -> expr + | Plus : expr -> expr -> expr + | Mult : expr -> expr -> expr + | Minus : expr -> expr -> expr + | Land : expr -> expr -> expr + | Lxor : expr -> expr -> expr + | Lor : expr -> expr -> expr + | Ladd : expr -> expr -> expr + | Lsub : expr -> expr -> expr + | Shl : expr -> expr -> expr + | Shr : expr -> expr -> expr + | Neg : expr -> expr + | Udiv : expr -> expr -> expr + | Umod : expr -> expr -> expr + | MulMod : expr -> expr -> expr + | NatToBv : expr -> expr + // | Div : expr -> expr -> expr // Add this one? + +noeq +type connective = + | C_Lt | C_Eq | C_Gt | C_Ne + +noeq +type prop = + | CompProp : expr -> connective -> expr -> prop + | AndProp : prop -> prop -> prop + | OrProp : prop -> prop -> prop + | NotProp : prop -> prop + +let lt e1 e2 = CompProp e1 C_Lt e2 +let le e1 e2 = CompProp e1 C_Lt (Plus (Lit 1) e2) +let eq e1 e2 = CompProp e1 C_Eq e2 +let ne e1 e2 = CompProp e1 C_Ne e2 +let gt e1 e2 = CompProp e1 C_Gt e2 +let ge e1 e2 = CompProp (Plus (Lit 1) e1) C_Gt e2 + +(* Define a traversal monad! Makes exception handling and counter-keeping easy *) +private let st = p:(nat & list term){fst p == List.Tot.Base.length (snd p)} +private let tm a = st -> Tac (either string (a & st)) +private let return (x:'a) : tm 'a = fun i -> Inr (x, i) +private let (let!) (m : tm 'a) (f : 'a -> tm 'b) : tm 'b = + fun i -> match m i with + | Inr (x, j) -> f x j + | s -> Inl (Inl?.v s) // why? To have a catch-all pattern and thus an easy WP + +val lift : ('a -> Tac 'b) -> ('a -> tm 'b) +let lift f x st = + Inr (f x, st) + +val liftM : ('a -> 'b) -> (tm 'a -> tm 'b) +let liftM f x = + let! xx = x in + return (f xx) + +val liftM2 : ('a -> 'b -> 'c) -> (tm 'a -> tm 'b -> tm 'c) +let liftM2 f x y = + let! xx = x in + let! yy = y in + return (f xx yy) + +val liftM3 : ('a -> 'b -> 'c -> 'd) -> (tm 'a -> tm 'b -> tm 'c -> tm 'd) +let liftM3 f x y z = + let! xx = x in + let! yy = y in + let! 
zz = z in + return (f xx yy zz) + + +private let rec find_idx (f : 'a -> Tac bool) (l : list 'a) : Tac (option ((n:nat{n < List.Tot.Base.length l}) & 'a)) = + match l with + | [] -> None + | x::xs -> + if f x + then Some (0, x) + else begin match find_idx f xs with + | None -> None + | Some (i, x) -> Some (i+1, x) + end + +private let atom (t:term) : tm expr = fun (n, atoms) -> + match find_idx (term_eq_old t) atoms with + | None -> Inr (Atom n t, (n + 1, t::atoms)) + | Some (i, t) -> Inr (Atom (n - 1 - i) t, (n, atoms)) + +private val fail : (#a:Type) -> string -> tm a +private let fail #a s = fun i -> Inl s + +val as_arith_expr : term -> tm expr +#push-options "--initial_fuel 4 --max_fuel 4" +let rec as_arith_expr (t:term) = + let hd, tl = collect_app_ln t in + // Invoke [collect_app_order]: forall (arg, qual) ∈ tl, (arg, qual) << t + collect_app_order t; + // [precedes_fst_tl]: forall (arg, qual) ∈ tl, arg << t + let precedes_fst_tl (arg: term) (q: aqualv) + : Lemma (List.Tot.memP (arg, q) tl ==> arg << t) + = let _: argv = arg, q in () + in Classical.forall_intro_2 (precedes_fst_tl); + match inspect_ln hd, tl with + | Tv_FVar fv, [(e1, Q_Implicit); (e2, Q_Explicit) ; (e3, Q_Explicit)] -> + let qn = inspect_fv fv in + let e2' = as_arith_expr e2 in + let e3' = as_arith_expr e3 in + if qn = land_qn then liftM2 Land e2' e3' + else if qn = lxor_qn then liftM2 Lxor e2' e3' + else if qn = lor_qn then liftM2 Lor e2' e3' + else if qn = shiftr_qn then liftM2 Shr e2' e3' + else if qn = shiftl_qn then liftM2 Shl e2' e3' + else if qn = udiv_qn then liftM2 Udiv e2' e3' + else if qn = umod_qn then liftM2 Umod e2' e3' + else if qn = mul_mod_qn then liftM2 MulMod e2' e3' + else if qn = ladd_qn then liftM2 Ladd e2' e3' + else if qn = lsub_qn then liftM2 Lsub e2' e3' + else atom t + | Tv_FVar fv, [(l, Q_Explicit); (r, Q_Explicit)] -> + let qn = inspect_fv fv in + // Have to go through hoops to get F* to typecheck this. + // Maybe the do notation is twisting the terms somehow unexpected? + let ll = as_arith_expr l in + let rr = as_arith_expr r in + if qn = add_qn then liftM2 Plus ll rr + else if qn = minus_qn then liftM2 Minus ll rr + else if qn = mult_qn then liftM2 Mult ll rr + else if qn = mult'_qn then liftM2 Mult ll rr + else atom t + | Tv_FVar fv, [(l, Q_Implicit); (r, Q_Explicit)] -> + let qn = inspect_fv fv in + let ll = as_arith_expr l in + let rr = as_arith_expr r in + if qn = nat_bv_qn then liftM NatToBv rr + else atom t + | Tv_FVar fv, [(a, Q_Explicit)] -> + let qn = inspect_fv fv in + let aa = as_arith_expr a in + if qn = neg_qn then liftM Neg aa + else atom t + | Tv_Const (C_Int i), _ -> + return (Lit i) + | _ -> + atom t +#pop-options + +val is_arith_expr : term -> tm expr +let is_arith_expr t = + let! a = as_arith_expr t in + match a with + | Atom _ t -> begin + let hd, tl = collect_app_ref t in + match inspect_ln hd, tl with + | Tv_FVar _, [] + | Tv_BVar _, [] + | Tv_Var _, [] -> return a + | _ -> let! s = lift term_to_string t in + fail ("not an arithmetic expression: (" ^ s ^ ")") + end + | _ -> return a + +// Cannot use this... +// val is_arith_prop : term -> tm prop +val is_arith_prop : term -> st -> Tac (either string (prop & st)) +let rec is_arith_prop (t:term) = fun i -> + (let! 
f = lift (fun t -> term_as_formula t) t in + match f with + | Comp (Eq _) l r -> liftM2 eq (is_arith_expr l) (is_arith_expr r) + | Comp (BoolEq _) l r -> liftM2 eq (is_arith_expr l) (is_arith_expr r) + | Comp Lt l r -> liftM2 lt (is_arith_expr l) (is_arith_expr r) + | Comp Le l r -> liftM2 le (is_arith_expr l) (is_arith_expr r) + | And l r -> liftM2 AndProp (is_arith_prop l) (is_arith_prop r) + | Or l r -> liftM2 OrProp (is_arith_prop l) (is_arith_prop r) + | _ -> + let! s = lift term_to_string t in + fail ("connector (" ^ s ^ ")")) i + + +// Run the monadic computations, disregard the counter +let run_tm (m : tm 'a) : Tac (either string 'a) = + match m (0, []) with + | Inr (x, _) -> Inr x + | s -> Inl (Inl?.v s) // why? To have a catch-all pattern and thus an easy WP + +let rec expr_to_string (e:expr) : string = + match e with + | Atom i _ -> "a"^(string_of_int i) + | Lit i -> string_of_int i + | Plus l r -> "(" ^ (expr_to_string l) ^ " + " ^ (expr_to_string r) ^ ")" + | Minus l r -> "(" ^ (expr_to_string l) ^ " - " ^ (expr_to_string r) ^ ")" + | Mult l r -> "(" ^ (expr_to_string l) ^ " * " ^ (expr_to_string r) ^ ")" + | Neg l -> "(- " ^ (expr_to_string l) ^ ")" + | Land l r -> "(" ^ (expr_to_string l) ^ " & " ^ (expr_to_string r) ^ ")" + | Lor l r -> "(" ^ (expr_to_string l) ^ " | " ^ (expr_to_string r) ^ ")" + | Lxor l r -> "(" ^ (expr_to_string l) ^ " ^ " ^ (expr_to_string r) ^ ")" + | Ladd l r -> "(" ^ (expr_to_string l) ^ " >> " ^ (expr_to_string r) ^ ")" + | Lsub l r -> "(" ^ (expr_to_string l) ^ " >> " ^ (expr_to_string r) ^ ")" + | Shl l r -> "(" ^ (expr_to_string l) ^ " << " ^ (expr_to_string r) ^ ")" + | Shr l r -> "(" ^ (expr_to_string l) ^ " >> " ^ (expr_to_string r) ^ ")" + | NatToBv l -> "(" ^ "to_vec " ^ (expr_to_string l) ^ ")" + | Udiv l r -> "(" ^ (expr_to_string l) ^ " / " ^ (expr_to_string r) ^ ")" + | Umod l r -> "(" ^ (expr_to_string l) ^ " % " ^ (expr_to_string r) ^ ")" + | MulMod l r -> "(" ^ (expr_to_string l) ^ " ** " ^ (expr_to_string r) ^ ")" + +let rec compare_expr (e1 e2 : expr) : O.order = + match e1, e2 with + | Lit i, Lit j -> O.compare_int i j + | Atom _ t, Atom _ s -> compare_term t s + | Plus l1 l2, Plus r1 r2 + | Minus l1 l2, Minus r1 r2 + | Mult l1 l2, Mult r1 r2 -> O.lex (compare_expr l1 r1) (fun () -> compare_expr l2 r2) + | Neg e1, Neg e2 -> compare_expr e1 e2 + | Lit _, _ -> O.Lt | _, Lit _ -> O.Gt + | Atom _ _, _ -> O.Lt | _, Atom _ _ -> O.Gt + | Plus _ _, _ -> O.Lt | _, Plus _ _ -> O.Gt + | Mult _ _, _ -> O.Lt | _, Mult _ _ -> O.Gt + | Neg _, _ -> O.Lt | _, Neg _ -> O.Gt + | _ -> O.Gt // don't care about this for now diff --git a/stage0/ulib/FStar.Reflection.V2.Collect.fst b/stage0/ulib/FStar.Reflection.V2.Collect.fst new file mode 100644 index 00000000000..c1c3926feb9 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.Collect.fst @@ -0,0 +1,69 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Reflection.V2.Collect + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Builtins +open FStar.Stubs.Reflection.V2.Data + +let rec inspect_ln_unascribe (t:term) : tv:term_view{tv << t /\ notAscription tv} = + match inspect_ln t with + | Tv_AscribedT t' _ _ _ + | Tv_AscribedC t' _ _ _ -> inspect_ln_unascribe t' + | tv -> tv + +// (* Helpers for dealing with nested applications and arrows *) +let rec collect_app_ln' (args : list argv) (t : term) : Tot (term & list argv) (decreases t) = + match inspect_ln_unascribe t with + | Tv_App l r -> + collect_app_ln' (r::args) l + | _ -> (t, args) + +val collect_app_ln : term -> term & list argv +let collect_app_ln = collect_app_ln' [] + +let rec collect_arr' (bs : list binder) (c : comp) : Tot (list binder & comp) (decreases c) = + begin match inspect_comp c with + | C_Total t -> + begin match inspect_ln_unascribe t with + | Tv_Arrow b c -> + collect_arr' (b::bs) c + | _ -> + (bs, c) + end + | _ -> (bs, c) + end + +val collect_arr_ln_bs : typ -> list binder & comp +let collect_arr_ln_bs t = + let (bs, c) = collect_arr' [] (pack_comp (C_Total t)) in + (List.Tot.Base.rev bs, c) + +val collect_arr_ln : typ -> list typ & comp +let collect_arr_ln t = + let bs, c = collect_arr_ln_bs t in + List.Tot.Base.map (fun b -> (inspect_binder b).sort) bs, c + +let rec collect_abs' (bs : list binder) (t : term) : Tot (list binder & term) (decreases t) = + match inspect_ln_unascribe t with + | Tv_Abs b t' -> + collect_abs' (b::bs) t' + | _ -> (bs, t) + +val collect_abs_ln : term -> list binder & term +let collect_abs_ln t = + let (bs, t') = collect_abs' [] t in + (List.Tot.Base.rev bs, t') \ No newline at end of file diff --git a/stage0/ulib/FStar.Reflection.V2.Compare.fst b/stage0/ulib/FStar.Reflection.V2.Compare.fst new file mode 100644 index 00000000000..55d1c28aab2 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.Compare.fst @@ -0,0 +1,249 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V2.Compare + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data +open FStar.Stubs.Reflection.V2.Builtins +open FStar.Reflection.V2.Derived +open FStar.Order + +(* Many warnings about types SMT maybe not being able to prove +types are equal, but all spurious. 
*) +#set-options "--warn_error -290" + +let compare_name (n1 n2 : name) : order = + compare_list n1 n2 (fun s1 s2 -> order_from_int (compare_string s1 s2)) + +let compare_fv (f1 f2 : fv) : order = + compare_name (inspect_fv f1) (inspect_fv f2) + +let compare_const (c1 c2 : vconst) : order = + match c1, c2 with + | C_Unit, C_Unit -> Eq + | C_Int i, C_Int j -> order_from_int (i - j) + | C_True, C_True -> Eq + | C_False, C_False -> Eq + | C_String s1, C_String s2 -> order_from_int (compare_string s1 s2) + | C_Range r1, C_Range r2 -> Eq + | C_Reify, C_Reify -> Eq + | C_Reflect l1, C_Reflect l2 -> compare_name l1 l2 + | C_Real r1, C_Real r2 -> order_from_int (compare_string r1 r2) + | C_Unit, _ -> Lt | _, C_Unit -> Gt + | C_Int _, _ -> Lt | _, C_Int _ -> Gt + | C_True, _ -> Lt | _, C_True -> Gt + | C_False, _ -> Lt | _, C_False -> Gt + | C_String _, _ -> Lt | _, C_String _ -> Gt + | C_Range _, _ -> Lt | _, C_Range _ -> Gt + | C_Reify, _ -> Lt | _, C_Reify -> Gt + | C_Reflect _, _ -> Lt | _, C_Reflect _ -> Gt + | C_Real _, _ -> Lt | _ , C_Real _ -> Gt + +let compare_ident (i1 i2:ident) : order = + let nm1, _ = inspect_ident i1 in + let nm2, _ = inspect_ident i2 in + order_from_int (compare_string nm1 nm2) + +let rec compare_universe (u1 u2:universe) : order = + match inspect_universe u1, inspect_universe u2 with + | Uv_Zero, Uv_Zero -> Eq + | Uv_Succ u1, Uv_Succ u2 -> compare_universe u1 u2 + | Uv_Max us1, Uv_Max us2 -> + compare_list us1 us2 (fun x y -> compare_universe x y) + | Uv_BVar n1, Uv_BVar n2 -> compare_int n1 n2 + | Uv_Name i1, Uv_Name i2 -> compare_ident i1 i2 + | Uv_Unif u1, Uv_Unif u2 -> Eq //AR: TODO + | Uv_Unk, Uv_Unk -> Eq + | Uv_Zero, _ -> Lt | _, Uv_Zero -> Gt + | Uv_Succ _, _ -> Lt | _, Uv_Succ _ -> Gt + | Uv_Max _, _ -> Lt | _, Uv_Max _ -> Gt + | Uv_BVar _, _ -> Lt | _, Uv_BVar _ -> Gt + | Uv_Name _, _ -> Lt | _, Uv_Name _ -> Gt + | Uv_Unif _, _ -> Lt | _, Uv_Unif _ -> Gt + | Uv_Unk, _ -> Lt + +let compare_universes (us1 us2:universes) : order = + compare_list us1 us2 compare_universe + +let rec __compare_term (s t : term) : Tot order (decreases %[s;2]) = + match inspect_ln s, inspect_ln t with + | Tv_Var sv, Tv_Var tv -> + compare_namedv sv tv + + | Tv_BVar sv, Tv_BVar tv -> + compare_bv sv tv + + | Tv_FVar sv, Tv_FVar tv -> + compare_fv sv tv + + | Tv_UInst sv sus, Tv_UInst tv tus -> + lex (compare_fv sv tv) (fun _ -> compare_universes sus tus) + + | Tv_App _ _, Tv_App _ _ -> + (* We do something special here to first compare the heads, + then the arguments, as lists. Otherwise `f _ _` is always before `g _ _`, + regardless of `f` and `g`. 
*) + let open FStar.Reflection.V2.Derived.Lemmas in + let h1, aa1 = collect_app_ref s in + let h2, aa2 = collect_app_ref t in + Reflection.V2.Derived.Lemmas.collect_app_order s; + Reflection.V2.Derived.Lemmas.collect_app_order t; + lex (__compare_term h1 h2) (fun () -> compare_argv_list s t aa1 aa2) + + | Tv_Abs b1 e1, Tv_Abs b2 e2 -> + lex (__compare_binder b1 b2) (fun () -> __compare_term e1 e2) + + | Tv_Refine b1 e1, Tv_Refine b2 e2 -> + lex (__compare_binder b1 b2) (fun () -> + __compare_term e1 e2) + + | Tv_Arrow b1 e1, Tv_Arrow b2 e2 -> + lex (__compare_binder b1 b2) (fun () -> __compare_comp e1 e2) + + | Tv_Type su, Tv_Type tu -> compare_universe su tu + + | Tv_Const c1, Tv_Const c2 -> + compare_const c1 c2 + + | Tv_Uvar u1 _, Tv_Uvar u2 _-> + compare_int u1 u2 + + | Tv_Let _r1 _attrs1 b1 t1 t1', Tv_Let _r2 _attrs2 b2 t2 t2' -> + lex (__compare_binder b1 b2) (fun () -> + lex (__compare_term t1 t2) (fun () -> + __compare_term t1' t2')) + + | Tv_Match _ _ _, Tv_Match _ _ _ -> + Eq // TODO + + | Tv_AscribedT e1 t1 tac1 _, Tv_AscribedT e2 t2 tac2 _ -> + lex (__compare_term e1 e2) (fun () -> + lex (__compare_term t1 t2) (fun () -> + match tac1, tac2 with + | None, None -> Eq + | None, _ -> Lt + | _, None -> Gt + | Some e1, Some e2 -> __compare_term e1 e2)) + + | Tv_AscribedC e1 c1 tac1 _, Tv_AscribedC e2 c2 tac2 _ -> + lex (__compare_term e1 e2) (fun () -> + lex (__compare_comp c1 c2) (fun () -> + match tac1, tac2 with + | None, None -> Eq + | None, _ -> Lt + | _, None -> Gt + | Some e1, Some e2 -> __compare_term e1 e2)) + + | Tv_Unknown, Tv_Unknown -> + Eq + + | Tv_Unsupp, Tv_Unsupp -> + Eq + + // From here onward, they must have different constructors. Order them arbitrarily as in the definition. + | Tv_Var _, _ -> Lt | _, Tv_Var _ -> Gt + | Tv_BVar _, _ -> Lt | _, Tv_BVar _ -> Gt + | Tv_FVar _, _ -> Lt | _, Tv_FVar _ -> Gt + | Tv_UInst _ _, _ -> Lt | _, Tv_UInst _ _ -> Gt + | Tv_App _ _, _ -> Lt | _, Tv_App _ _ -> Gt + | Tv_Abs _ _, _ -> Lt | _, Tv_Abs _ _ -> Gt + | Tv_Arrow _ _, _ -> Lt | _, Tv_Arrow _ _ -> Gt + | Tv_Type _, _ -> Lt | _, Tv_Type _ -> Gt + | Tv_Refine _ _ , _ -> Lt | _, Tv_Refine _ _ -> Gt + | Tv_Const _, _ -> Lt | _, Tv_Const _ -> Gt + | Tv_Uvar _ _, _ -> Lt | _, Tv_Uvar _ _ -> Gt + | Tv_Let _ _ _ _ _, _ -> Lt | _, Tv_Let _ _ _ _ _ -> Gt + | Tv_Match _ _ _, _ -> Lt | _, Tv_Match _ _ _ -> Gt + | Tv_AscribedT _ _ _ _, _ -> Lt | _, Tv_AscribedT _ _ _ _ -> Gt + | Tv_AscribedC _ _ _ _, _ -> Lt | _, Tv_AscribedC _ _ _ _ -> Gt + | Tv_Unknown, _ -> Lt | _, Tv_Unknown -> Gt + | Tv_Unsupp, _ -> Lt | _, Tv_Unsupp -> Gt + +and __compare_term_list (l1 l2:list term) : Tot order (decreases l1) = + match l1, l2 with + | [], [] -> Eq + | [], _ -> Lt + | _, [] -> Gt + | hd1::tl1, hd2::tl2 -> + lex (__compare_term hd1 hd2) (fun () -> __compare_term_list tl1 tl2) + +and compare_argv (b1 b2 : Ghost.erased term) // termination bounds + (a1 : argv{fst a1 << Ghost.reveal b1}) + (a2 : argv{fst a2 << Ghost.reveal b2}) +: Tot order (decreases %[Ghost.reveal b1; 0]) = + let t1, q1 = a1 in + let t2, q2 = a2 in + assert (t1 << a1); + assert (t2 << a2); + match q1, q2 with + (* We should never see Q_Meta here *) + | Q_Implicit, Q_Explicit -> Lt + | Q_Explicit, Q_Implicit -> Gt + | _, _ -> + assert (t1 << Ghost.reveal b1); + assert (t2 << Ghost.reveal b2); + __compare_term t1 t2 + +and compare_argv_list (b1 b2 : Ghost.erased term) + (l1 : list (a:argv{fst a << Ghost.reveal b1})) + (l2 : list (a:argv{fst a << Ghost.reveal b2})) +: Tot order (decreases %[Ghost.reveal b1; 1; l1]) = + match l1, l2 with 
+ | [], [] -> Eq + | [], _ -> Lt + | _, [] -> Gt + | hd1::tl1, hd2::tl2 -> + assert (fst hd1 << Ghost.reveal b1); + lex (compare_argv b1 b2 hd1 hd2) (fun () -> compare_argv_list b1 b2 tl1 tl2) + +and __compare_comp (c1 c2 : comp) : Tot order (decreases c1) = + let cv1 = inspect_comp c1 in + let cv2 = inspect_comp c2 in + match cv1, cv2 with + | C_Total t1, C_Total t2 + + | C_GTotal t1, C_GTotal t2 -> __compare_term t1 t2 + + | C_Lemma p1 q1 s1, C_Lemma p2 q2 s2 -> + lex (__compare_term p1 p2) + (fun () -> + lex (__compare_term q1 q2) + (fun () -> __compare_term s1 s2) + ) + + | C_Eff us1 eff1 res1 args1 _decrs1, + C_Eff us2 eff2 res2 args2 _decrs2 -> + (* This could be more complex, not sure it is worth it *) + lex (compare_universes us1 us2) + (fun _ -> lex (compare_name eff1 eff2) + (fun _ -> __compare_term res1 res2)) + + | C_Total _, _ -> Lt | _, C_Total _ -> Gt + | C_GTotal _, _ -> Lt | _, C_GTotal _ -> Gt + | C_Lemma _ _ _, _ -> Lt | _, C_Lemma _ _ _ -> Gt + | C_Eff _ _ _ _ _, _ -> Lt | _, C_Eff _ _ _ _ _ -> Gt + +and __compare_binder (b1 b2 : binder) : order = + let bview1 = inspect_binder b1 in + let bview2 = inspect_binder b2 in + __compare_term bview1.sort bview2.sort + +(* We need this indirection since otherwise the plugin attribute +"leaks" into compare_argv and friends, which take an erased termination +bound, for which we do not have a plugin. *) +let compare_term = __compare_term +let compare_comp = __compare_comp +let compare_binder = __compare_binder diff --git a/stage0/ulib/FStar.Reflection.V2.Compare.fsti b/stage0/ulib/FStar.Reflection.V2.Compare.fsti new file mode 100644 index 00000000000..3d712847e04 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.Compare.fsti @@ -0,0 +1,47 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V2.Compare + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data +open FStar.Order + +[@@plugin] +val compare_name (n1 n2 : name) : order + +[@@plugin] +val compare_fv (f1 f2 : fv) : order + +[@@plugin] +val compare_const (c1 c2 : vconst) : order + +[@@plugin] +val compare_ident (i1 i2:ident) : order + +[@@plugin] +val compare_universe (u1 u2:universe) : order + +[@@plugin] +val compare_universes (us1 us2:universes) : order + +[@@plugin] +val compare_term (s t : term) : order + +[@@plugin] +val compare_comp (c1 c2 : comp) : order + +[@@plugin] +val compare_binder (b1 b2 : binder) : order diff --git a/stage0/ulib/FStar.Reflection.V2.Derived.Lemmas.fst b/stage0/ulib/FStar.Reflection.V2.Derived.Lemmas.fst new file mode 100644 index 00000000000..bc37f67c13f --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.Derived.Lemmas.fst @@ -0,0 +1,132 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V2.Derived.Lemmas + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Builtins +open FStar.Stubs.Reflection.V2.Data +open FStar.Reflection.V2.Collect +open FStar.List.Tot + +let rec forall_list (p:'a -> Type) (l:list 'a) : Type = + match l with + | [] -> True + | x::xs -> p x /\ forall_list p xs + +let forallP (p: 'a -> Type) (l: list 'a): Type + = forall (x: 'a). memP x l ==> p x +// Precedence relation on the element of a list +unfold let (<<:) (l: list 'a) (r: 'r) + = forallP (fun x -> x << r) l + +// A glorified `id` +val list_ref : (#a:Type) -> (#p:(a -> Type)) -> (l:list a) -> + Pure (list (x:a{p x})) + (requires (forallP p l)) + (ensures (fun _ -> True)) +let rec list_ref #a #p l = + match l with + | [] -> [] + | x::xs -> x :: list_ref #a #p xs + +val collect_app_order' : (args:list argv) -> (tt:term) -> (t:term) -> + Lemma (requires args <<: tt /\ t << tt) + (ensures (let fn, args' = collect_app_ln' args t in + args' <<: tt /\ fn << tt)) + (decreases t) +let rec collect_app_order' args tt t = + match inspect_ln_unascribe t with + | Tv_App l r -> collect_app_order' (r::args) tt l + | _ -> () + +val collect_app_order : (t:term) -> + Lemma (ensures (forall (f:term). forall (s:list argv). (f,s) == collect_app_ln t ==> + (f << t /\ s <<: t) + \/ (f == t /\ s == [] /\ ~(Tv_App? (inspect_ln t))))) +let collect_app_order t = + match inspect_ln_unascribe t with + | Tv_App l r -> collect_app_order' [r] t l + | _ -> () + +val collect_app_ref : (t:term) -> (h:term{h == t \/ h << t}) & list (a:argv{fst a << t}) +let collect_app_ref t = + let h, a = collect_app_ln t in + collect_app_order t; + h, list_ref a + +(**** [collect_abs_ln t] is smaller than [t] *) +let rec collect_abs_order' (bds: binders) (tt t: term) + : Lemma (requires t << tt /\ bds <<: tt) + (ensures (let bds', body = collect_abs' bds t in + (bds' <<: tt /\ body << tt))) + (decreases t) + = match inspect_ln_unascribe t with + | Tv_Abs b body -> collect_abs_order' (b::bds) tt body + | _ -> () + +val collect_abs_ln_order : (t:term) -> + Lemma (ensures forall bds body. + (bds, body) == collect_abs_ln t ==> + (body << t /\ bds <<: t) + \/ (body == t /\ bds == []) + ) +let collect_abs_ln_order t = + match inspect_ln_unascribe t with + | Tv_Abs b body -> collect_abs_order' [b] t body; + let bds, body = collect_abs' [] t in + Classical.forall_intro (rev_memP bds) + | _ -> () + +val collect_abs_ln_ref : (t:term) -> list (bd:binder{bd << t}) & (body:term{body == t \/ body << t}) +let collect_abs_ln_ref t = + let bds, body = collect_abs_ln t in + collect_abs_ln_order t; + list_ref bds, body + + + +(**** [collect_arr_ln_bs t] is smaller than [t] *) +let rec collect_arr_order' (bds: binders) (tt: term) (c: comp) + : Lemma (requires c << tt /\ bds <<: tt) + (ensures (let bds', c' = collect_arr' bds c in + bds' <<: tt /\ c' << tt)) + (decreases c) + = match inspect_comp c with + | C_Total ret -> + ( match inspect_ln_unascribe ret with + | Tv_Arrow b c -> collect_arr_order' (b::bds) tt c + | _ -> ()) + | _ -> () + +val collect_arr_ln_bs_order : (t:term) -> + Lemma (ensures forall bds c. 
+ (bds, c) == collect_arr_ln_bs t ==> + (c << t /\ bds <<: t) + \/ (c == pack_comp (C_Total t) /\ bds == []) + ) +let collect_arr_ln_bs_order t = + match inspect_ln_unascribe t with + | Tv_Arrow b c -> collect_arr_order' [b] t c; + Classical.forall_intro_2 (rev_memP #binder); + inspect_pack_comp_inv (C_Total t) + | _ -> inspect_pack_comp_inv (C_Total t) + +val collect_arr_ln_bs_ref : (t:term) -> list (bd:binder{bd << t}) + & (c:comp{ c == pack_comp (C_Total t) \/ c << t}) +let collect_arr_ln_bs_ref t = + let bds, c = collect_arr_ln_bs t in + collect_arr_ln_bs_order t; + list_ref bds, c diff --git a/stage0/ulib/FStar.Reflection.V2.Derived.fst b/stage0/ulib/FStar.Reflection.V2.Derived.fst new file mode 100644 index 00000000000..e88210271e1 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.Derived.fst @@ -0,0 +1,253 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V2.Derived + +open FStar.Stubs.Reflection.Types +open FStar.Reflection.Const +open FStar.Stubs.Reflection.V2.Builtins +open FStar.Stubs.Reflection.V2.Data +open FStar.Order +open FStar.Stubs.VConfig +open FStar.Reflection.V2.Collect + +let type_of_binder (b : binder) : typ = + (inspect_binder b).sort + +let rec inspect_ln_unascribe (t:term) : tv:term_view{tv << t /\ notAscription tv} = + match inspect_ln t with + | Tv_AscribedT t' _ _ _ + | Tv_AscribedC t' _ _ _ -> inspect_ln_unascribe t' + | tv -> tv + +let compare_bv (v1 v2 : bv) : order = + Order.compare_int (inspect_bv v1).index (inspect_bv v2).index + +let compare_namedv (v1 v2 : namedv) : order = + Order.compare_int (inspect_namedv v1).uniq (inspect_namedv v2).uniq + +let shift n s = match s with + | DB i t -> DB (i+n) t + | DT i t -> DT (i+n) t + | UN i t -> UN (i+n) t + | NM x i -> NM x (i+n) + | UD x i -> UD x (i+n) + | NT _ _ -> s +let shift_subst n s = List.Tot.map (shift n) s + +let subst1 (n:namedv) (t1:term) (t2:term) : term = + subst_term [NT n t1] t2 + +(* + * AR: add versions that take attributes as arguments? + *) +let mk_binder (nm : string) (sort : typ) : simple_binder = + let bv : binder_view = { + ppname = seal nm; + qual = Q_Explicit; + attrs = []; + sort = sort; + } + in + inspect_pack_binder bv; + pack_binder bv + +let mk_implicit_binder (nm : string) (sort : typ) : binder = + pack_binder { + ppname = seal nm; + qual = Q_Implicit; + attrs = []; + sort = sort; + } + +let push_binding (e:env) (b:binding) : env = + let nv : namedv = pack_namedv { + uniq = b.uniq; + sort = seal b.sort; + ppname = b.ppname; + } + in + push_namedv e nv + +val flatten_name : name -> Tot string +let rec flatten_name ns = + match ns with + | [] -> "" + | [n] -> n + | n::ns -> n ^ "." 
^ flatten_name ns + +let rec mk_app (t : term) (args : list argv) : Tot term (decreases args) = + match args with + | [] -> t + | (x::xs) -> mk_app (pack_ln (Tv_App t x)) xs + +// Helper for when all arguments are explicit +let mk_e_app (t : term) (args : list term) : Tot term = + let e t = (t, Q_Explicit) in + mk_app t (List.Tot.Base.map e args) + +let u_unk : universe = pack_universe Uv_Unk + +let rec mk_tot_arr_ln (bs: list binder) (cod : term) : Tot term (decreases bs) = + match bs with + | [] -> cod + | (b::bs) -> pack_ln (Tv_Arrow b (pack_comp (C_Total (mk_tot_arr_ln bs cod)))) + +let rec mk_arr_ln (bs: list binder{~(Nil? bs)}) (cod : comp) : Tot term (decreases bs) = + match bs with + | [b] -> pack_ln (Tv_Arrow b cod) + | (b::bs) -> pack_ln (Tv_Arrow b (pack_comp (C_Total (mk_arr_ln bs cod)))) + +let fv_to_string (fv:fv) : string = implode_qn (inspect_fv fv) + +let mk_stringlit (s : string) : term = + pack_ln (Tv_Const (C_String s)) + +let mk_strcat (t1 t2 : term) : term = + mk_e_app (pack_ln (Tv_FVar (pack_fv ["Prims"; "strcat"]))) [t1; t2] + +let mk_cons (h t : term) : term = + mk_e_app (pack_ln (Tv_FVar (pack_fv cons_qn))) [h; t] + +let mk_cons_t (ty h t : term) : term = + mk_app (pack_ln (Tv_FVar (pack_fv cons_qn))) [(ty, Q_Implicit); (h, Q_Explicit); (t, Q_Explicit)] + +let rec mk_list (ts : list term) : term = + match ts with + | [] -> pack_ln (Tv_FVar (pack_fv nil_qn)) + | t::ts -> mk_cons t (mk_list ts) + +let mktuple_n (ts : list term{List.Tot.Base.length ts <= 8}) : term = + match List.Tot.Base.length ts with + | 0 -> pack_ln (Tv_Const C_Unit) + | 1 -> let [x] = ts in x + | n -> begin + let qn = match n with + | 2 -> mktuple2_qn + | 3 -> mktuple3_qn + | 4 -> mktuple4_qn + | 5 -> mktuple5_qn + | 6 -> mktuple6_qn + | 7 -> mktuple7_qn + | 8 -> mktuple8_qn + in mk_e_app (pack_ln (Tv_FVar (pack_fv qn))) ts + end + +let destruct_tuple (t : term) : option (list term) = + let head, args = collect_app_ln t in + match inspect_ln head with + | Tv_FVar fv -> + if List.Tot.Base.mem + (inspect_fv fv) [mktuple2_qn; mktuple3_qn; mktuple4_qn; mktuple5_qn; + mktuple6_qn; mktuple7_qn; mktuple8_qn] + then Some (List.Tot.Base.concatMap (fun (t, q) -> + match q with + | Q_Explicit -> [t] + | _ -> []) args) + else None + | _ -> None + +let mkpair (t1 t2 : term) : term = + mktuple_n [t1;t2] + +let rec head (t : term) : term = + match inspect_ln t with + | Tv_Match t _ _ + | Tv_Let _ _ _ t _ + | Tv_Abs _ t + | Tv_Refine _ t + | Tv_App t _ + | Tv_AscribedT t _ _ _ + | Tv_AscribedC t _ _ _ -> head t + + | Tv_Unknown + | Tv_Uvar _ _ + | Tv_Const _ + | Tv_Type _ + | Tv_Var _ + | Tv_BVar _ + | Tv_FVar _ + | Tv_UInst _ _ + | Tv_Arrow _ _ + | Tv_Unsupp -> t + +(** Checks if a term `t` is equal to some FV (a top level name). +Ignores universes and ascriptions. *) +let is_fvar (t : term) (nm:string) : bool = + match inspect_ln_unascribe t with + | Tv_FVar fv + | Tv_UInst fv _ -> implode_qn (inspect_fv fv) = nm + | _ -> false + +(** Checks if a term `t` is equal to any FV (a top level name) from +those given in the list. Ignores universes and ascriptions. 
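+(For example, [is_any_fvar t [`%l_and; `%l_or]] holds when [t] itself is the [/\] or the [\/] constant, not an application of it.)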
*) +let rec is_any_fvar (t : term) (nms:list string) : bool = + match nms with + | [] -> false + | v::vs -> is_fvar t v || is_any_fvar t vs + +let is_uvar (t : term) : bool = + match inspect_ln (head t) with + | Tv_Uvar _ _ -> true + | _ -> false + +let binder_set_qual (q:aqualv) (b:binder) : Tot binder = + let bview = inspect_binder b in + pack_binder { bview with qual=q } + +(** Set a vconfig for a sigelt *) +val add_check_with : vconfig -> sigelt -> Tot sigelt +let add_check_with vcfg se = + let attrs = sigelt_attrs se in + let vcfg_t = embed_vconfig vcfg in + let t = `(check_with (`#vcfg_t)) in + set_sigelt_attrs (t :: attrs) se + +let un_uinst (t:term) : term = + match inspect_ln t with + | Tv_UInst fv _ -> pack_ln (Tv_FVar fv) + | _ -> t + +(* Returns [true] iff the term [t] is just the name [nm], though +possibly universe-instantiated and applied to some implicit arguments. +*) +let rec is_name_imp (nm : name) (t : term) : bool = + begin match inspect_ln_unascribe t with + | Tv_FVar fv + | Tv_UInst fv _ -> + if inspect_fv fv = nm + then true + else false + | Tv_App l (_, Q_Implicit) -> + is_name_imp nm l + | _ -> false + end + +(* If t is of the shape [squash t'], return [Some t'], +otherwise [None]. *) +let unsquash_term (t : term) : option term = + match inspect_ln_unascribe t with + | Tv_App l (r, Q_Explicit) -> + if is_name_imp squash_qn l + then Some r + else None + | _ -> None + +(* As [unsquash_term], but returns the original term if +[t] is not a squash. *) +let maybe_unsquash_term (t : term) : term = + match unsquash_term t with + | Some t' -> t' + | None -> t diff --git a/stage0/ulib/FStar.Reflection.V2.Formula.fst b/stage0/ulib/FStar.Reflection.V2.Formula.fst new file mode 100644 index 00000000000..5a4d9427b91 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.Formula.fst @@ -0,0 +1,254 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Reflection.V2.Formula + +open FStar.List.Tot.Base + +open FStar.Stubs.Reflection.Types +open FStar.Reflection.Const +open FStar.Stubs.Reflection.V2.Builtins +open FStar.Reflection.V2.Derived +open FStar.Stubs.Reflection.V2.Data + +open FStar.Stubs.Tactics.Common +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.NamedView + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +///// Helpers (we cannot use the ones in Tactics.V2.Derived, those are for named views ///// +private let rec inspect_unascribe (t:term) : Tac term_view = + match inspect t with + | Tv_AscribedT t _ _ _ + | Tv_AscribedC t _ _ _ -> + inspect_unascribe t + | tv -> tv + +private let rec collect_app' (args : list argv) (t : term) + : Tac (term & list argv) (decreases t) = + match inspect_unascribe t with + | Tv_App l r -> + collect_app' (r::args) l + | _ -> (t, args) + +private let collect_app = collect_app' [] +///// + +[@@plugin] +noeq type comparison = + | Eq of option typ (* Propositional equality (eq2), maybe annotated *) + | BoolEq of option typ (* Decidable, boolean equality (eq), maybe annotated *) + | Lt | Le | Gt | Ge (* Orderings, at type `int` (and subtypes) *) + +[@@plugin] +noeq type formula = + | True_ : formula + | False_ : formula + | Comp : comparison -> term -> term -> formula + | And : term -> term -> formula + | Or : term -> term -> formula + | Not : term -> formula + | Implies: term -> term -> formula + | Iff : term -> term -> formula + | Forall : bv -> typ -> term -> formula + | Exists : bv -> typ -> term -> formula + | App : term -> term -> formula + | Name : namedv -> formula + | FV : fv -> formula + | IntLit : int -> formula + | F_Unknown : formula // Also a baked-in "None" + +let mk_Forall (typ : term) (pred : term) : Tot formula = + let b = pack_bv ({ ppname = as_ppname "x"; + sort = seal typ; + index = 0; }) in + Forall b typ (pack (Tv_App pred (pack (Tv_BVar b), Q_Explicit))) + +let mk_Exists (typ : term) (pred : term) : Tot formula = + let b = pack_bv ({ ppname = as_ppname "x"; + sort = seal typ; + index = 0; }) in + Exists b typ (pack (Tv_App pred (pack (Tv_BVar b), Q_Explicit))) + +[@@plugin] +let term_as_formula' (t:term) : Tac formula = + match inspect_unascribe t with + | Tv_Var n -> + Name n + + | Tv_FVar fv + | Tv_UInst fv _ -> + // Cannot use `when` clauses when verifying! + let qn = inspect_fv fv in + if qn = true_qn then True_ + else if qn = false_qn then False_ + else FV fv + + // TODO: l_Forall + // ...or should we just try to drop all squashes? + // TODO: b2t at this point ? 
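+  (* Application case: collect the head and the argument list of [t], then
+     recognize connectives, comparisons and quantifiers by the head's
+     fully qualified name; anything unrecognized becomes [App]. *)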
+ | Tv_App h0 t -> begin + let (h, ts) = collect_app h0 in + let h = un_uinst h in + match inspect h, ts@[t] with + | Tv_FVar fv, [(a1, Q_Implicit); (a2, Q_Explicit); (a3, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = eq2_qn then Comp (Eq (Some a1)) a2 a3 + else if qn = eq1_qn then Comp (BoolEq (Some a1)) a2 a3 + else if qn = lt_qn then Comp Lt a2 a3 + else if qn = lte_qn then Comp Le a2 a3 + else if qn = gt_qn then Comp Gt a2 a3 + else if qn = gte_qn then Comp Ge a2 a3 + else App h0 (fst t) + | Tv_FVar fv, [(a1, Q_Explicit); (a2, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = imp_qn then Implies a1 a2 + else if qn = and_qn then And a1 a2 + else if qn = iff_qn then Iff a1 a2 + else if qn = or_qn then Or a1 a2 + // Non-annotated comparisons + else if qn = eq2_qn then Comp (Eq None) a1 a2 + else if qn = eq1_qn then Comp (BoolEq None) a1 a2 + else App h0 (fst t) + + | Tv_FVar fv, [(a1, Q_Implicit); (a2, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = forall_qn then mk_Forall a1 a2 + else if qn = exists_qn then mk_Exists a1 a2 + else App h0 (fst t) + | Tv_FVar fv, [(a, Q_Explicit)] -> + let qn = inspect_fv fv in + if qn = not_qn then Not a + else if qn = b2t_qn then begin + if term_eq a (`false) then False_ + else if term_eq a (`true) then True_ + else App h0 (fst t) + end + else App h0 (fst t) + | _ -> + App h0 (fst t) + end + + | Tv_Const (C_Int i) -> + IntLit i + + (* Not formulas. *) + | Tv_Let _ _ _ _ _ + | Tv_Match _ _ _ + | Tv_Type _ + | Tv_Abs _ _ + | Tv_Arrow _ _ + | Tv_Uvar _ _ + | Tv_Unknown + | Tv_Unsupp + | Tv_Refine _ _ -> F_Unknown + + (* Other constants? *) + | Tv_Const _ -> F_Unknown + + (* Should not occur, we're using inspect *) + | Tv_BVar _ -> F_Unknown + | _ -> raise (TacticFailure (mkmsg "Unexpected: term_as_formula", None)) + +// Unsquashing +let term_as_formula (t:term) : Tac formula = + match unsquash_term t with + | None -> F_Unknown + | Some t -> + term_as_formula' t + +// Badly named, this only means it always returns a formula even if not properly +// squashed at the top-level. 
+let term_as_formula_total (t:term) : Tac formula = + term_as_formula' (maybe_unsquash_term t) + +let formula_as_term_view (f:formula) : Tot term_view = + let mk_app' tv args = List.Tot.Base.fold_left (fun tv a -> Tv_App (pack tv) a) tv args in + let e = Q_Explicit in + let i = Q_Implicit in + match f with + | True_ -> Tv_FVar (pack_fv true_qn) + | False_ -> Tv_FVar (pack_fv false_qn) + | Comp (Eq None) l r -> mk_app' (Tv_FVar (pack_fv eq2_qn)) [(l,e);(r,e)] + | Comp (Eq (Some t)) l r -> mk_app' (Tv_FVar (pack_fv eq2_qn)) [(t,i);(l,e);(r,e)] + | Comp (BoolEq None) l r -> mk_app' (Tv_FVar (pack_fv eq1_qn)) [(l,e);(r,e)] + | Comp (BoolEq (Some t)) l r -> mk_app' (Tv_FVar (pack_fv eq1_qn)) [(t,i);(l,e);(r,e)] + | Comp Lt l r -> mk_app' (Tv_FVar (pack_fv lt_qn)) [(l,e);(r,e)] + | Comp Le l r -> mk_app' (Tv_FVar (pack_fv lte_qn)) [(l,e);(r,e)] + | Comp Gt l r -> mk_app' (Tv_FVar (pack_fv gt_qn)) [(l,e);(r,e)] + | Comp Ge l r -> mk_app' (Tv_FVar (pack_fv gte_qn)) [(l,e);(r,e)] + | And p q -> mk_app' (Tv_FVar (pack_fv and_qn)) [(p,e);(q,e)] + | Or p q -> mk_app' (Tv_FVar (pack_fv or_qn)) [(p,e);(q,e)] + | Implies p q -> mk_app' (Tv_FVar (pack_fv imp_qn)) [(p,e);(q,e)] + | Not p -> mk_app' (Tv_FVar (pack_fv not_qn)) [(p,e)] + | Iff p q -> mk_app' (Tv_FVar (pack_fv iff_qn)) [(p,e);(q,e)] + | Forall b sort t -> Tv_Unknown // TODO: decide on meaning of this + | Exists b sort t -> Tv_Unknown // TODO: ^ + + | App p q -> + Tv_App p (q, Q_Explicit) + + | Name b -> + Tv_Var b + + | FV fv -> + Tv_FVar fv + + | IntLit i -> + Tv_Const (C_Int i) + + | F_Unknown -> + Tv_Unknown + +let formula_as_term (f:formula) : Tot term = + pack (formula_as_term_view f) + +private let namedv_to_string (namedv : namedv) : Tac string = + let namedvv = inspect_namedv namedv in + unseal namedvv.ppname + +let formula_to_string (f:formula) : Tac string = + match f with + | True_ -> "True_" + | False_ -> "False_" + | Comp (Eq mt) l r -> "Eq" ^ + (match mt with + | None -> "" + | Some t -> " (" ^ term_to_string t ^ ")") ^ + " (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp (BoolEq mt) l r -> "BoolEq" ^ + (match mt with + | None -> "" + | Some t -> " (" ^ term_to_string t ^ ")") ^ + " (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Lt l r -> "Lt (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Le l r -> "Le (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Gt l r -> "Gt (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | Comp Ge l r -> "Ge (" ^ term_to_string l ^ ") (" ^ term_to_string r ^ ")" + | And p q -> "And (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Or p q -> "Or (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Implies p q -> "Implies (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Not p -> "Not (" ^ term_to_string p ^ ")" + | Iff p q -> "Iff (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Forall bs _sort t -> "Forall (" ^ term_to_string t ^ ")" + | Exists bs _sort t -> "Exists (" ^ term_to_string t ^ ")" + | App p q -> "App (" ^ term_to_string p ^ ") (" ^ term_to_string q ^ ")" + | Name bv -> "Name (" ^ namedv_to_string bv ^ ")" + | FV fv -> "FV (" ^ flatten_name (inspect_fv fv) ^ ")" + | IntLit i -> "Int " ^ string_of_int i + | F_Unknown -> "?" 
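+
+(* For illustration, a sketch of client code (assuming [open FStar.Tactics.V2],
+   which provides [cur_goal] and [split] from FStar.Tactics.V2.Derived): one
+   can dispatch on the shape of the current goal via [term_as_formula]:
+
+     let split_conj () : Tac unit =
+       match term_as_formula (cur_goal ()) with
+       | And _ _ -> split ()   (* a goal [p /\ q] becomes two goals *)
+       | _ -> ()               (* otherwise leave the goal untouched *)
+*)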
diff --git a/stage0/ulib/FStar.Reflection.V2.fst b/stage0/ulib/FStar.Reflection.V2.fst new file mode 100644 index 00000000000..e1adf5d9700 --- /dev/null +++ b/stage0/ulib/FStar.Reflection.V2.fst @@ -0,0 +1,25 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection.V2 + +include FStar.Stubs.Reflection.Types +include FStar.Stubs.Reflection.V2.Data +include FStar.Stubs.Reflection.V2.Builtins +include FStar.Reflection.V2.Derived +include FStar.Reflection.V2.Derived.Lemmas +include FStar.Reflection.Const +include FStar.Reflection.V2.Compare +include FStar.Reflection.V2.Collect diff --git a/stage0/ulib/FStar.Reflection.fst b/stage0/ulib/FStar.Reflection.fst new file mode 100644 index 00000000000..7f2c5f7cdee --- /dev/null +++ b/stage0/ulib/FStar.Reflection.fst @@ -0,0 +1,20 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Reflection + +(* This switch currently points to V1. Ask for V2 explicitly by +importing FStar.Reflection.V2 *) +include FStar.Reflection.V1 diff --git a/stage0/ulib/FStar.ReflexiveTransitiveClosure.fst b/stage0/ulib/FStar.ReflexiveTransitiveClosure.fst new file mode 100644 index 00000000000..c6e2a6097ad --- /dev/null +++ b/stage0/ulib/FStar.ReflexiveTransitiveClosure.fst @@ -0,0 +1,185 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.ReflexiveTransitiveClosure + +open FStar.Tactics.V2 +#set-options "--max_ifuel 1 --max_fuel 0" + +noeq +type _closure (#a:Type u#a) (r:binrel u#a u#r a) : a -> a -> Type u#(max a r) = +| Refl: x:a -> _closure r x x +| Step: x:a -> y:a -> squash (r x y) ->_closure r x y +| Closure: x:a -> y:a -> z:a -> _closure r x y -> _closure r y z -> _closure r x z + +let _closure0 (#a:Type) (r:binrel a) (x y: a) : prop = + squash (_closure r x y) + +let rec induct_ (#a:Type) (r:binrel a) (p: a -> a -> prop) + (f_refl: (x:a -> squash (p x x))) + (f_step: (x:a -> y:a { r x y } -> squash (p x y))) + (f_closure: (x:a -> y:a -> z:a { p x y /\ p y z } -> squash (p x z))) + (x:a) (y:a) (xy:_closure r x y) +: Tot (squash (p x y)) (decreases xy) += match xy with + | Refl x -> f_refl x + | Step x y _ -> f_step x y + | Closure x y z xy yz -> + let p1 = induct_ r p f_refl f_step f_closure x y xy in + let p2 = induct_ r p f_refl f_step f_closure y z yz in + f_closure x y z + +let get_squash (#a:Type) (r:binrel a) (x:a) (y:a{_closure0 r x y}) + : Tot (squash (_closure r x y)) + = assert_norm (_closure0 r x y ==> squash (_closure r x y)) + +val closure_reflexive: #a:Type u#a -> r:binrel u#a u#r a -> Lemma (reflexive (_closure0 r)) +let closure_reflexive #a r = + assert (forall x. _closure0 r x x) by + (let x = forall_intro () in + mapply (`FStar.Squash.return_squash); + mapply (`Refl)) + +#push-options "--warn_error -271" +val closure_transitive: #a:Type u#a -> r:binrel u#a u#r a -> Lemma (transitive (_closure0 r)) +let closure_transitive #a r = + let open FStar.Squash in + let aux (x y z:a) + (s0:squash (_closure r x y)) + (s1:squash (_closure r y z)) + : GTot (squash (_closure r x z)) + = bind_squash s0 (fun p0 -> + bind_squash s1 (fun p1 -> + return_squash (Closure x y z p0 p1))) + in + let aux (x y z:a) + : Lemma (requires (_closure0 r x y /\ _closure0 r y z)) + (ensures _closure0 r x z) + [SMTPat ()] + = get_squash r x y; get_squash r y z; aux x y z () () + in + () +#pop-options + +let closure #a r = + closure_reflexive r; + closure_transitive r; + _closure0 r + +let closure_step #a r x y = + let q : squash (r x y) = () in + assert (squash (r x y) ==> closure r x y) by + (let xy = implies_intro () in + let xy : squash (r x y) = unquote (binding_to_term xy) in + squash_intro (); + mapply (`Step); + assumption()) + +val closure_one_aux: #a:Type u#a -> r:binrel u#a u#r a -> x:a -> y:a + -> xy:_closure r x y + -> Tot (either (squash (x == y)) + (z:a & squash (r x z) & _closure r z y)) + (decreases xy) +let rec closure_one_aux #a r x y xy = + match xy with + | Refl _ -> Inl () + | Step _ _ pr -> Inr (| y, pr, Refl y |) + | Closure x i y xi iy -> + match closure_one_aux r i y iy with + | Inl _ -> closure_one_aux r x y xi + | Inr (| z, r_i_z, c_z_y |) -> + let c_z_y : _closure r z y = c_z_y in + match closure_one_aux r x i xi with + | Inl _ -> Inr (| z, r_i_z, c_z_y |) + | Inr (| w, r_x_w, c_w_i |) -> + let step : _closure r i z = Step #a #r i z r_i_z in + let c_i_y : _closure r i y = Closure i z y step c_z_y in + let c_w_y : _closure r w y = Closure w i y c_w_i c_i_y in + Inr (| w, r_x_w, c_w_y |) + +let closure_one_aux' (#a:Type u#a) (r:binrel u#a u#r a) (x y:a) + (xy:_closure r x y) + : GTot (squash (x == y \/ (exists z. 
squash (r x z) /\ closure r z y))) + = let p = closure_one_aux r x y xy in + match p with + | Inl _ -> () + | Inr (| z, rxz, _c_zy |) -> + assert (squash (r x z)); + let s : closure r z y = FStar.Squash.return_squash _c_zy in + let ss = FStar.Squash.return_squash s in + FStar.Squash.give_proof ss; + assert (closure r z y); + () + +val closure_one: #a:Type u#a -> r:binrel u#a u#r a -> x:a -> y:a + -> xy:squash (closure r x y) + -> GTot (squash (x == y \/ (exists z. squash (r x z) /\ closure r z y))) +let closure_one #a r x y xy = + let open FStar.Squash in + bind_squash xy (fun xy -> + bind_squash xy (closure_one_aux' r x y)) + +let closure_inversion #a r x y = closure_one r x y () + +val _stable_on_closure: #a:Type u#a -> r:binrel u#a u#r a -> p:(a -> Type0) + -> p_stable_on_r: squash (forall x y. p x /\ squash (r x y) ==> p y) + -> x: a + -> y: a + -> xy: _closure r x y + -> px: squash (p x) + -> GTot (squash (p y)) (decreases xy) +let rec _stable_on_closure #a r p p_stable_on_r x y xy px = + match xy with + | Refl _ -> () + | Step _ _ _ -> () + | Closure x a y xa ay -> + let hi = _stable_on_closure r p p_stable_on_r in + let pa = hi x a xa px in + hi a y ay pa + +let squash_implies_to_arrow (p:Type u#p) (q:Type) + : Lemma (requires (squash (p -> GTot q))) + (ensures squash p ==> q) + = () + +let squash_double_arrow (#a:Type u#a) (#p:Type0) + (f:(squash (a -> Tot (squash p)))) + : Tot (squash (a -> GTot p)) = + FStar.Squash.squash_double_arrow f + +let stable_on_closure #a r p hr = + assert (forall x y. p x ==> closure r x y ==> p y) by + (let x = forall_intro () in + let y = forall_intro () in + let x : a = unquote (binding_to_term x) in + let y : a = unquote (binding_to_term y) in + let px = implies_intro () in + mapply (`squash_implies_to_arrow); + mapply (`FStar.Squash.return_squash); + apply (`squash_double_arrow); + mapply (`FStar.Squash.return_squash); + let xy = intro () in + let xy : _closure r x y = unquote (binding_to_term xy) in + exact (quote (_stable_on_closure r p hr x y xy (Squash.get_proof _)))) + +let induct + (#a:Type) (r:binrel a) (p: a -> a -> prop) + (f_refl: (x:a -> squash (p x x))) + (f_step: (x:a -> y:a { r x y } -> squash (p x y))) + (f_closure: (x:a -> y:a -> z:a { p x y /\ p y z } -> squash (p x z))) + (x:a) (y:a) (xy:squash (closure r x y)) +: squash (p x y) += let xy = FStar.Squash.join_squash #(_closure r x y) xy in + FStar.Squash.bind_squash xy (induct_ r p f_refl f_step f_closure x y) diff --git a/stage0/ulib/FStar.ReflexiveTransitiveClosure.fsti b/stage0/ulib/FStar.ReflexiveTransitiveClosure.fsti new file mode 100644 index 00000000000..a5ca566e010 --- /dev/null +++ b/stage0/ulib/FStar.ReflexiveTransitiveClosure.fsti @@ -0,0 +1,83 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.ReflexiveTransitiveClosure + +/// This module defines the reflexive transitive closure of a +/// relation. That is, the smallest preorder that includes it. 
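+///
+/// For instance, with the step relation [fun (x y:nat) -> y == x + 1],
+/// [closure] of that relation relates [x] to [y] precisely when [x <= y].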
+///
+/// Closures are convenient for defining monotonic memory references:
+///
+/// - Define a `step` relation and take `closure step` as the
+///   monotonic relation of the reference.
+///
+/// - To witness a property of the value of the reference, one must
+///   show that the property is stable with respect to `closure step`,
+///   but this boils down to proving that it is stable with respect to
+///   `step` (see lemma `stable_on_closure` below).
+///
+/// See examples/preorder/Closure.fst for usage examples.
+
+let binrel (a:Type) = a -> a -> Type
+
+let predicate (a:Type u#a) = a -> Type0
+
+let reflexive (#a:Type) (rel:binrel u#a u#r a) =
+  forall (x:a). squash (rel x x)
+
+let transitive (#a:Type) (rel:binrel u#a u#r a) =
+  forall (x:a) (y:a) (z:a). (squash (rel x y) /\ squash (rel y z)) ==> squash (rel x z)
+
+let preorder_rel (#a:Type) (rel:binrel u#a u#r a) =
+  reflexive rel /\ transitive rel
+
+type preorder (a:Type u#a) : Type u#(max a (1 + r)) = rel:binrel u#a u#r a{preorder_rel rel}
+
+let stable (#a:Type u#a) (p:a -> Type0) (rel:binrel u#a u#r a{preorder_rel rel}) =
+  forall (x:a) (y:a). (p x /\ squash (rel x y)) ==> p y
+
+val closure (#a:Type u#a) (r:binrel u#a u#r a) : preorder u#a u#0 a
+
+(** `closure r` includes `r` *)
+val closure_step: #a:Type u#a -> r:binrel u#a u#r a -> x:a -> y:a { r x y }
+  -> Lemma (ensures closure r x y)
+          [SMTPat (closure r x y)]
+
+(** `closure r` is the smallest preorder that includes `r` *)
+val closure_inversion: #a:Type u#a -> r:binrel u#a u#r a -> x:a -> y:a
+  -> Lemma (requires closure r x y)
+          (ensures x == y \/ (exists z. squash (r x z) /\ closure r z y))
+
+(**
+* A predicate that is stable on `r` is stable on `closure r`
+*
+* This is useful to witness properties of monotonic references where
+* the monotonicity relation is the closure of a step relation.
+*)
+val stable_on_closure: #a:Type u#a -> r:binrel u#a u#r a -> p:(a -> Type0)
+  -> p_stable_on_r: (squash (forall x y.{:pattern (p y); (r x y)} p x /\ squash (r x y) ==> p y))
+  -> Lemma (forall x y.{:pattern (closure r x y)} p x /\ closure r x y ==> p y)
+
+(**
+* Induction over the reflexive transitive closure of r
+*)
+val induct
+  (#a:Type) (r:binrel a) (p: a -> a -> prop)
+  (f_refl: (x:a -> squash (p x x)))
+  (f_step: (x:a -> y:a { r x y } -> squash (p x y)))
+  (f_closure: (x:a -> y:a -> z:a { p x y /\ p y z } -> squash (p x z)))
+  (x:a) (y:a) (xy:squash (closure r x y))
+: squash (p x y)
+
diff --git a/stage0/ulib/FStar.ST.fst b/stage0/ulib/FStar.ST.fst
new file mode 100644
index 00000000000..4f44fd94ba8
--- /dev/null
+++ b/stage0/ulib/FStar.ST.fst
@@ -0,0 +1,132 @@
+(*
+   Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, and Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*) +module FStar.ST + +open FStar.TSet +open FStar.Heap +open FStar.Preorder + +module W = FStar.Monotonic.Witnessed + +(***** Global ST (GST) effect with put, get, witness, and recall *****) + +new_effect GST = STATE_h heap + +let gst_pre = st_pre_h heap +let gst_post' (a:Type) (pre:Type) = st_post_h' heap a pre +let gst_post (a:Type) = st_post_h heap a +let gst_wp (a:Type) = st_wp_h heap a + +unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:heap) = wp (fun a -> p a h) +sub_effect DIV ~> GST = lift_div_gst + +let heap_rel (h1:heap) (h2:heap) = + forall (a:Type0) (rel:preorder a) (r:mref a rel). h1 `contains` r ==> + (h2 `contains` r /\ rel (sel h1 r) (sel h2 r)) + +assume val gst_get: unit -> GST heap (fun p h0 -> p h0 h0) +assume val gst_put: h1:heap -> GST unit (fun p h0 -> heap_rel h0 h1 /\ p () h1) + +type heap_predicate = heap -> Type0 + +let stable (p:heap_predicate) = + forall (h1:heap) (h2:heap). (p h1 /\ heap_rel h1 h2) ==> p h2 + +[@@"opaque_to_smt"] +let witnessed (p:heap_predicate{stable p}) : Type0 = W.witnessed heap_rel p + +assume val gst_witness: p:heap_predicate -> GST unit (fun post h0 -> stable p /\ p h0 /\ (witnessed p ==> post () h0)) +assume val gst_recall: p:heap_predicate -> GST unit (fun post h0 -> stable p /\ witnessed p /\ (p h0 ==> post () h0)) + +val lemma_functoriality (p:heap_predicate{stable p /\ witnessed p}) + (q:heap_predicate{stable q /\ (forall (h:heap). p h ==> q h)}) + :Lemma (ensures (witnessed q)) +let lemma_functoriality p q = + reveal_opaque (`%witnessed) witnessed; + W.lemma_witnessed_weakening heap_rel p q + +(***** ST effect *****) + +let st_pre = gst_pre +let st_post' = gst_post' +let st_post = gst_post +let st_wp = gst_wp + +new_effect STATE = GST + +unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp +sub_effect GST ~> STATE = lift_gst_state + +effect State (a:Type) (wp:st_wp a) = STATE a wp + +effect ST (a:Type) (pre:st_pre) (post: (h:heap -> Tot (st_post' a (pre h)))) = + STATE a (fun (p:st_post a) (h:heap) -> pre h /\ (forall a h1. 
post h a h1 ==> p a h1)) +effect St (a:Type) = ST a (fun h -> True) (fun h0 r h1 -> True) + +let contains_pred (#a:Type0) (#rel:preorder a) (r:mref a rel) = fun h -> h `contains` r + +type mref (a:Type0) (rel:preorder a) = r:Heap.mref a rel{is_mm r = false /\ witnessed (contains_pred r)} + +let recall (#a:Type) (#rel:preorder a) (r:mref a rel) :STATE unit (fun p h -> Heap.contains h r ==> p () h) + = gst_recall (contains_pred r) + +let alloc (#a:Type) (#rel:preorder a) (init:a) + :ST (mref a rel) + (fun h -> True) + (fun h0 r h1 -> fresh r h0 h1 /\ modifies Set.empty h0 h1 /\ sel h1 r == init) + = let h0 = gst_get () in + let r, h1 = alloc rel h0 init false in + gst_put h1; + gst_witness (contains_pred r); + r + +let read (#a:Type) (#rel:preorder a) (r:mref a rel) :STATE a (fun p h -> p (sel h r) h) + = let h0 = gst_get () in + gst_recall (contains_pred r); + Heap.lemma_sel_equals_sel_tot_for_contained_refs h0 r; + sel_tot h0 r + +let write (#a:Type) (#rel:preorder a) (r:mref a rel) (v:a) + : ST unit + (fun h -> rel (sel h r) v) + (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ + modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ + sel h1 r == v) + = let h0 = gst_get () in + gst_recall (contains_pred r); + let h1 = upd_tot h0 r v in + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + Heap.lemma_upd_equals_upd_tot_for_contained_refs h0 r v; + gst_put h1 + +let get (u:unit) :ST heap (fun h -> True) (fun h0 h h1 -> h0==h1 /\ h==h1) = gst_get () + +let op_Bang (#a:Type) (#rel:preorder a) (r:mref a rel) + : STATE a (fun p h -> p (sel h r) h) += read #a #rel r + +let op_Colon_Equals (#a:Type) (#rel:preorder a) (r:mref a rel) (v:a) + : ST unit + (fun h -> rel (sel h r) v) + (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ + modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ + sel h1 r == v) += write #a #rel r v + +type ref (a:Type0) = mref a (trivial_preorder a) + +let modifies_none (h0:heap) (h1:heap) = modifies !{} h0 h1 diff --git a/stage0/ulib/FStar.Sealed.Inhabited.fst b/stage0/ulib/FStar.Sealed.Inhabited.fst new file mode 100644 index 00000000000..52314431c5e --- /dev/null +++ b/stage0/ulib/FStar.Sealed.Inhabited.fst @@ -0,0 +1,57 @@ +(* + Copyright 2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: G. Martinez, N. Swamy +*) + +module FStar.Sealed.Inhabited + +(* This module provides an instance of FStar.Sealed.sealed + specialized for inhabited types. + + The main type `sealed w` is the singleton type of a sealed values + that is provably equal to `seal w`. + + This type `sealed_` is not intended for use by clients, it is exposed + only to enable writing an SMT pattern. 
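+
+   As an illustrative sketch (assuming this module is open), a client could
+   write:
+
+     let s : sealed 0 = seal 42
+     let _ = assert (s == seal 0)
+
+   where the assertion is expected to follow automatically via the
+   [sealed_singleton] lemma below.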
+*)
+let sealed_ (#a:Type u#a)
+            (witness:a)
+  : Type u#0
+  = FStar.Sealed.sealed a
+
+(* A trivial predicate, just for writing an SMT pattern on sealed_singleton *)
+let is_sealed (#a:Type u#a)
+              (#witness:a)
+              (x:sealed_ witness)
+  : prop
+  = True
+
+let sealed (#a:Type u#a)
+           (witness:a)
+  : Type u#0
+  = s:sealed_ witness { is_sealed s }
+
+(* Sealing a value `x:a` at the type `sealed w` *)
+let seal (#a:Type u#a) (#w:a) (x:a)
+  : sealed w
+  = FStar.Sealed.seal x
+
+(* A lemma with an SMT pattern for automatically proving that a sealed value
+   `x : sealed w` satisfies `x == seal w` *)
+let sealed_singleton (a:Type u#a) (w:a) (x:sealed w)
+  : Lemma (x == seal #a #w w)
+          [SMTPat (is_sealed #a #w x)]
+  = FStar.Sealed.sealed_singl x (seal #a #w w)
diff --git a/stage0/ulib/FStar.Sealed.fsti b/stage0/ulib/FStar.Sealed.fsti
new file mode 100644
index 00000000000..c14905a9013
--- /dev/null
+++ b/stage0/ulib/FStar.Sealed.fsti
@@ -0,0 +1,51 @@
+(*
+   Copyright 2023 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+   Authors: G. Martinez, N. Swamy
+*)
+
+module FStar.Sealed
+
+(* This module provides the type ``sealed a`` which is a singleton
+   type from the perspective of F*'s logic. I.e., two values `x, y`
+   both of type `sealed a` are provably equal.
+
+   However, from meta-F*, i.e., using the Tac effect, one can
+   break the seal and extract an underlying value of type `a` from a
+   `sealed a`.
+
+   See also FStar.Sealed.Inhabited for a version of this module for
+   use with inhabited types, in a style that is more efficient for
+   SMT-based reasoning.
+*)
+assume
+new type sealed ([@@@strictly_positive] a : Type u#aa) : Type u#0
+
+(* The main axiom provided by this module:
+
+   Two sealed values of the same type are equal.
+
+   Their seal can be broken only at the meta level, by incurring a Tac effect.
+   See FStar.Tactics.unseal
+*)
+val sealed_singl (#a:Type) (x y : sealed a)
+  : Lemma (x == y)
+
+(* Sealing a value hides it from the logical fragment of F* *)
+val seal (#a : Type u#aa) (x:a) : Tot (sealed a)
+
+val map_seal (#a : Type u#aa) (#b : Type u#bb) (s : sealed a) (f : a -> Tot b) : Tot (sealed b)
+
+val bind_seal (#a : Type u#aa) (#b : Type u#bb) (s : sealed a) (f : a -> Tot (sealed b)) : Tot (sealed b)
diff --git a/stage0/ulib/FStar.Seq.Base.fst b/stage0/ulib/FStar.Seq.Base.fst
new file mode 100644
index 00000000000..3c1ebe619d0
--- /dev/null
+++ b/stage0/ulib/FStar.Seq.Base.fst
@@ -0,0 +1,290 @@
+(*
+   Copyright 2008-2014 Nikhil Swamy and Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*) + +(* A logical theory of sequences indexed by natural numbers in [0, n) *) +module FStar.Seq.Base +//#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 1 --max_ifuel 1" + +module List = FStar.List.Tot + +type seq (a:Type u#a) :Type u#a = + | MkSeq: l:list a -> seq a + +let length #_ s = List.length (MkSeq?.l s) + +let seq_to_list #_ s = + match s with + | MkSeq l -> l + +let seq_of_list #_ l = MkSeq l + +let index #_ s i = List.index (MkSeq?.l s) i + +let _cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = MkSeq (x::(MkSeq?.l s)) + +let hd (#a:Type) (s:seq a{length s > 0}) : Tot a = List.hd (MkSeq?.l s) + +let tl (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = MkSeq (List.tl (MkSeq?.l s)) + +let rec create #_ len v = if len = 0 then MkSeq [] else _cons v (create (len - 1) v) + +private let rec init_aux' (#a:Type) (len:nat) (k:nat{k < len}) (contents: (i:nat{i < len} -> Tot a)) + : Tot (seq a) + (decreases (len - k)) + = if k + 1 = len + then MkSeq [contents k] + else _cons (contents k) (init_aux' len (k+1) contents) + +let init_aux = init_aux' + +let init #_ len contents = if len = 0 then MkSeq [] else init_aux len 0 contents + +private let rec init_aux_ghost' (#a:Type) (len:nat) (k:nat{k < len}) (contents:(i:nat { i < len } -> GTot a)) + : GTot (seq a) + (decreases (len - k)) + = if k + 1 = len + then MkSeq [contents k] + else _cons (contents k) (init_aux_ghost' len (k+1) contents) + +let init_aux_ghost = init_aux_ghost' + +let init_ghost #_ len contents = if len = 0 then MkSeq [] else init_aux_ghost len 0 contents + +let empty #_ = MkSeq [] + +let lemma_empty #_ _ = () + +private let rec upd' (#a:Type) (s:seq a) (n:nat{n < length s}) (v:a) + : Tot (seq a) + (decreases (length s)) + = if n = 0 then _cons v (tl s) else _cons (hd s) (upd' (tl s) (n - 1) v) + +let upd = upd' + +let append #_ s1 s2 = MkSeq (List.append (MkSeq?.l s1) (MkSeq?.l s2)) + +private let rec slice' (#a:Type) (s:seq a) (i:nat) (j:nat{i <= j && j <= length s}) + : Tot (seq a) + (decreases (length s)) + = if i > 0 then slice' #a (tl s) (i - 1) (j - 1) + else if j = 0 then MkSeq [] + else _cons (hd s) (slice' #a (tl s) i (j - 1)) + +let slice = slice' + +let lemma_seq_of_seq_to_list #_ s = () +let lemma_seq_to_seq_of_list #_ s = () +let lemma_seq_of_list_cons #_ x l = () +let lemma_seq_to_list_cons #_ x s = () + +let rec lemma_create_len #_ n i = if n = 0 then () else lemma_create_len (n - 1) i + +let rec lemma_init_aux_len' (#a:Type) (n:nat) (k:nat{k < n}) (contents:(i:nat{ i < n } -> Tot a)) + : Lemma (requires True) + (ensures (length (init_aux n k contents) = n - k)) + (decreases (n-k)) += if k + 1 = n then () else lemma_init_aux_len' #a n (k+1) contents + +let lemma_init_len #_ n contents = if n = 0 then () else lemma_init_aux_len' n 0 contents + +let lemma_init_aux_len = lemma_init_aux_len' + +private let rec lemma_init_ghost_aux_len' (#a:Type) (n:nat) (k:nat{k < n}) (contents:(i:nat{ i < n } -> GTot a)) + : Lemma (requires True) + (ensures (length (init_aux_ghost n k contents) = n - k)) + (decreases (n-k)) += if k + 1 = n then () else lemma_init_ghost_aux_len' #a n (k+1) contents + +let lemma_init_ghost_len #_ n contents = if n = 0 then () else lemma_init_ghost_aux_len' n 0 contents + +let lemma_init_ghost_aux_len = lemma_init_ghost_aux_len' + +let rec lemma_len_upd #_ n v s = if n = 0 then () else lemma_len_upd (n - 1) v (tl s) + +let lemma_len_append #_ s1 s2 = FStar.List.Tot.append_length (MkSeq?.l s1) (MkSeq?.l s2) + +let rec lemma_len_slice' (#a:Type) (s:seq a) (i:nat) (j:nat{i <= j && j <= length 
s}) + : Lemma + (requires True) + (ensures (length (slice s i j) = j - i)) (decreases (length s)) += if i > 0 then lemma_len_slice' #a (tl s) (i - 1) (j - 1) + else if j = 0 then () + else lemma_len_slice' #a (tl s) i (j - 1) + +let lemma_len_slice = lemma_len_slice' + +let rec lemma_index_create #_ n v i = + if n = 0 then () + else if i = 0 then () + else (lemma_create_len (n - 1) v; lemma_index_create (n - 1) v (i - 1)) + +let rec lemma_index_upd1' (#a:Type) (s:seq a) (n:nat{n < length s}) (v:a) + : Lemma + (requires True) + (ensures (index (upd s n v) n == v)) (decreases (length s)) += if n = 0 + then () + else begin + lemma_index_upd1' #a (tl s) (n - 1) v; + assert (index (upd (tl s) (n-1) v) (n-1) == v) + end + +let lemma_index_upd1 = lemma_index_upd1' + +let rec lemma_index_upd2' (#a:Type) (s:seq a) (n:nat{n < length s}) (v:a) (i:nat{i<>n /\ i < length s}) + : Lemma + (requires True) + (ensures (index (upd s n v) i == index s i)) + (decreases (length s)) += match (MkSeq?.l s) with + | [] -> () + | hd::tl -> + if i = 0 then () + else + if n = 0 then () + else (lemma_len_upd (n - 1) v (MkSeq tl); lemma_index_upd2' #a (MkSeq tl) (n - 1) v (i - 1)) + +let lemma_index_upd2 = lemma_index_upd2' + +let rec lemma_index_app1' (#a:Type) (s1:seq a) (s2:seq a) (i:nat{i < length s1}) + : Lemma + (requires True) + (ensures (index (append s1 s2) i == index s1 i)) (decreases (length s1)) += match (MkSeq?.l s1) with + | [] -> () + | hd::tl -> + if i = 0 then () + else (lemma_len_append (MkSeq tl) s2; lemma_index_app1' #a (MkSeq tl) s2 (i - 1)) + +let lemma_index_app1 = lemma_index_app1' + +let rec lemma_index_app2' (#a:Type) (s1:seq a) (s2:seq a) (i:nat{i < length s1 + length s2 /\ length s1 <= i}) +: Lemma + (requires True) + (ensures (index (append s1 s2) i == index s2 (i - length s1))) (decreases (length s1)) += match s1.l with + | [] -> () + | hd::tl -> lemma_index_app2' #a (MkSeq tl) s2 (i - 1) + +let lemma_index_app2 = lemma_index_app2' + +let rec lemma_index_slice0' (#a:Type) (s:seq a) (j:nat{j <= length s}) (k : nat{k < j}) +: Lemma + (requires True) + (ensures (index (slice s 0 j) k == index s k)) (decreases (length s)) += if k = 0 + then () + else lemma_index_slice0' (tl s) (j-1) (k-1) + +#push-options "--fuel 1 --ifuel 0" +let rec lemma_index_slice' (#a:Type) (s:seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (k:nat{k < j - i}) +: Lemma + (requires True) + (ensures (index (slice s i j) k == index s (k + i))) (decreases (length s)) += if i > 0 + then ( + lemma_index_slice' #a (tl s) (i - 1) (j - 1) k; + assert (index (slice (tl s) (i - 1) (j - 1)) k == index (tl s) (k + (i - 1))); + assert (index (slice (tl s) (i - 1) (j - 1)) k == index s (k + i)); + assert (index (slice s i j) k == index s (k + i)) + ) + else ( + assert (j > 0); + lemma_index_slice0' #a s j k + ) +#pop-options + +let lemma_index_slice = lemma_index_slice' + +let hasEq_lemma _ = () + +let equal #a s1 s2 = + (length s1 = length s2 + /\ (forall (i:nat{i < length s1}).{:pattern (index s1 i); (index s2 i)} (index s1 i == index s2 i))) + +let rec eq_i' (#a:eqtype) (s1:seq a) (s2:seq a{length s1 == length s2}) (i:nat{i <= length s1}) +: Tot (r:bool{r <==> (forall j. 
(j >= i /\ j < length s1) ==> (index s1 j = index s2 j))}) + (decreases (length s1 - i)) += if i = length s1 then true + else + if index s1 i = index s2 i then eq_i' s1 s2 (i + 1) + else false + +let eq_i = eq_i' + +let eq #_ s1 s2 = if length s1 = length s2 then eq_i s1 s2 0 else false + +let lemma_eq_intro #_ s1 s2 = () + +let lemma_eq_refl #_ s1 s2 = () + +let lemma_eq_elim #_ s1 s2 = + assert ( length s1 == List.length (MkSeq?.l s1) ); + assert ( length s2 == List.length (MkSeq?.l s2) ); + assert ( forall (i: nat) . i < length s1 ==> index s1 i == List.index (MkSeq?.l s1) i); + assert ( forall (i: nat) . i < length s1 ==> index s2 i == List.index (MkSeq?.l s2) i); + List.index_extensionality (MkSeq?.l s1) (MkSeq?.l s2) + +(* Properties of [append] *) + +let append_assoc #a s1 s2 s3 = List.append_assoc (MkSeq?.l s1) (MkSeq?.l s2) (MkSeq?.l s3) + +let append_empty_l #a s = List.append_nil_l (MkSeq?.l s) + +let append_empty_r #a s = List.append_l_nil (MkSeq?.l s) + + +private +let rec init_index_aux (#a:Type) (len:nat) (k:nat{k < len}) (contents:(i:nat { i < len } -> Tot a)) + : Lemma (requires True) + (ensures (forall (i:nat{i < len - k}).index (init_aux len k contents) i == contents (k + i))) + (decreases (len - k)) += + if k + 1 = len + then () + else begin + init_index_aux #a len (k+1) contents ; + assert (forall (i:nat{i < len - k}). + if i = 0 then index (init_aux len k contents) 0 == contents k + else index (init_aux len k contents) i == index (init_aux len (k+1) contents) (i-1)) + end + +let init_index #_ len contents = + if len = 0 then () else init_index_aux len 0 contents + +let init_index_ #_ len contents j = init_index len contents + +private +let rec init_ghost_index_aux (#a:Type) (len:nat) (k:nat{k < len}) (contents:(i:nat { i < len } -> GTot a)) + : Lemma (requires True) + (ensures (forall (i:nat{i < len - k}).index (init_aux_ghost len k contents) i == contents (k + i))) + (decreases (len - k)) += + if k + 1 = len + then () + else begin + init_ghost_index_aux #a len (k+1) contents ; + assert (forall (i:nat{i < len - k}). + if i = 0 then index (init_aux_ghost len k contents) 0 == contents k + else index (init_aux_ghost len k contents) i == index (init_aux_ghost len (k+1) contents) (i-1)) + end + +let init_ghost_index #_ len contents = + if len = 0 then () else init_ghost_index_aux len 0 contents + +let init_ghost_index_ #_ len contents j = init_ghost_index len contents + +let lemma_equal_instances_implies_equal_types () = () diff --git a/stage0/ulib/FStar.Seq.Base.fsti b/stage0/ulib/FStar.Seq.Base.fsti new file mode 100644 index 00000000000..9dbfeaa0c5f --- /dev/null +++ b/stage0/ulib/FStar.Seq.Base.fsti @@ -0,0 +1,232 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +(* A logical theory of sequences indexed by natural numbers in [0, n) *) +module FStar.Seq.Base +//#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 1 --max_ifuel 1" + +module List = FStar.List.Tot + +new val seq ([@@@strictly_positive] a : Type u#a) : Type u#a + +(* Destructors *) +val length: #a:Type -> seq a -> Tot nat + +val seq_to_list (#a:Type) (s:seq a) : Tot (l:list a{List.length l == length s}) + +val seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{List.length l == length s}) + +val index: #a:Type -> s:seq a -> i:nat{i < length s} -> Tot a + +val create: #a:Type -> nat -> a -> Tot (seq a) + +private val init_aux (#a:Type) (len:nat) (k:nat{k < len}) (contents:(i:nat { i < len } -> Tot a)) + :Tot (seq a) + +inline_for_extraction val init: #a:Type -> len:nat -> contents: (i:nat { i < len } -> Tot a) -> Tot (seq a) + +private val init_aux_ghost (#a:Type) (len:nat) (k:nat{k < len}) (contents:(i:nat { i < len } -> GTot a)) + : GTot (seq a) + +inline_for_extraction val init_ghost: #a:Type -> len:nat -> contents: (i:nat { i < len } -> GTot a) -> GTot (seq a) + +val empty (#a:Type) : Tot (s:(seq a){length s=0}) + +[@@(deprecated "Seq.empty")] +unfold +let createEmpty (#a:Type) + : Tot (s:(seq a){length s=0}) + = empty #a + +val lemma_empty (#a:Type) (s:seq a) : Lemma (length s = 0 ==> s == empty #a) + +val upd: #a:Type -> s:seq a -> n:nat{n < length s} -> a -> Tot (seq a) + +val append: #a:Type -> seq a -> seq a -> Tot (seq a) + +let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s + +let op_At_Bar (#a:Type) (s1:seq a) (s2:seq a) = append s1 s2 + +val slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s} -> Tot (seq a) + +(* Lemmas about seq_to_list/seq_of_list *) +val lemma_seq_of_seq_to_list : #a:Type -> s:seq a -> + Lemma + (requires True) + (ensures seq_of_list (seq_to_list s) == s) + [SMTPat (seq_of_list (seq_to_list s))] + +val lemma_seq_to_seq_of_list : #a:Type -> l:list a -> + Lemma + (requires True) + (ensures seq_to_list (seq_of_list l) == l) + [SMTPat (seq_to_list (seq_of_list l))] + +val lemma_seq_of_list_cons : #a:Type -> x:a -> l:list a -> + Lemma + (requires True) + (ensures seq_of_list (x::l) == create 1 x @| seq_of_list l) + [SMTPat (seq_of_list (x::l))] + +val lemma_seq_to_list_cons : #a:Type -> x:a -> s:seq a -> + Lemma + (requires True) + (ensures seq_to_list (cons x s) == x :: seq_to_list s) + [SMTPat (seq_to_list (cons x s))] + +(* Lemmas about length *) +val lemma_create_len: #a:Type -> n:nat -> i:a -> Lemma + (requires True) + (ensures (length (create n i) = n)) + [SMTPat (length (create n i))] + +val lemma_init_len: #a:Type -> n:nat -> contents: (i:nat { i < n } -> Tot a) -> Lemma + (requires True) + (ensures (length (init n contents) = n)) + [SMTPat (length (init n contents))] + +private val lemma_init_aux_len (#a:Type) (n:nat) (k:nat{k < n}) (contents:(i:nat{ i < n } -> Tot a)) + : Lemma (requires True) + (ensures (length (init_aux n k contents) = n - k)) + [SMTPat (length (init_aux n k contents))] + +val lemma_init_ghost_len: #a:Type -> n:nat -> contents: (i:nat { i < n } -> GTot a) -> Lemma + (requires True) + (ensures (length (init_ghost n contents) = n)) + [SMTPat (length (init_ghost n contents))] + +private val lemma_init_ghost_aux_len (#a:Type) (n:nat) (k:nat{k < n}) (contents:(i:nat{ i < n } -> GTot a)) + : Lemma (requires True) + (ensures (length (init_aux_ghost n k contents) = n - k)) + [SMTPat (length (init_aux_ghost n k contents))] + +val lemma_len_upd: #a:Type -> n:nat -> v:a -> s:seq a{n < length s} 
-> Lemma + (requires True) + (ensures (length (upd s n v) = length s)) + [SMTPat (length (upd s n v))] + +val lemma_len_append: #a:Type -> s1:seq a -> s2:seq a -> Lemma + (requires True) + (ensures (length (append s1 s2) = length s1 + length s2)) + [SMTPat (length (append s1 s2))] + +val lemma_len_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s} -> Lemma + (requires True) + (ensures (length (slice s i j) = j - i)) + [SMTPat (length (slice s i j))] + +(* Lemmas about index *) +val lemma_index_create: #a:Type -> n:nat -> v:a -> i:nat{i < n} -> Lemma + (requires True) + (ensures (index (create n v) i == v)) + [SMTPat (index (create n v) i)] + +val lemma_index_upd1: #a:Type -> s:seq a -> n:nat{n < length s} -> v:a -> Lemma + (requires True) + (ensures (index (upd s n v) n == v)) + [SMTPat (index (upd s n v) n)] + +val lemma_index_upd2: #a:Type -> s:seq a -> n:nat{n < length s} -> v:a -> i:nat{i<>n /\ i < length s} -> Lemma + (requires True) + (ensures (index (upd s n v) i == index s i)) + [SMTPat (index (upd s n v) i)] + +val lemma_index_app1: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i < length s1} -> Lemma + (requires True) + (ensures (index (append s1 s2) i == index s1 i)) + [SMTPat (index (append s1 s2) i)] + +val lemma_index_app2: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i < length s1 + length s2 /\ length s1 <= i} -> Lemma + (requires True) + (ensures (index (append s1 s2) i == index s2 (i - length s1))) + [SMTPat (index (append s1 s2) i)] + +val lemma_index_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s} -> k:nat{k < j - i} -> Lemma + (requires True) + (ensures (index (slice s i j) k == index s (k + i))) + [SMTPat (index (slice s i j) k)] + +val hasEq_lemma: a:Type -> Lemma (requires (hasEq a)) (ensures (hasEq (seq a))) [SMTPat (hasEq (seq a))] + +[@@ remove_unused_type_parameters [0; 1; 2]] +val equal (#a:Type) (s1:seq a) (s2:seq a) : Tot prop + +(* decidable equality *) +private val eq_i: + #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} + -> i:nat{i <= length s1} + -> Tot (r:bool{r <==> (forall j. (j >= i /\ j < length s1) ==> (index s1 j = index s2 j))}) + +val eq: #a:eqtype -> s1:seq a -> s2:seq a -> Tot (r:bool{r <==> equal s1 s2}) + +val lemma_eq_intro: #a:Type -> s1:seq a -> s2:seq a -> Lemma + (requires (length s1 = length s2 + /\ (forall (i:nat{i < length s1}).{:pattern (index s1 i); (index s2 i)} (index s1 i == index s2 i)))) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val lemma_eq_refl: #a:Type -> s1:seq a -> s2:seq a -> Lemma + (requires (s1 == s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val lemma_eq_elim: #a:Type -> s1:seq a -> s2:seq a -> Lemma + (requires (equal s1 s2)) + (ensures (s1==s2)) + [SMTPat (equal s1 s2)] + +(* Properties of [append] *) + +val append_assoc + (#a: Type) + (s1 s2 s3: seq a) +: Lemma + (ensures (append (append s1 s2) s3 == append s1 (append s2 s3))) + +val append_empty_l + (#a: Type) + (s: seq a) +: Lemma + (ensures (append empty s == s)) + +val append_empty_r + (#a: Type) + (s: seq a) +: Lemma + (ensures (append s empty == s)) + + +val init_index (#a:Type) (len:nat) (contents:(i:nat { i < len } -> Tot a)) + : Lemma (requires True) + (ensures (forall (i:nat{i < len}). 
index (init len contents) i == contents i)) + +val init_index_ (#a:Type) (len:nat) (contents:(i:nat { i < len } -> Tot a)) (j: nat) + : Lemma (requires j < len) + (ensures (index (init len contents) j == contents j)) + [SMTPat (index (init len contents) j)] + +val init_ghost_index (#a:Type) (len:nat) (contents:(i:nat { i < len } -> GTot a)) + : Lemma (requires True) + (ensures (forall (i:nat{i < len}). index (init_ghost len contents) i == contents i)) + +val init_ghost_index_ (#a:Type) (len:nat) (contents:(i:nat { i < len } -> GTot a)) (j: nat) + : Lemma (requires j < len) + (ensures (index (init_ghost len contents) j == contents j)) + [SMTPat (index (init_ghost len contents) j)] + +val lemma_equal_instances_implies_equal_types (_:unit) + :Lemma (forall (a:Type) (b:Type) (s1:seq a) (s2:seq b). s1 === s2 ==> a == b) diff --git a/stage0/ulib/FStar.Seq.Equiv.fst b/stage0/ulib/FStar.Seq.Equiv.fst new file mode 100644 index 00000000000..c53ae39f65e --- /dev/null +++ b/stage0/ulib/FStar.Seq.Equiv.fst @@ -0,0 +1,100 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. Rozanov +*) + +module FStar.Seq.Equiv +module CE = FStar.Algebra.CommMonoid.Equiv + +open FStar.Seq.Base +open FStar.Seq.Properties +open FStar.IntegerIntervals + +let rec eq_of_seq #c eq s1 s2 + : Tot prop (decreases length s1) = + (length s1 = length s2 /\ + (length s1 = 0 \/ ( + let s1s, s1l = un_snoc s1 in + let s2s, s2l = un_snoc s2 in + eq.eq s1l s2l /\ eq_of_seq eq s1s s2s))) + +let rec eq_of_seq_element_equality #c eq s1 s2 + : Lemma (requires eq_of_seq eq s1 s2) + (ensures (length s1 = length s2) /\ + (forall (i: under (length s1)). (index s1 i `eq.eq` index s2 i))) + (decreases length s1) = + if (length s1 > 0) then + let s1liat, s1last = un_snoc s1 in + let s2liat, s2last = un_snoc s2 in + eq_of_seq_element_equality eq s1liat s2liat + +let rec eq_of_seq_from_element_equality #c eq s1 s2 + : Lemma (requires (length s1 = length s2) /\ + (forall (i: under (length s1)). (index s1 i `eq.eq` index s2 i))) + (ensures eq_of_seq eq s1 s2) + (decreases length s1) = + if length s1 = 0 then () else + let s1liat, s1last = un_snoc s1 in + let s2liat, s2last = un_snoc s2 in + eq_of_seq_from_element_equality eq s1liat s2liat + +let eq_of_seq_condition #c eq s1 s2 + : Lemma ((length s1 = length s2) /\ + (forall (i: under (length s1)). 
(index s1 i `eq.eq` index s2 i)) <==> + eq_of_seq eq s1 s2) = + Classical.move_requires_2 (eq_of_seq_element_equality eq) s1 s2; + Classical.move_requires_2 (eq_of_seq_from_element_equality eq) s1 s2 + +let rec eq_of_seq_reflexivity #c (eq: CE.equiv c) s + : Lemma (ensures eq_of_seq eq s s) + (decreases length s) = + if length s > 0 then + let liat, last = un_snoc s in + eq_of_seq_reflexivity #c eq liat; + eq.reflexivity last + +let rec eq_of_seq_symmetry #c (eq: CE.equiv c) s1 s2 + : Lemma (requires eq_of_seq eq s1 s2) + (ensures eq_of_seq eq s2 s1) + (decreases length s1) = + if length s1 > 0 then + let lia1, las1 = un_snoc s1 in + let lia2, las2 = un_snoc s2 in + eq_of_seq_symmetry #c eq lia1 lia2; + eq.symmetry las1 las2 + +let rec eq_of_seq_transitivity #c (eq: CE.equiv c) s1 s2 s3 + : Lemma (requires eq_of_seq eq s1 s2 /\ eq_of_seq eq s2 s3) + (ensures eq_of_seq eq s1 s3) + (decreases length s1) = + if length s1 > 0 then + let lia1, las1 = un_snoc s1 in + let lia2, las2 = un_snoc s2 in + let lia3, las3 = un_snoc s3 in + eq_of_seq_transitivity #c eq lia1 lia2 lia3; + eq.transitivity las1 las2 las3 + +let seq_equiv #c (eq: CE.equiv c) = + CE.EQ (eq_of_seq eq) (eq_of_seq_reflexivity eq) + (eq_of_seq_symmetry eq) + (eq_of_seq_transitivity eq) + +let eq_of_seq_unsnoc #c eq m (s1 s2: (z:seq c{length z==m})) + : Lemma (requires eq_of_seq eq s1 s2) + (ensures eq.eq (snd (un_snoc s1)) (snd (un_snoc s2)) /\ + eq_of_seq eq (fst (un_snoc s1)) (fst (un_snoc s2))) = + eq_of_seq_element_equality eq s1 s2; + eq_of_seq_from_element_equality eq (fst (un_snoc s1)) (fst (un_snoc s2)) diff --git a/stage0/ulib/FStar.Seq.Equiv.fsti b/stage0/ulib/FStar.Seq.Equiv.fsti new file mode 100644 index 00000000000..3e7c8d9de11 --- /dev/null +++ b/stage0/ulib/FStar.Seq.Equiv.fsti @@ -0,0 +1,62 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: A. Rozanov +*) + + +(* Sequence equivalence relation (see FStar.Algebra.CommMonoid.Equiv) +*) + +module FStar.Seq.Equiv +module CE = FStar.Algebra.CommMonoid.Equiv +open FStar.Seq.Base +open FStar.Seq.Properties +open FStar.IntegerIntervals + +val eq_of_seq (#c:_) (eq:CE.equiv c) (s1 s2: seq c) : prop + +val eq_of_seq_element_equality (#c:_) (eq: CE.equiv c) (s1 s2: seq c) + : Lemma (requires eq_of_seq eq s1 s2) + (ensures length s1 = length s2 /\ + (forall (i: under (length s1)). (index s1 i `eq.eq` index s2 i))) + +val eq_of_seq_from_element_equality (#c:_) (eq: CE.equiv c) (s1 s2: seq c) + : Lemma (requires (length s1 = length s2) /\ + (forall (i: under (length s1)). (index s1 i `eq.eq` index s2 i))) + (ensures eq_of_seq eq s1 s2) + +val eq_of_seq_condition (#c:_) (eq: CE.equiv c) (s1 s2: seq c) + : Lemma ((length s1 = length s2) /\ + (forall (i: under (length s1)). 
(index s1 i `eq.eq` index s2 i)) <==> + eq_of_seq eq s1 s2) + +val eq_of_seq_reflexivity (#c:_) (eq: CE.equiv c) (s: seq c) + : Lemma (ensures eq_of_seq eq s s) + +val eq_of_seq_symmetry (#c:_) (eq: CE.equiv c) (s1 s2: seq c) + : Lemma (requires eq_of_seq eq s1 s2) + (ensures eq_of_seq eq s2 s1) + +val eq_of_seq_transitivity (#c:_) (eq: CE.equiv c) (s1 s2 s3: seq c) + : Lemma (requires eq_of_seq eq s1 s2 /\ eq_of_seq eq s2 s3) + (ensures eq_of_seq eq s1 s3) + +val seq_equiv (#c:_) (eq:CE.equiv c) : (CE.equiv (seq c)) + +val eq_of_seq_unsnoc (#c:_) (eq:CE.equiv c) (m:pos) (s1 s2: (z:seq c{length z==m})) + : Lemma (requires eq_of_seq eq s1 s2) + (ensures eq.eq (snd (un_snoc s1)) (snd (un_snoc s2)) /\ + eq_of_seq eq (fst (un_snoc s1)) (fst (un_snoc s2))) diff --git a/stage0/ulib/FStar.Seq.Permutation.fst b/stage0/ulib/FStar.Seq.Permutation.fst new file mode 100644 index 00000000000..5dd0a3484e5 --- /dev/null +++ b/stage0/ulib/FStar.Seq.Permutation.fst @@ -0,0 +1,736 @@ +(* + Copyright 2021-2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: N. Swamy, A. Rastogi, A. Rozanov +*) +module FStar.Seq.Permutation +open FStar.Seq +open FStar.Calc + +[@@"opaque_to_smt"] +let is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) = + Seq.length s0 == Seq.length s1 /\ + (forall x y. // {:pattern f x; f y} + x <> y ==> f x <> f y) /\ + (forall (i:nat{i < Seq.length s0}). // {:pattern (Seq.index s1 (f i))} + Seq.index s0 i == Seq.index s1 (f i)) + +let reveal_is_permutation #a (s0 s1:seq a) (f:index_fun s0) + = reveal_opaque (`%is_permutation) (is_permutation s0 s1 f) + +let reveal_is_permutation_nopats (#a:Type) (s0 s1:seq a) (f:index_fun s0) + : Lemma (is_permutation s0 s1 f <==> + + Seq.length s0 == Seq.length s1 /\ + + (forall x y. x <> y ==> f x <> f y) /\ + + (forall (i:nat{i < Seq.length s0}). 
+ Seq.index s0 i == Seq.index s1 (f i))) + = reveal_is_permutation s0 s1 f + +let split3_index (#a:eqtype) (s0:seq a) (x:a) (s1:seq a) (j:nat) + : Lemma + (requires j < Seq.length (Seq.append s0 s1)) + (ensures ( + let s = Seq.append s0 (Seq.cons x s1) in + let s' = Seq.append s0 s1 in + let n = Seq.length s0 in + if j < n then Seq.index s' j == Seq.index s j + else Seq.index s' j == Seq.index s (j + 1) + )) + = let n = Seq.length (Seq.append s0 s1) in + if j < n then () + else () + +#push-options "--fuel 2 --ifuel 0 --z3rlimit_factor 2" +let rec find (#a:eqtype) (x:a) (s:seq a{ count x s > 0 }) + : Tot (frags:(seq a & seq a) { + let s' = Seq.append (fst frags) (snd frags) in + let n = Seq.length (fst frags) in + s `Seq.equal` Seq.append (fst frags) (Seq.cons x (snd frags)) + }) (decreases (Seq.length s)) + = if Seq.head s = x + then Seq.empty, Seq.tail s + else ( + let pfx, sfx = find x (Seq.tail s) in + assert (Seq.equal (Seq.tail s) + (Seq.append pfx (Seq.cons x sfx))); + assert (Seq.equal s + (Seq.cons (Seq.head s) (Seq.tail s))); + Seq.cons (Seq.head s) pfx, sfx + ) +#pop-options + +let introduce_is_permutation (#a:Type) (s0:seq a) (s1:seq a) + (f:index_fun s0) + (_:squash (Seq.length s0 == Seq.length s1)) + (_:squash (forall x y. x <> y ==> f x <> f y)) + (_:squash (forall (i:nat{i < Seq.length s0}). Seq.index s0 i == Seq.index s1 (f i))) + : Lemma + (ensures + is_permutation s0 s1 f) + = reveal_is_permutation_nopats s0 s1 f + +let reveal_is_permutation_pats (#a:Type) (s0 s1:seq a) (f:index_fun s0) + : Lemma (is_permutation s0 s1 f <==> + + Seq.length s0 == Seq.length s1 /\ + + (forall x y. {:pattern f x; f y } x <> y ==> f x <> f y) /\ + + (forall (i:nat{i < Seq.length s0}). {:pattern (Seq.index s0 i)} + Seq.index s0 i == Seq.index s1 (f i))) + = reveal_is_permutation s0 s1 f + +let adapt_index_fun (s:Seq.seq 'a { Seq.length s > 0 }) + (f:index_fun (Seq.tail s)) + (n:nat{n < Seq.length s}) + : index_fun s + = fun i -> if i = 0 + then n + else if f (i - 1) < n + then f (i - 1) + else f (i - 1) + 1 + +let count_singleton_one (#a:eqtype) (x:a) + : Lemma (count x (Seq.create 1 x) == 1) + = () +let count_singleton_zero (#a:eqtype) (x y:a) + : Lemma (x =!= y ==> count x (Seq.create 1 y) == 0) + = () +let equal_counts_empty (#a:eqtype) (s0 s1:seq a) + : Lemma + (requires Seq.length s0 == 0 /\ (forall x. count x s0 == count x s1)) + (ensures Seq.length s1 == 0) + = if Seq.length s1 > 0 then + assert (count (Seq.head s1) s1 > 0) +let count_head (#a:eqtype) (x:seq a{ Seq.length x > 0 }) + : Lemma (count (Seq.head x) x > 0) + = () + + +#push-options "--fuel 0" +#restart-solver +let rec permutation_from_equal_counts (#a:eqtype) (s0:seq a) (s1:seq a{(forall x. count x s0 == count x s1)}) + : Tot (seqperm s0 s1) + (decreases (Seq.length s0)) + = if Seq.length s0 = 0 + then ( + let f : index_fun s0 = fun i -> i in + reveal_is_permutation_pats s0 s1 f; + equal_counts_empty s0 s1; + f + ) else ( + count_head s0; + assert (count (Seq.head s0) s0 > 0); + let pfx, sfx = find (Seq.head s0) s1 in + introduce forall x. 
count x (Seq.tail s0) == count x (Seq.append pfx sfx) + with ( + if x = Seq.head s0 + then ( + calc (eq2 #int) { + count x (Seq.tail s0) <: int; + (==) { + assert (s0 `Seq.equal` Seq.cons (Seq.head s0) (Seq.tail s0)); + lemma_append_count_aux (Seq.head s0) (Seq.create 1 (Seq.head s0)) (Seq.tail s0); + count_singleton_one x + } + count x s0 - 1 <: int; + (==) {} + count x s1 - 1 <: int; + (==) {} + count x (Seq.append pfx (Seq.cons (Seq.head s0) sfx)) - 1 <: int; + (==) { lemma_append_count_aux x pfx (Seq.cons (Seq.head s0) sfx) } + count x pfx + count x (Seq.cons (Seq.head s0) sfx) - 1 <: int; + (==) { lemma_append_count_aux x (Seq.create 1 (Seq.head s0)) sfx } + count x pfx + (count x (Seq.create 1 (Seq.head s0)) + count x sfx) - 1 <: int; + (==) { count_singleton_one x } + count x pfx + count x sfx <: int; + (==) { lemma_append_count_aux x pfx sfx } + count x (Seq.append pfx sfx) <: int; + } + ) + else ( + calc (==) { + count x (Seq.tail s0); + (==) { + assert (s0 `Seq.equal` Seq.cons (Seq.head s0) (Seq.tail s0)); + lemma_append_count_aux x (Seq.create 1 (Seq.head s0)) (Seq.tail s0); + count_singleton_zero x (Seq.head s0) + } + count x s0; + (==) { } + count x s1; + (==) { } + count x (Seq.append pfx (Seq.cons (Seq.head s0) sfx)); + (==) { lemma_append_count_aux x pfx (Seq.cons (Seq.head s0) sfx) } + count x pfx + count x (Seq.cons (Seq.head s0) sfx); + (==) { lemma_append_count_aux x (Seq.create 1 (Seq.head s0)) sfx } + count x pfx + (count x (Seq.create 1 (Seq.head s0)) + count x sfx) ; + (==) { count_singleton_zero x (Seq.head s0) } + count x pfx + count x sfx; + (==) { lemma_append_count_aux x pfx sfx } + count x (Seq.append pfx sfx); + } + ) + ); + let s1' = (Seq.append pfx sfx) in + let f' = permutation_from_equal_counts (Seq.tail s0) s1' in + reveal_is_permutation_pats (Seq.tail s0) s1' f'; + let n = Seq.length pfx in + let f : index_fun s0 = adapt_index_fun s0 f' n in + assert (Seq.length s0 == Seq.length s1); + let len_eq : squash (Seq.length s0 == Seq.length s1) = () in + assert (forall x y. x <> y ==> f' x <> f' y); + let neq = + introduce forall x y. x <> y ==> f x <> f y + with (introduce _ ==> _ + with _. ( + if f x = n || f y = n + then () + else if f' (x - 1) < n + then ( + assert (f x == f' (x - 1)); + if f' (y - 1) < n + then assert (f y == f' (y - 1)) + else assert (f y == f' (y - 1) + 1) + ) + else ( + assert (f x == f' (x - 1) + 1); + if f' (y - 1) < n + then assert (f y == f' (y - 1)) + else assert (f y == f' (y - 1) + 1) + ) + ) + ) + in + let perm_prop = + introduce forall (i:nat{i < Seq.length s0}). Seq.index s0 i == Seq.index s1 (f i) + with ( + if i = 0 then () + else if f' (i - 1) < n + then ( + assert (f i == f' (i - 1)); + assert (Seq.index (Seq.tail s0) (i - 1) == + Seq.index s1' (f' (i - 1))) + ) + else ( + assert (f i = f' (i - 1) + 1); + assert (Seq.index (Seq.tail s0) (i - 1) == + Seq.index s1' (f' (i - 1))) + ) + ) + in + introduce_is_permutation s0 s1 f len_eq neq perm_prop; + f) +#pop-options + + +module CE = FStar.Algebra.CommMonoid.Equiv + +let elim_monoid_laws #a #eq (m:CE.cm a eq) + : Lemma ( + (forall x y. {:pattern m.mult x y} eq.eq (m.mult x y) (m.mult y x)) /\ + (forall x y z.{:pattern (m.mult x (m.mult y z))} eq.eq (m.mult x (m.mult y z)) (m.mult (m.mult x y) z)) /\ + (forall x.{:pattern (m.mult x m.unit)} eq.eq (m.mult x m.unit) x) + ) + = introduce forall x y. eq.eq (m.mult x y) (m.mult y x) + with ( m.commutativity x y ); + + introduce forall x y z. 
eq.eq (m.mult x (m.mult y z)) (m.mult (m.mult x y) z) + with ( m.associativity x y z; + eq.symmetry (m.mult (m.mult x y) z) (m.mult x (m.mult y z)) ); + + introduce forall x. eq.eq (m.mult x m.unit) x + with ( CE.right_identity eq m x ) + +let rec foldm_snoc_unit_seq (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:Seq.seq a) + : Lemma (requires Seq.equal s (Seq.create (Seq.length s) m.unit)) + (ensures eq.eq (foldm_snoc m s) m.unit) + (decreases Seq.length s) + = CE.elim_eq_laws eq; + elim_monoid_laws m; + if Seq.length s = 0 + then () + else let s_tl, _ = un_snoc s in + foldm_snoc_unit_seq m s_tl + +#push-options "--fuel 2" +let foldm_snoc_singleton (#a:_) #eq (m:CE.cm a eq) (x:a) + : Lemma (eq.eq (foldm_snoc m (Seq.create 1 x)) x) + = elim_monoid_laws m +#pop-options + +let x_yz_to_y_xz #a #eq (m:CE.cm a eq) (x y z:a) + : Lemma ((x `m.mult` (y `m.mult` z)) + `eq.eq` + (y `m.mult` (x `m.mult` z))) + = CE.elim_eq_laws eq; + elim_monoid_laws m; + calc (eq.eq) { + x `m.mult` (y `m.mult` z); + (eq.eq) { m.commutativity x (y `m.mult` z) } + (y `m.mult` z) `m.mult` x; + (eq.eq) { m.associativity y z x } + y `m.mult` (z `m.mult` x); + (eq.eq) { m.congruence y (z `m.mult` x) y (x `m.mult` z) } + y `m.mult` (x `m.mult` z); + } + +let lemma_un_snoc_append (#a:Type) (s1 : seq a) (s2 : seq a {Seq.length s2 <> 0}) + : Lemma (fst (Seq.un_snoc (append s1 s2)) == append s1 (fst (Seq.un_snoc s2))) + = assert (Seq.equal (fst (Seq.un_snoc (append s1 s2))) + (append s1 (fst (Seq.un_snoc s2)))) + +#push-options "--fuel 1 --ifuel 0" +let rec foldm_snoc_append #a #eq (m:CE.cm a eq) (s1 s2: seq a) + : Lemma + (ensures eq.eq (foldm_snoc m (append s1 s2)) + (m.mult (foldm_snoc m s1) (foldm_snoc m s2))) + (decreases (Seq.length s2)) + = CE.elim_eq_laws eq; + elim_monoid_laws m; + if Seq.length s2 = 0 + then assert (Seq.append s1 s2 `Seq.equal` s1) + else ( + let s2', last = Seq.un_snoc s2 in + calc (eq.eq) + { + foldm_snoc m (append s1 s2); + (eq.eq) { assert (Seq.equal (append s1 s2) + (Seq.snoc (append s1 s2') last)) } + foldm_snoc m (Seq.snoc (append s1 s2') last); + (eq.eq) { + lemma_un_snoc_append s1 s2; + assert (Seq.equal (fst (Seq.un_snoc (append s1 s2))) (append s1 s2')) + } + + m.mult last (foldm_snoc m (append s1 s2')); + (eq.eq) { foldm_snoc_append m s1 s2'; + m.congruence last (foldm_snoc m (append s1 s2')) + last (m.mult (foldm_snoc m s1) (foldm_snoc m s2')) } + m.mult last (m.mult (foldm_snoc m s1) (foldm_snoc m s2')); + (eq.eq) { x_yz_to_y_xz m last (foldm_snoc m s1) (foldm_snoc m s2') } + m.mult (foldm_snoc m s1) (m.mult last (foldm_snoc m s2')); + (eq.eq) { } + m.mult (foldm_snoc m s1) (foldm_snoc m s2); + }) +#pop-options + +let foldm_snoc_sym #a #eq (m:CE.cm a eq) (s1 s2: seq a) + : Lemma + (ensures eq.eq (foldm_snoc m (append s1 s2)) (foldm_snoc m (append s2 s1))) + = CE.elim_eq_laws eq; + elim_monoid_laws m; + foldm_snoc_append m s1 s2; + foldm_snoc_append m s2 s1 + +#push-options "--fuel 0" +let foldm_snoc3 #a #eq (m:CE.cm a eq) (s1:seq a) (x:a) (s2:seq a) + : Lemma (eq.eq (foldm_snoc m (Seq.append s1 (Seq.cons x s2))) + (m.mult x (foldm_snoc m (Seq.append s1 s2)))) + = CE.elim_eq_laws eq; + elim_monoid_laws m; + calc (eq.eq) + { + foldm_snoc m (Seq.append s1 (Seq.cons x s2)); + (eq.eq) { foldm_snoc_append m s1 (Seq.cons x s2) } + m.mult (foldm_snoc m s1) (foldm_snoc m (Seq.cons x s2)); + (eq.eq) { foldm_snoc_append m (Seq.create 1 x) s2; + m.congruence (foldm_snoc m s1) + (foldm_snoc m (Seq.cons x s2)) + (foldm_snoc m s1) + (m.mult (foldm_snoc m (Seq.create 1 x)) (foldm_snoc m s2)) } + m.mult 
(foldm_snoc m s1) (m.mult (foldm_snoc m (Seq.create 1 x)) (foldm_snoc m s2)); + (eq.eq) { foldm_snoc_singleton m x; + m.congruence (foldm_snoc m (Seq.create 1 x)) + (foldm_snoc m s2) + x + (foldm_snoc m s2); + m.congruence (foldm_snoc m s1) + (m.mult (foldm_snoc m (Seq.create 1 x)) (foldm_snoc m s2)) + (foldm_snoc m s1) + (m.mult x (foldm_snoc m s2)) } + m.mult (foldm_snoc m s1) (m.mult x (foldm_snoc m s2)); + (eq.eq) { x_yz_to_y_xz m (foldm_snoc m s1) x (foldm_snoc m s2) } + m.mult x (m.mult (foldm_snoc m s1) (foldm_snoc m s2)); + (eq.eq) { foldm_snoc_append m s1 s2; + m.congruence x + (m.mult (foldm_snoc m s1) (foldm_snoc m s2)) + x + (foldm_snoc m (Seq.append s1 s2)) } + m.mult x (foldm_snoc m (Seq.append s1 s2)); + } +#pop-options + + +let remove_i #a (s:seq a) (i:nat{i < Seq.length s}) + : a & seq a + = let s0, s1 = Seq.split s i in + Seq.head s1, Seq.append s0 (Seq.tail s1) + +#push-options "--using_facts_from '* -FStar.Seq.Properties.slice_slice'" +let shift_perm' #a + (s0 s1:seq a) + (_:squash (Seq.length s0 == Seq.length s1 /\ Seq.length s0 > 0)) + (p:seqperm s0 s1) + : Tot (seqperm (fst (Seq.un_snoc s0)) + (snd (remove_i s1 (p (Seq.length s0 - 1))))) + = reveal_is_permutation s0 s1 p; + let s0', last = Seq.un_snoc s0 in + let n = Seq.length s0' in + let p' (i:nat{ i < n }) + : j:nat{ j < n } + = if p i < p n then p i else p i - 1 + in + let _, s1' = remove_i s1 (p n) in + reveal_is_permutation_nopats s0' s1' p'; + p' +#pop-options + +let shift_perm #a + (s0 s1:seq a) + (_:squash (Seq.length s0 == Seq.length s1 /\ Seq.length s0 > 0)) + (p:seqperm s0 s1) + : Pure (seqperm (fst (Seq.un_snoc s0)) + (snd (remove_i s1 (p (Seq.length s0 - 1))))) + (requires True) + (ensures fun _ -> let n = Seq.length s0 - 1 in + Seq.index s1 (p n) == + Seq.index s0 n) + = reveal_is_permutation s0 s1 p; + shift_perm' s0 s1 () p + +let seqperm_len #a (s0 s1:seq a) + (p:seqperm s0 s1) + : Lemma + (ensures Seq.length s0 == Seq.length s1) + = reveal_is_permutation s0 s1 p + +let eq2_eq #a (eq:CE.equiv a) (x y:a) + : Lemma (requires x == y) + (ensures x `eq.eq` y) + = eq.reflexivity x + +(* The sequence indexing lemmas make this quite fiddly *) +#push-options "--z3rlimit_factor 2 --fuel 1 --ifuel 0" +let rec foldm_snoc_perm #a #eq m s0 s1 p + : Lemma + (ensures eq.eq (foldm_snoc m s0) (foldm_snoc m s1)) + (decreases (Seq.length s0)) + = //for getting calc chain to compose + CE.elim_eq_laws eq; + seqperm_len s0 s1 p; + if Seq.length s0 = 0 then ( + assert (Seq.equal s0 s1); + eq2_eq eq (foldm_snoc m s0) (foldm_snoc m s1) + ) + else ( + let n0 = Seq.length s0 - 1 in + let prefix, last = Seq.un_snoc s0 in + let prefix', suffix0 = Seq.split s1 (p n0) in + assert (Seq.equal s1 (Seq.append prefix' suffix0)); + let last', suffix' = Seq.head suffix0, Seq.tail suffix0 in + assert (Seq.cons last' suffix' `Seq.equal` suffix0); + assert (s1 `Seq.equal` Seq.append prefix' (Seq.cons last' suffix')); + let s1' = snd (remove_i s1 (p n0)) in + let p' : seqperm prefix s1' = shift_perm s0 s1 () p in + assert (last == last'); + calc + (eq.eq) + { + foldm_snoc m s1; + (eq.eq) { eq2_eq eq (foldm_snoc m s1) + (foldm_snoc m (Seq.append prefix' (Seq.cons last' suffix'))) } + foldm_snoc m (Seq.append prefix' (Seq.cons last' suffix')); + (eq.eq) { foldm_snoc3 m prefix' last' suffix' } + m.mult last' (foldm_snoc m (append prefix' suffix')); + (eq.eq) { assert (Seq.equal (append prefix' suffix') s1'); + eq2_eq eq (m.mult last' (foldm_snoc m (append prefix' suffix'))) + (m.mult last' (foldm_snoc m s1')) } + m.mult last' (foldm_snoc m 
s1'); + (eq.eq) { foldm_snoc_perm m prefix s1' p'; + eq.symmetry (foldm_snoc m prefix) (foldm_snoc m s1'); + eq.reflexivity last'; + m.congruence last' + (foldm_snoc m s1') + last' + (foldm_snoc m prefix) } + m.mult last' (foldm_snoc m prefix); + (eq.eq) { eq2_eq eq (m.mult last' (foldm_snoc m prefix)) + (foldm_snoc m s0) } + foldm_snoc m s0; + }; + eq.symmetry (foldm_snoc m s1) (foldm_snoc m s0)) +#pop-options + +//////////////////////////////////////////////////////////////////////////////// +// foldm_snoc_split +//////////////////////////////////////////////////////////////////////////////// + +(* Some utilities to introduce associativity-commutativity reasoning on + CM using quantified formulas with patterns. + + Use these with care, since with large terms the SMT solver may end up + with an explosion of instantiations +*) +let cm_associativity #c #eq (cm: CE.cm c eq) + : Lemma (forall (x y z:c). {:pattern (x `cm.mult` y `cm.mult` z)} + (x `cm.mult` y `cm.mult` z) `eq.eq` (x `cm.mult` (y `cm.mult` z))) + = Classical.forall_intro_3 (Classical.move_requires_3 cm.associativity) + +let cm_commutativity #c #eq (cm: CE.cm c eq) + : Lemma (forall (x y:c). {:pattern (x `cm.mult` y)} + (x `cm.mult` y) `eq.eq` (y `cm.mult` x)) + = Classical.forall_intro_2 (Classical.move_requires_2 cm.commutativity) + +(* A utility to introduce the equivalence relation laws into the context. + FStar.Algebra.CommutativeMonoid provides something similar, but this + version provides a more goal-directed pattern for transitivity. + We should consider changing FStar.Algebra.CommutativeMonoid *) +let elim_eq_laws #a (eq:CE.equiv a) + : Lemma ( + (forall x.{:pattern (x `eq.eq` x)} x `eq.eq` x) /\ + (forall x y.{:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\ + (forall x y z.{:pattern eq.eq x y; eq.eq x z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z) + ) + = CE.elim_eq_laws eq + +let fold_decomposition_aux #c #eq (cm: CE.cm c eq) + (n0: int) + (nk: int{nk=n0}) + (expr1 expr2: (ifrom_ito n0 nk) -> c) + : Lemma (foldm_snoc cm (init (closed_interval_size n0 nk) + (init_func_from_expr (func_sum cm expr1 expr2) n0 nk)) `eq.eq` + cm.mult (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr expr1 n0 nk))) + (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr expr2 n0 nk)))) + = elim_eq_laws eq; + let sum_of_funcs (i: under (closed_interval_size n0 nk)) + = expr1 (n0+i) `cm.mult` expr2 (n0+i) in + lemma_eq_elim (init (closed_interval_size n0 nk) sum_of_funcs) + (create 1 (expr1 n0 `cm.mult` expr2 n0)); + foldm_snoc_singleton cm (expr1 n0 `cm.mult` expr2 n0); + let ts = (init (closed_interval_size n0 nk) sum_of_funcs) in + let ts1 = (init (nk+1-n0) (fun i -> expr1 (n0+i))) in + let ts2 = (init (nk+1-n0) (fun i -> expr2 (n0+i))) in + assert (foldm_snoc cm ts `eq.eq` sum_of_funcs (nk-n0)); // this assert speeds up the proof. 
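+ // Base case (nk = n0): ts1 and ts2 are singleton sequences, so each fold collapses to its only element (expr1 nk and expr2 nk); the congruence below then combines the two singleton folds.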
+ foldm_snoc_singleton cm (expr1 nk); + foldm_snoc_singleton cm (expr2 nk); + cm.congruence (foldm_snoc cm ts1) (foldm_snoc cm ts2) (expr1 nk) (expr2 nk) + +let aux_shuffle_lemma #c #eq (cm: CE.cm c eq) + (s1 s2 l1 l2: c) + : Lemma (((s1 `cm.mult` s2) `cm.mult` (l1 `cm.mult` l2)) `eq.eq` + ((s1 `cm.mult` l1) `cm.mult` (s2 `cm.mult` l2))) + = elim_eq_laws eq; + cm_commutativity cm; + cm_associativity cm; + let (+) = cm.mult in + cm.congruence (s1+s2) l1 (s2+s1) l1; + cm.congruence ((s1+s2)+l1) l2 ((s2+s1)+l1) l2; + cm.congruence ((s2+s1)+l1) l2 (s2+(s1+l1)) l2; + cm.congruence (s2+(s1+l1)) l2 ((s1+l1)+s2) l2 + + +#push-options "--ifuel 0 --fuel 1 --z3rlimit 40" +(* This proof is quite delicate, for several reasons: + - It's working with higher order functions that are non-trivially dependently typed, + notably on the ranges of indexes they manipulate + + - When using the induction hypothesis (i.e., on a recursive call), what we get + is a property about the function at a different type, i.e., `range_count n0 (nk - 1)`. + + - If left to the SMT solver alone, these higher order functions + at slightly different types cannot be proven equal and the + proof fails, often mysteriously. + + - To have something more robust, I rewrote this function to + return a squash proof, and then to coerce this proof to the + type needed, where the F* unifier/normalization machinery can + help, rather than leaving it purely to SMT, which is what + happens when the property is stated as a postcondition of a + Lemma +*) +let rec foldm_snoc_split' #c #eq (cm: CE.cm c eq) + (n0: int) + (nk: not_less_than n0) + (expr1 expr2: (ifrom_ito n0 nk) -> c) + : Tot (squash (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr (func_sum cm expr1 expr2) n0 nk)) `eq.eq` + cm.mult (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr expr1 n0 nk))) + (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr expr2 n0 nk))))) + (decreases nk-n0) + = if (nk=n0) + then fold_decomposition_aux cm n0 nk expr1 expr2 + else ( + cm_commutativity cm; + elim_eq_laws eq; + let lfunc_up_to (nf: ifrom_ito n0 nk) = init_func_from_expr (func_sum cm expr1 expr2) n0 nf in + let range_count = closed_interval_size in + let full_count = range_count n0 nk in + let sub_count = range_count n0 (nk-1) in + let fullseq = init full_count (lfunc_up_to nk) in + let rfunc_1_up_to (nf: ifrom_ito n0 nk) = init_func_from_expr expr1 n0 nf in + let rfunc_2_up_to (nf: ifrom_ito n0 nk) = init_func_from_expr expr2 n0 nf in + let fullseq_r1 = init full_count (rfunc_1_up_to nk) in + let fullseq_r2 = init full_count (rfunc_2_up_to nk) in + let subseq = init sub_count (lfunc_up_to nk) in + let subfold = foldm_snoc cm subseq in + let last = lfunc_up_to nk sub_count in + lemma_eq_elim (fst (un_snoc fullseq)) subseq; // subseq is literally (liat fullseq) + let fullfold = foldm_snoc cm fullseq in + let subseq_r1 = init sub_count (rfunc_1_up_to nk) in + let subseq_r2 = init sub_count (rfunc_2_up_to nk) in + lemma_eq_elim (fst (un_snoc fullseq_r1)) subseq_r1; // subseq is literally (liat fullseq) + lemma_eq_elim (fst (un_snoc fullseq_r2)) subseq_r2; // subseq is literally (liat fullseq) + lemma_eq_elim (init sub_count (lfunc_up_to nk)) subseq; + lemma_eq_elim (init sub_count (lfunc_up_to (nk-1))) subseq; + lemma_eq_elim subseq_r1 (init sub_count (rfunc_1_up_to (nk-1))); + lemma_eq_elim subseq_r2 (init sub_count (rfunc_2_up_to (nk-1))); + let fullfold_r1 = foldm_snoc cm fullseq_r1 in + let fullfold_r2 = foldm_snoc cm 
fullseq_r2 in + let subfold_r1 = foldm_snoc cm subseq_r1 in + let subfold_r2 = foldm_snoc cm subseq_r2 in + cm.congruence (foldm_snoc cm (init sub_count (rfunc_1_up_to (nk-1)))) + (foldm_snoc cm (init sub_count (rfunc_2_up_to (nk-1)))) + subfold_r1 subfold_r2; + let last_r1 = rfunc_1_up_to nk sub_count in + let last_r2 = rfunc_2_up_to nk sub_count in + let nk' = nk - 1 in + (* here's the nasty bit with where we have to massage the proof from the induction hypothesis *) + let ih + : squash ((foldm_snoc cm (init (range_count n0 nk') (init_func_from_expr #_ #n0 #nk' (func_sum #(ifrom_ito n0 nk') cm expr1 expr2) n0 nk')) `eq.eq` + cm.mult (foldm_snoc cm (init (range_count n0 nk') (init_func_from_expr #_ #n0 #nk' expr1 n0 nk'))) + (foldm_snoc cm (init (range_count n0 nk') (init_func_from_expr #_ #n0 #nk' expr2 n0 nk'))))) + = foldm_snoc_split' cm n0 nk' expr1 expr2 + in + calc (==) { + subfold; + == { } + foldm_snoc cm subseq; + == { assert (Seq.equal subseq + (init (range_count n0 nk') + (init_func_from_expr #_ #n0 #nk' + (func_sum #(ifrom_ito n0 nk') cm expr1 expr2) n0 nk'))) } + foldm_snoc cm (init (range_count n0 nk') + (init_func_from_expr #_ #n0 #nk' + (func_sum #(ifrom_ito n0 nk') cm expr1 expr2) n0 nk')); + }; + assert (Seq.equal subseq_r1 (init (range_count n0 nk') (init_func_from_expr #_ #n0 #nk' expr1 n0 nk'))); + assert (Seq.equal subseq_r2 (init (range_count n0 nk') (init_func_from_expr #_ #n0 #nk' expr2 n0 nk'))); + let _ : squash (subfold `eq.eq` (subfold_r1 `cm.mult` subfold_r2)) = ih in + cm.congruence subfold last (subfold_r1 `cm.mult` subfold_r2) last; + aux_shuffle_lemma cm subfold_r1 subfold_r2 (rfunc_1_up_to nk sub_count) (rfunc_2_up_to nk sub_count); + cm.congruence (subfold_r1 `cm.mult` (rfunc_1_up_to nk sub_count)) (subfold_r2 `cm.mult` (rfunc_2_up_to nk sub_count)) + (foldm_snoc cm fullseq_r1) (foldm_snoc cm fullseq_r2) + ) +#pop-options + +/// Finally, package the proof up into a Lemma, as expected by the interface +let foldm_snoc_split #c #eq (cm: CE.cm c eq) + (n0: int) + (nk: not_less_than n0) + (expr1 expr2: (ifrom_ito n0 nk) -> c) + = foldm_snoc_split' cm n0 nk expr1 expr2 + +open FStar.Seq.Equiv + +let rec foldm_snoc_equality #c #eq (add: CE.cm c eq) (s t: seq c) + : Lemma (requires length s == length t /\ eq_of_seq eq s t) + (ensures foldm_snoc add s `eq.eq` foldm_snoc add t) + (decreases length s) = + if length s = 0 then ( + assert_norm (foldm_snoc add s == add.unit); + assert_norm (foldm_snoc add t == add.unit); + eq.reflexivity add.unit + ) + else ( + let sliat, slast = un_snoc s in + let tliat, tlast = un_snoc t in + eq_of_seq_unsnoc eq (length s) s t; + foldm_snoc_equality add sliat tliat; + add.congruence slast (foldm_snoc add sliat) + tlast (foldm_snoc add tliat) + ) + +let foldm_snoc_split_seq #c #eq (add: CE.cm c eq) + (s: seq c) (t: seq c{length s == length t}) + (sum_seq: seq c{length sum_seq == length s}) + (proof: (i: under (length s)) -> Lemma ((index s i `add.mult` index t i) + `eq.eq` (index sum_seq i))) + : Lemma ((foldm_snoc add s `add.mult` foldm_snoc add t) `eq.eq` + (foldm_snoc add sum_seq)) = + if length s = 0 then add.identity add.unit + else + let n = length s in + let index_1 (i:under n) = index s i in + let index_2 (i:under n) = index t i in + let nk = (n-1) in + assert (n == closed_interval_size 0 nk); + let index_sum = func_sum add index_1 index_2 in + let expr1 = init_func_from_expr #c #0 #(n-1) index_1 0 nk in + let expr2 = init_func_from_expr #c #0 #(n-1) index_2 0 nk in + let expr_sum = init_func_from_expr #c #0 #(n-1) index_sum 
0 nk in + Classical.forall_intro eq.reflexivity; + Classical.forall_intro_2 (Classical.move_requires_2 eq.symmetry); + Classical.forall_intro_3 (Classical.move_requires_3 eq.transitivity); + foldm_snoc_split add 0 (n-1) index_1 index_2; + assert (foldm_snoc add (init n (expr_sum)) `eq.eq` + add.mult (foldm_snoc add (init n expr1)) (foldm_snoc add (init n expr2))); + lemma_eq_elim s (init n expr1); + lemma_eq_elim t (init n expr2); + Classical.forall_intro proof; + eq_of_seq_from_element_equality eq (init n expr_sum) sum_seq; + foldm_snoc_equality add (init n expr_sum) sum_seq ; + + assert (eq.eq (foldm_snoc add (init n expr_sum)) + (foldm_snoc add sum_seq)); + assert (foldm_snoc add s == foldm_snoc add (init n expr1)); + assert (foldm_snoc add t == foldm_snoc add (init n expr2)); + assert (add.mult (foldm_snoc add s) (foldm_snoc add t) `eq.eq` + foldm_snoc add (init n (expr_sum))); + eq.transitivity (add.mult (foldm_snoc add s) (foldm_snoc add t)) + (foldm_snoc add (init n (expr_sum))) + (foldm_snoc add sum_seq) + +let rec foldm_snoc_of_equal_inits #c #eq #m (cm: CE.cm c eq) + (f: (under m) -> c) + (g: (under m) -> c) + : Lemma (requires (forall (i: under m). f i `eq.eq` g i)) + (ensures foldm_snoc cm (init m f) `eq.eq` + foldm_snoc cm (init m g)) = + if m=0 then begin + assert_norm (foldm_snoc cm (init m f) == cm.unit); + assert_norm (foldm_snoc cm (init m g) == cm.unit); + eq.reflexivity cm.unit + end else + if m=1 then begin + foldm_snoc_singleton cm (f 0); + foldm_snoc_singleton cm (g 0); + eq.transitivity (foldm_snoc cm (init m f)) (f 0) (g 0); + eq.symmetry (foldm_snoc cm (init m g)) (g 0); + eq.transitivity (foldm_snoc cm (init m f)) + (g 0) + (foldm_snoc cm (init m g)) + end else + let fliat, flast = un_snoc (init m f) in + let gliat, glast = un_snoc (init m g) in + foldm_snoc_of_equal_inits cm (fun (i: under (m-1)) -> f i) + (fun (i: under (m-1)) -> g i); + lemma_eq_elim (init (m-1) (fun (i: under (m-1)) -> f i)) fliat; + lemma_eq_elim (init (m-1) (fun (i: under (m-1)) -> g i)) gliat; + cm.congruence flast (foldm_snoc cm fliat) + glast (foldm_snoc cm gliat) + diff --git a/stage0/ulib/FStar.Seq.Permutation.fsti b/stage0/ulib/FStar.Seq.Permutation.fsti new file mode 100644 index 00000000000..864e2a70d95 --- /dev/null +++ b/stage0/ulib/FStar.Seq.Permutation.fsti @@ -0,0 +1,165 @@ +(* + Copyright 2021-2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: N. Swamy, A. Rastogi, A. Rozanov +*) +module FStar.Seq.Permutation + +open FStar.Seq + +open FStar.IntegerIntervals + +(* This module defines a permutation on sequences as a bijection among + the sequence indices relating equal elements. + + It defines a few utilities to work with such permutations. + + Notably: + + 1. Given two sequences with equal element counts, it constructs a + permutation. + + 2. 
Folding the multiplication of a commutative monoid over a + sequence and its permutation produces equivalent results +*) + +(* A function from the indices of `s` to itself *) +let index_fun #a (s:seq a) = under (Seq.length s) -> under (Seq.length s) + +(* An abstract predicate defining when an index_fun is a permutation *) +val is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) : prop + +(* Revealing the interpretation of is_permutation *) +val reveal_is_permutation (#a:Type) (s0 s1:seq a) (f:index_fun s0) + : Lemma (is_permutation s0 s1 f <==> + (* lengths of the sequences are the same *) + Seq.length s0 == Seq.length s1 /\ + (* f is injective *) + (forall x y. {:pattern f x; f y} + x <> y ==> f x <> f y) /\ + (* and f relates equal items in s0 and s1 *) + (forall (i:nat{i < Seq.length s0}).{:pattern (Seq.index s1 (f i))} + Seq.index s0 i == Seq.index s1 (f i))) + +(* A seqperm is an index_fun that is also a permutation *) +let seqperm (#a:Type) (s0:seq a) (s1:seq a) = + f:index_fun s0 { is_permutation s0 s1 f } + +(* We can construct a permutation from + sequences whose element counts are the same *) +val permutation_from_equal_counts + (#a:eqtype) + (s0:seq a) (s1:seq a{(forall x. count x s0 == count x s1)}) + : Tot (seqperm s0 s1) + +(** Now, some utilities related to commutative monoids and permutations *) + +module CE = FStar.Algebra.CommMonoid.Equiv + +(* folding m.mult over a sequence *) +let foldm_snoc (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:seq a) = + foldr_snoc m.mult s m.unit + +(* folding over a sequence of units is unit *) +val foldm_snoc_unit_seq (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:Seq.seq a) + : Lemma (requires Seq.equal s (Seq.create (Seq.length s) m.unit)) + (ensures eq.eq (foldm_snoc m s) m.unit) + +(* folding over a singleton sequence is the sequence element *) +val foldm_snoc_singleton (#a:_) (#eq:_) (m:CE.cm a eq) (x:a) + : Lemma (eq.eq (foldm_snoc m (Seq.create 1 x)) x) + +(* folding m over the concatenation of s1 and s2 + can be decomposed into a fold over s1 and a fold over s2 *) +val foldm_snoc_append (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s1 s2: seq a) + : Lemma + (ensures eq.eq (foldm_snoc m (append s1 s2)) + (m.mult (foldm_snoc m s1) (foldm_snoc m s2))) + +(* folds over concatenated sequences are symmetric in the order of concatenation *) +val foldm_snoc_sym (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s1 s2: seq a) + : Lemma + (ensures eq.eq (foldm_snoc m (append s1 s2)) + (foldm_snoc m (append s2 s1))) + +(* And, finally, if s0 and s1 are permutations of each other, + then folding m over them yields equivalent results *) +val foldm_snoc_perm (#a:_) (#eq:_) + (m:CE.cm a eq) + (s0:seq a) + (s1:seq a) + (p:seqperm s0 s1) + : Lemma + (ensures eq.eq (foldm_snoc m s0) (foldm_snoc m s1)) + +/// foldm_snoc_split: This next bit is for a lemma that proves that if +/// the fold is taken over a sequence of sums, it is equal +/// to a sum of folds of the summand sequences + +(* This constructs a sequence init function to be used to create + a sequence of function values in a given finite integer range *) +let init_func_from_expr #c (#n0: int) (#nk: not_less_than n0) + (expr: ifrom_ito n0 nk -> c) + (a: ifrom_ito n0 nk) + (b: ifrom_ito a nk) + (i: under (closed_interval_size a b)) + : c + = expr (n0+i) + +(* CommMonoid-induced pointwise sum of two functions *) +let func_sum #a #c #eq (cm: CE.cm c eq) (f g: a -> c) + : t:(a -> c){ forall (x:a). 
t x == f x `cm.mult` g x } + = fun (x:a) -> cm.mult (f x) (g x) + +open FStar.Seq.Equiv + +(* The lemma itself: + if the fold is taken over a sequence of sums, it is equal + to a sum of folds of the summand sequences *) +val foldm_snoc_split (#c:_) (#eq:_) + (cm: CE.cm c eq) + (n0: int) + (nk: not_less_than n0) + (expr1 expr2: (ifrom_ito n0 nk) -> c) + : Lemma (ensures (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr (func_sum cm expr1 expr2) n0 nk)) `eq.eq` + cm.mult (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr expr1 n0 nk))) + (foldm_snoc cm (init (closed_interval_size n0 nk) (init_func_from_expr expr2 n0 nk))))) + + + +val foldm_snoc_equality (#c:_) (#eq:_) (add: CE.cm c eq) (s t: seq c) + : Lemma (requires length s == length t /\ eq_of_seq eq s t) + (ensures foldm_snoc add s `eq.eq` foldm_snoc add t) + + +val foldm_snoc_split_seq (#c:_) (#eq:_) (add: CE.cm c eq) + (s: seq c) (t: seq c{length s == length t}) + (sum_seq: seq c{length sum_seq == length s}) + (proof: (i: under (length s)) + -> Lemma ((index s i `add.mult` index t i) + `eq.eq` (index sum_seq i))) + : Lemma ((foldm_snoc add s `add.mult` foldm_snoc add t) `eq.eq` + (foldm_snoc add sum_seq)) + +val foldm_snoc_of_equal_inits (#c:_) (#eq:_) (#m: pos) (cm: CE.cm c eq) + (f: (under m) -> c) (g: (under m) -> c) + : Lemma (requires (forall (i: under m). f i `eq.eq` g i)) + (ensures foldm_snoc cm (init m f) `eq.eq` foldm_snoc cm (init m g)) + +(* this one turns out to be quite useful to speed up big proofs *) +let foldm_snoc_decomposition #c #eq (cm: CE.cm c eq) + (s: seq c{length s > 0}) + : Lemma (foldm_snoc cm s == + cm.mult (snd (un_snoc s)) (foldm_snoc cm (fst (un_snoc s)))) = () diff --git a/stage0/ulib/FStar.Seq.Properties.fst b/stage0/ulib/FStar.Seq.Properties.fst new file mode 100644 index 00000000000..d7d564abdbd --- /dev/null +++ b/stage0/ulib/FStar.Seq.Properties.fst @@ -0,0 +1,667 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +module FStar.Seq.Properties + +open FStar.Seq.Base +module Seq = FStar.Seq.Base + +let lemma_append_inj_l #_ s1 s2 t1 t2 i = + assert (index s1 i == (index (append s1 s2) i)); + assert (index t1 i == (index (append t1 t2) i)) + +let lemma_append_inj_r #_ s1 s2 t1 t2 i = + assert (index s2 i == (index (append s1 s2) (i + length s1))); + assert (index t2 i == (index (append t1 t2) (i + length t1))) + +let lemma_append_len_disj #_ s1 s2 t1 t2 = + cut (length (append s1 s2) == length s1 + length s2); + cut (length (append t1 t2) == length t1 + length t2) + +let lemma_append_inj #_ s1 s2 t1 t2 = + lemma_append_len_disj s1 s2 t1 t2; + FStar.Classical.forall_intro #(i:nat{i < length s1}) #(fun i -> index s1 i == index t1 i) (lemma_append_inj_l s1 s2 t1 t2); + FStar.Classical.forall_intro #(i:nat{i < length s2}) #(fun i -> index s2 i == index t2 i) (lemma_append_inj_r s1 s2 t1 t2) + +let lemma_head_append #_ _ _ = () + +let lemma_tail_append #_ s1 s2 = cut (equal (tail (append s1 s2)) (append (tail s1) s2)) + +let lemma_cons_inj #_ v1 v2 s1 s2 = + let t1 = create 1 v1 in + let t2 = create 1 v2 in + lemma_append_inj t1 s1 t2 s2; + assert(index t1 0 == index t2 0) + +let lemma_split #_ s i = + cut (equal (append (fst (split s i)) (snd (split s i))) s) + +let rec mem_index' (#a:eqtype) (x:a) (s:seq a) + : Lemma (requires (mem x s)) + (ensures (exists i. index s i == x)) + (decreases (length s)) + = if length s = 0 then () + else if head s = x then () + else mem_index' x (tail s) + +let mem_index = mem_index' + +let lemma_slice_append #_ _ _ = () + +let rec lemma_slice_first_in_append' (#a:Type) (s1:seq a) (s2:seq a) + (i:nat{i <= length s1}) +: Lemma + (ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2))) + (decreases (length s1)) += if i = 0 then () + else lemma_slice_first_in_append' (tail s1) s2 (i - 1) + +let lemma_slice_first_in_append = lemma_slice_first_in_append' + +let slice_upd #_ s i j k v = + lemma_eq_intro (slice (upd s k v) i j) (slice s i j) + +let upd_slice #_ s i j k v = + lemma_eq_intro (upd (slice s i j) k v) (slice (upd s (i + k) v) i j) + +let lemma_append_cons #_ _ _ = () + +let lemma_tl #_ _ _ = () + +let rec sorted_feq' (#a:Type) + (f g : (a -> a -> Tot bool)) + (s:seq a{forall x y. f x y == g x y}) + : Lemma (ensures (sorted f s <==> sorted g s)) + (decreases (length s)) + = if length s <= 1 then () + else sorted_feq' f g (tail s) + +let sorted_feq = sorted_feq' + +let rec lemma_append_count' (#a:eqtype) (lo:seq a) (hi:seq a) +: Lemma + (requires True) + (ensures (forall x. count x (append lo hi) = (count x lo + count x hi))) + (decreases (length lo)) += if length lo = 0 + then cut (equal (append lo hi) hi) + else (cut (equal (cons (head lo) (append (tail lo) hi)) + (append lo hi)); + lemma_append_count' (tail lo) hi; + let tl_l_h = append (tail lo) hi in + let lh = cons (head lo) tl_l_h in + cut (equal (tail lh) tl_l_h)) + +let lemma_append_count = lemma_append_count' + +let lemma_append_count_aux #_ _ lo hi = lemma_append_count lo hi + +let lemma_mem_inversion #_ _ = () + +let rec lemma_mem_count' (#a:eqtype) (s:seq a) (f:a -> Tot bool) +: Lemma + (requires (forall (i:nat{i < length s}). f (index s i))) + (ensures (forall (x:a). mem x s ==> f x)) + (decreases (length s)) += if length s = 0 + then () + else (let t = i:nat{i < length (tail s)} in + cut (forall (i:t). index (tail s) i == index s (i + 1))) + +let lemma_mem_count = lemma_mem_count' + +let lemma_count_slice #_ s i = + cut (equal s (append (slice s 0 i) (slice s i (length s)))); + lemma_append_count (slice s 0 i) (slice s i (length s)) + +let rec sorted_concat_lemma': #a:eqtype + -> f:(a -> a -> Tot bool){total_order a f} + -> lo:seq a{sorted f lo} + -> pivot:a + -> hi:seq a{sorted f hi} + -> Lemma (requires (forall y. 
(mem y lo ==> f y pivot) + /\ (mem y hi ==> f pivot y))) + (ensures (sorted f (append lo (cons pivot hi)))) + (decreases (length lo)) += fun #_ f lo pivot hi -> + if length lo = 0 + then (cut (equal (append lo (cons pivot hi)) (cons pivot hi)); + cut (equal (tail (cons pivot hi)) hi)) + else (sorted_concat_lemma' f (tail lo) pivot hi; + lemma_append_cons lo (cons pivot hi); + lemma_tl (head lo) (append (tail lo) (cons pivot hi))) + +let sorted_concat_lemma = sorted_concat_lemma' + +let split_5 #a s i j = + let frag_lo = slice s 0 i in + let frag_i = slice s i (i + 1) in + let frag_mid = slice s (i + 1) j in + let frag_j = slice s j (j + 1) in + let frag_hi = slice s (j + 1) (length s) in + upd (upd (upd (upd (create 5 frag_lo) 1 frag_i) 2 frag_mid) 3 frag_j) 4 frag_hi + +let lemma_swap_permutes_aux_frag_eq #a s i j i' j' = + cut (equal (slice s i' j') (slice (swap s i j) i' j')); + cut (equal (slice s i (i + 1)) (slice (swap s i j) j (j + 1))); + cut (equal (slice s j (j + 1)) (slice (swap s i j) i (i + 1))) + +#push-options "--z3rlimit 20" +let lemma_swap_permutes_aux #_ s i j x = + if j=i + then cut (equal (swap s i j) s) + else begin + let s5 = split_5 s i j in + let frag_lo, frag_i, frag_mid, frag_j, frag_hi = + index s5 0, index s5 1, index s5 2, index s5 3, index s5 4 in + lemma_append_count_aux x frag_lo (append frag_i (append frag_mid (append frag_j frag_hi))); + lemma_append_count_aux x frag_i (append frag_mid (append frag_j frag_hi)); + lemma_append_count_aux x frag_mid (append frag_j frag_hi); + lemma_append_count_aux x frag_j frag_hi; + + let s' = swap s i j in + let s5' = split_5 s' i j in + let frag_lo', frag_j', frag_mid', frag_i', frag_hi' = + index s5' 0, index s5' 1, index s5' 2, index s5' 3, index s5' 4 in + + lemma_swap_permutes_aux_frag_eq s i j 0 i; + lemma_swap_permutes_aux_frag_eq s i j (i + 1) j; + lemma_swap_permutes_aux_frag_eq s i j (j + 1) (length s); + + lemma_append_count_aux x frag_lo (append frag_j (append frag_mid (append frag_i frag_hi))); + lemma_append_count_aux x frag_j (append frag_mid (append frag_i frag_hi)); + lemma_append_count_aux x frag_mid (append frag_i frag_hi); + lemma_append_count_aux x frag_i frag_hi + end +#pop-options + +let append_permutations #a s1 s2 s1' s2' = + ( + lemma_append_count s1 s2; + lemma_append_count s1' s2' + ) + +let lemma_swap_permutes #a s i j + = FStar.Classical.forall_intro + #a + #(fun x -> count x s = count x (swap s i j)) + (lemma_swap_permutes_aux s i j) + +(* perm_len: + A lemma that shows that two sequences that are permutations + of each other also have the same length +*) +//a proof optimization: Z3 only needs to unfold the recursive definition of `count` once +#push-options "--fuel 1 --ifuel 1 --z3rlimit 20" +let rec perm_len' (#a:eqtype) (s1 s2: seq a) + : Lemma (requires (permutation a s1 s2)) + (ensures (length s1 == length s2)) + (decreases (length s1)) + = if length s1 = 0 then begin + if length s2 = 0 then () + else assert (count (index s2 0) s2 > 0) + end + else let s1_hd = head s1 in + let s1_tl = tail s1 in + assert (count s1_hd s1 > 0); + assert (count s1_hd s2 > 0); + assert (length s2 > 0); + let s2_hd = head s2 in + let s2_tl = tail s2 in + if s1_hd = s2_hd + then (assert (permutation a s1_tl s2_tl); perm_len' s1_tl s2_tl) + else let i = index_mem s1_hd s2 in + let s_pfx, s_sfx = split_eq s2 i in + assert (equal s_sfx (append (create 1 s1_hd) (tail s_sfx))); + let s2' = append s_pfx (tail s_sfx) in + lemma_append_count s_pfx s_sfx; + lemma_append_count (create 1 s1_hd) (tail s_sfx); + 
lemma_append_count s_pfx (tail s_sfx); + assert (permutation a s1_tl s2'); + perm_len' s1_tl s2' +#pop-options + +let perm_len = perm_len' + +let cons_perm #_ tl s = lemma_tl (head s) tl + +let lemma_mem_append #_ s1 s2 = lemma_append_count s1 s2 + +let lemma_slice_cons #_ s i j = + cut (equal (slice s i j) (append (create 1 (index s i)) (slice s (i + 1) j))); + lemma_mem_append (create 1 (index s i)) (slice s (i + 1) j) + +let lemma_slice_snoc #_ s i j = + cut (equal (slice s i j) (append (slice s i (j - 1)) (create 1 (index s (j - 1))))); + lemma_mem_append (slice s i (j - 1)) (create 1 (index s (j - 1))) + +let lemma_ordering_lo_snoc #_ f s i j pv = + cut (equal (slice s i (j + 1)) (append (slice s i j) (create 1 (index s j)))); + lemma_mem_append (slice s i j) (create 1 (index s j)) + +let lemma_ordering_hi_cons #_ f s back len pv = + cut (equal (slice s back len) (append (create 1 (index s back)) (slice s (back + 1) len))); + lemma_mem_append (create 1 (index s back)) (slice s (back + 1) len) + +let swap_frame_lo #_ s lo i j = cut (equal (slice s lo i) (slice (swap s i j) lo i)) + +let swap_frame_lo' #_ s lo i' i j = cut (equal (slice s lo i') (slice (swap s i j) lo i')) + +let swap_frame_hi #_ s i j k hi = cut (equal (slice s k hi) (slice (swap s i j) k hi)) + +let lemma_swap_slice_commute #_ s start i j len = + cut (equal (slice (swap s i j) start len) (swap (slice s start len) (i - start) (j - start))) + +let lemma_swap_permutes_slice #_ s start i j len = + lemma_swap_slice_commute s start i j len; + lemma_swap_permutes (slice s start len) (i - start) (j - start) + +let splice_refl #_ s i j = cut (equal s (splice s i s j)) + +let lemma_swap_splice #_ s start i j len = cut (equal (swap s i j) (splice s start (swap s i j) len)) + +let lemma_seq_frame_hi #_ s1 s2 i j m n = + cut (equal (slice s1 m n) (slice s2 m n)) + +let lemma_seq_frame_lo #_ s1 s2 i j m n = + cut (equal (slice s1 i j) (slice s2 i j)) + +let lemma_tail_slice #_ s i j = + cut (equal (tail (slice s i j)) (slice s (i + 1) j)) + +let lemma_weaken_frame_right #_ s1 s2 i j k = cut (equal s1 (splice s2 i s1 k)) + +let lemma_weaken_frame_left #_ s1 s2 i j k = cut (equal s1 (splice s2 i s1 k)) + +let lemma_trans_frame #_ s1 s2 s3 i j = cut (equal s1 (splice s3 i s1 j)) + +let lemma_weaken_perm_left #_ s1 s2 i j k = + cut (equal (slice s2 i k) (append (slice s2 i j) + (slice s2 j k))); + cut (equal (slice s1 i k) (append (slice s2 i j) + (slice s1 j k))); + lemma_append_count (slice s2 i j) (slice s2 j k); + lemma_append_count (slice s2 i j) (slice s1 j k) + +let lemma_weaken_perm_right #_ s1 s2 i j k = + cut (equal (slice s2 i k) (append (slice s2 i j) + (slice s2 j k))); + cut (equal (slice s1 i k) (append (slice s1 i j) + (slice s2 j k))); + lemma_append_count (slice s2 i j) (slice s2 j k); + lemma_append_count (slice s1 i j) (slice s2 j k) + +let lemma_trans_perm #_ _ _ _ _ _ = () + + +let lemma_cons_snoc #_ _ _ _ = () + +let lemma_tail_snoc #_ s x = lemma_slice_first_in_append s (Seq.create 1 x) 1 + +let lemma_snoc_inj #_ s1 s2 v1 v2 = + let t1 = create 1 v1 in + let t2 = create 1 v2 in + lemma_append_inj s1 t1 s2 t2; + assert(head t1 == head t2) + +let lemma_mem_snoc #_ s x = lemma_append_count s (Seq.create 1 x) + +let rec find_append_some': #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma + (requires (Some? 
(find_l f s1))) + (ensures (find_l f (append s1 s2) == find_l f s1)) + (decreases (length s1)) += fun #_ s1 s2 f -> + if f (head s1) then () + else + let _ = cut (equal (tail (append s1 s2)) (append (tail s1) s2)) in + find_append_some' (tail s1) s2 f + +let find_append_some = find_append_some' + +let rec find_append_none': #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma + (requires (None? (find_l f s1))) + (ensures (find_l f (append s1 s2) == find_l f s2)) + (decreases (length s1)) += fun #_ s1 s2 f -> + if Seq.length s1 = 0 then cut (equal (append s1 s2) s2) + else + let _ = cut (equal (tail (append s1 s2)) (append (tail s1) s2)) in + find_append_none' (tail s1) s2 f + +let find_append_none = find_append_none' + +let rec find_append_none_s2': #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma + (requires (None? (find_l f s2))) + (ensures (find_l f (append s1 s2) == find_l f s1)) + (decreases (length s1)) += fun #_ s1 s2 f -> + if Seq.length s1 = 0 then cut (equal (append s1 s2) s2) + else if f (head s1) then () + else begin + find_append_none_s2' (tail s1) s2 f; + cut (equal (tail (append s1 s2)) (append (tail s1) s2)) + end + +let find_append_none_s2 = find_append_none_s2' + +let find_snoc #_ s x f = + if Some? (find_l f s) then find_append_some s (Seq.create 1 x) f + else find_append_none s (Seq.create 1 x) f + +let un_snoc_snoc #_ s x = + let s', x = un_snoc (snoc s x) in + assert (Seq.equal s s') + +let find_mem #_ s f x + = match seq_find f s with + | None -> mem_index x s + | Some _ -> () + +let rec seq_mem_k': #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} -> + Lemma (requires True) + (ensures (mem (Seq.index s n) s)) + (decreases n) += fun #_ s n -> + if n = 0 then () + else let tl = tail s in + seq_mem_k' tl (n - 1) + +let seq_mem_k = seq_mem_k' + +module L = FStar.List.Tot + +let lemma_seq_of_list_induction #_ l + = match l with + | [] -> () + | hd::tl -> lemma_tl hd (seq_of_list tl) + +let rec lemma_seq_list_bij': #a:Type -> s:seq a -> Lemma + (requires (True)) + (ensures (seq_of_list (seq_to_list s) == s)) + (decreases (length s)) += fun #_ s -> + let l = seq_to_list s in + lemma_seq_of_list_induction l; + if length s = 0 then ( + assert (equal s (seq_of_list l)) + ) + else ( + lemma_seq_list_bij' (slice s 1 (length s)); + assert (equal s (seq_of_list (seq_to_list s))) + ) + +let lemma_seq_list_bij = lemma_seq_list_bij' + +let rec lemma_list_seq_bij': #a:Type -> l:list a -> Lemma + (requires (True)) + (ensures (seq_to_list (seq_of_list l) == l)) + (decreases (L.length l)) += fun #_ l -> + lemma_seq_of_list_induction l; + if L.length l = 0 then () + else ( + lemma_list_seq_bij' (L.tl l); + assert(equal (seq_of_list (L.tl l)) (slice (seq_of_list l) 1 (length (seq_of_list l)))) + ) + +let lemma_list_seq_bij = lemma_list_seq_bij' + +let rec lemma_index_is_nth': #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma + (requires True) + (ensures (L.index (seq_to_list s) i == index s i)) + (decreases i) += fun #_ s i -> + assert (s `equal` cons (head s) (tail s)); + if i = 0 then () + else ( + lemma_index_is_nth' (slice s 1 (length s)) (i-1) + ) + +let lemma_index_is_nth = lemma_index_is_nth' + +let contains #a s x = + exists (k:nat). 
k < Seq.length s /\ Seq.index s k == x + +let contains_intro #_ _ _ _ = () + +let contains_elim #_ _ _ = () + +let lemma_contains_empty #_ = () + +let lemma_contains_singleton #_ _ = () + +private let intro_append_contains_from_disjunction (#a:Type) (s1:seq a) (s2:seq a) (x:a) + : Lemma (requires s1 `contains` x \/ s2 `contains` x) + (ensures (append s1 s2) `contains` x) + = let open FStar.Classical in + let open FStar.Squash in + or_elim #(s1 `contains` x) #(s2 `contains` x) #(fun _ -> (append s1 s2) `contains` x) + (fun _ -> ()) + (fun _ -> let s = append s1 s2 in + exists_elim (s `contains` x) (get_proof (s2 `contains` x)) (fun k -> + assert (Seq.index s (Seq.length s1 + k) == x))) + +let append_contains_equiv #_ s1 s2 x + = FStar.Classical.move_requires (intro_append_contains_from_disjunction s1 s2) x + +let contains_snoc #_ s x = + FStar.Classical.forall_intro (append_contains_equiv s (Seq.create 1 x)) + +let rec lemma_find_l_contains' (#a:Type) (f:a -> Tot bool) (l:seq a) + : Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l))) + (decreases (Seq.length l)) + = if length l = 0 then () + else if f (head l) then () + else lemma_find_l_contains' f (tail l) + +let lemma_find_l_contains = lemma_find_l_contains' + +let contains_cons #_ hd tl x + = append_contains_equiv (Seq.create 1 hd) tl x + +let append_cons_snoc #_ _ _ _ = () + +let append_slices #_ _ _ = () + +let rec find_l_none_no_index' (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) : + Lemma (requires (None? (find_l f s))) + (ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i)))) + (decreases (Seq.length s)) += if Seq.length s = 0 + then () + else (assert (not (f (head s))); + assert (None? (find_l f (tail s))); + find_l_none_no_index' (tail s) f; + assert (Seq.equal s (cons (head s) (tail s))); + find_append_none (create 1 (head s)) (tail s) f) + +let find_l_none_no_index = find_l_none_no_index' + +let cons_head_tail #_ s = + let _ : squash (slice s 0 1 == create 1 (index s 0)) = + lemma_index_slice s 0 1 0; + lemma_index_create 1 (index s 0) 0; + lemma_eq_elim (slice s 0 1) (create 1 (index s 0)) + in + lemma_split s 1 + +let head_cons #_ _ _ = () + +let suffix_of_tail #_ s = cons_head_tail s + +let index_cons_l #_ _ _ = () + +let index_cons_r #_ _ _ _ = () + +let append_cons #_ c s1 s2 = lemma_eq_elim (append (cons c s1) s2) (cons c (append s1 s2)) + +let index_tail #_ _ _ = () + +let mem_cons #_ x s = lemma_append_count (create 1 x) s + +let snoc_slice_index #_ s i j = lemma_eq_elim (snoc (slice s i j) (index s j)) (slice s i (j + 1)) + +let cons_index_slice #_ s i j _ = lemma_eq_elim (cons (index s i) (slice s (i + 1) j)) (slice s i j) + +let slice_is_empty #_ s i = lemma_eq_elim (slice s i i) Seq.empty + +let slice_length #_ s = lemma_eq_elim (slice s 0 (length s)) s + +let slice_slice #_ s i1 j1 i2 j2 = lemma_eq_elim (slice (slice s i1 j1) i2 j2) (slice s (i1 + i2) (i1 + j2)) + +let rec lemma_seq_of_list_index #_ l i + = lemma_seq_of_list_induction l; + match l with + | [] -> () + | hd::tl -> if i = 0 then () else lemma_seq_of_list_index tl (i - 1) + +let seq_of_list_tl #_ l = lemma_seq_of_list_induction l + +let rec mem_seq_of_list #_ x l += lemma_seq_of_list_induction l; + match l with + | [] -> () + | y :: q -> + let _ : squash (head (seq_of_list l) == y) = () in + let _ : squash (tail (seq_of_list l) == seq_of_list q) = seq_of_list_tl l in + let _ : squash (mem x (seq_of_list l) == (x = y || mem x (seq_of_list q))) = + lemma_mem_inversion (seq_of_list l) + in + 
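(* with head/tail of seq_of_list l and the membership inversion established above, the inductive call on q closes the goal *)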
mem_seq_of_list x q + +let rec intro_of_list'': #a:Type -> + i:nat -> + s:seq a -> + l:list a -> + Lemma + (requires ( + List.Tot.length l + i = length s /\ + i <= length s /\ + explode_and i s l)) + (ensures ( + equal (seq_of_list l) (slice s i (length s)))) + (decreases ( + List.Tot.length l)) += fun #_ i s l -> + lemma_seq_of_list_induction l; + match l with + | [] -> () + | hd :: tl -> intro_of_list'' (i + 1) s tl + +let intro_of_list' = intro_of_list'' + +let intro_of_list #_ s l = intro_of_list' 0 s l + +#push-options "--z3rlimit 20" +let rec elim_of_list'': #a:Type -> + i:nat -> + s:seq a -> + l:list a -> + Lemma + (requires ( + List.Tot.length l + i = length s /\ + i <= length s /\ + slice s i (length s) == seq_of_list l)) + (ensures ( + explode_and i s l)) + (decreases ( + List.Tot.length l)) += fun #_ i s l -> + match l with + | [] -> () + | hd :: tl -> + lemma_seq_of_list_induction l; + elim_of_list'' (i + 1) s tl +#pop-options + +let elim_of_list' = elim_of_list'' + +let elim_of_list #_ l = elim_of_list' 0 (seq_of_list l) l + +let rec lemma_seq_to_list_permutation' (#a:eqtype) (s:seq a) + :Lemma (requires True) (ensures (forall x. count x s == List.Tot.Base.count x (seq_to_list s))) (decreases (length s)) + = if length s > 0 then ( + assert (equal s (cons (head s) (tail s))); + lemma_seq_to_list_permutation' (slice s 1 (length s)) + ) + +let lemma_seq_to_list_permutation = lemma_seq_to_list_permutation' + +let rec lemma_seq_of_list_permutation #a l + = + lemma_seq_of_list_induction l; + match l with + | [] -> () + | _::tl -> lemma_seq_of_list_permutation tl + +let rec lemma_seq_of_list_sorted #a f l + = + lemma_seq_of_list_induction l; + if List.Tot.length l > 1 then begin + lemma_seq_of_list_induction (List.Tot.Base.tl l); + lemma_seq_of_list_sorted f (List.Tot.Base.tl l) + end + + +let lemma_seq_sortwith_correctness #_ f s + = let l = seq_to_list s in + let l' = List.Tot.Base.sortWith f l in + let s' = seq_of_list l' in + let cmp = List.Tot.Base.bool_of_compare f in + + (* sortedness *) + List.Tot.Properties.sortWith_sorted f l; //the list returned by List.sortWith is sorted + lemma_seq_of_list_sorted cmp l'; //seq_of_list preserves sortedness + + (* permutation *) + lemma_seq_to_list_permutation s; //seq_to_list is a permutation + List.Tot.Properties.sortWith_permutation f l; //List.sortWith is a permutation + lemma_seq_of_list_permutation l' //seq_of_list is a permutation + + +(****** Seq map ******) + +let rec map_seq #a #b f s : Tot (Seq.seq b) (decreases Seq.length s) = + if Seq.length s = 0 + then Seq.empty + else let hd, tl = head s, tail s in + cons (f hd) (map_seq f tl) + +let rec map_seq_len #a #b f s + : Lemma (ensures Seq.length (map_seq f s) == Seq.length s) (decreases Seq.length s) + = if Seq.length s = 0 + then () + else map_seq_len f (tail s) + +let rec map_seq_index #a #b f s i + : Lemma (ensures (map_seq_len f s; Seq.index (map_seq f s) i == f (Seq.index s i))) (decreases Seq.length s) + = map_seq_len f s; + if Seq.length s = 0 + then () + else if i = 0 + then () + else map_seq_index f (tail s) (i-1) + +let map_seq_append #a #b f s1 s2 = + map_seq_len f s1; + map_seq_len f s2; + map_seq_len f (Seq.append s1 s2); + Classical.forall_intro (map_seq_index f s1); + Classical.forall_intro (map_seq_index f s2); + Classical.forall_intro (map_seq_index f (Seq.append s1 s2)); + assert (Seq.equal (map_seq f (Seq.append s1 s2)) + (Seq.append (map_seq f s1) (map_seq f s2))) diff --git a/stage0/ulib/FStar.Seq.Properties.fsti b/stage0/ulib/FStar.Seq.Properties.fsti new 
file mode 100644 index 00000000000..0f5bb3513e6 --- /dev/null +++ b/stage0/ulib/FStar.Seq.Properties.fsti @@ -0,0 +1,762 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Seq.Properties + +open FStar.Seq.Base +module Seq = FStar.Seq.Base + +let lseq (a: Type) (l: nat) : Tot Type = + s: Seq.seq a { Seq.length s == l } + +let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s + +val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1} + -> Lemma (index s1 i == index t1 i) + +val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2} + -> Lemma (ensures (index s2 i == index t2 i)) + +val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))} + -> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2)) + +val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2} + -> Lemma (requires (equal (append s1 s2) (append t1 t2))) + (ensures (equal s1 t1 /\ equal s2 t2)) + +let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0 + +let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s) + +val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma + (head (append s1 s2) == head s1) + +val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma + (tail (append s1 s2) == append (tail s1) s2) + +let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1) + +val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a + -> Lemma (requires (equal (cons v1 s1) (cons v2 s2))) + (ensures (v1 == v2 /\ equal s1 s2)) + +let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a & seq a) + = slice s 0 i, slice s i (length s) + +val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma + (ensures (append (fst (split s i)) (snd (split s i)) == s)) + +let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) +: Pure + (seq a & seq a) + (requires True) + (ensures (fun x -> (append (fst x) (snd x) == s))) += let x = split s i in + lemma_split s i; + x + +let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s)) += if length s = 0 then 0 + else if head s = x + then 1 + count x (tail s) + else count x (tail s) + +let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0 + +val mem_index (#a:eqtype) (x:a) (s:seq a) + : Lemma (requires (mem x s)) + (ensures (exists i. 
index s i == x)) + +(* index_mem: + A utility function that finds the first index of + `x` in `s`, given that we know the `x` is actually contained in `s` *) +let rec index_mem (#a:eqtype) (x:a) (s:seq a) + : Pure nat + (requires (mem x s)) + (ensures (fun i -> i < length s /\ index s i == x)) + (decreases (length s)) + = if head s = x then 0 + else 1 + index_mem x (tail s) + +let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a) += upd (upd s j (index s i)) i (index s j) + +val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma + (ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2)))) + +val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma + (ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2))) + +val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s} + -> k:nat{k < length s} -> v:a -> Lemma + (requires k < i \/ j <= k) + (ensures slice (upd s k v) i j == slice s i j) + [SMTPat (slice (upd s k v) i j)] + +val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s} + -> k:nat{k < j - i} -> v:a -> Lemma + (requires i + k < j) + (ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j) + [SMTPat (upd (slice s i j) k v)] + +// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _)) +val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma + (requires True) + (ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2)))) + +val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma + (ensures (equal (tail (cons hd tl)) tl)) + +let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a) +: Tot bool (decreases (length s)) += if length s <= 1 + then true + else let hd = head s in + f hd (index s 1) && sorted f (tail s) + +val sorted_feq (#a:Type) + (f g : (a -> a -> Tot bool)) + (s:seq a{forall x y. f x y == g x y}) + : Lemma (ensures (sorted f s <==> sorted g s)) + + +val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma + (requires True) + (ensures (forall x. count x (append lo hi) = (count x lo + count x hi))) + +val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma + (requires True) + (ensures (count x (append lo hi) = (count x lo + count x hi))) + +val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma + (ensures (forall x. mem x s = (x=head s || mem x (tail s)))) + +val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma + (requires (forall (i:nat{i<length s}). f (index s i))) + (ensures (forall (x:a). mem x s ==> f x)) + +val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma + (requires True) + (ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s)))) + +type total_order (a:eqtype) (f: (a -> a -> Tot bool)) = + (forall a. f a a) (* reflexivity *) + /\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *) + /\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *) +type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f} + +val sorted_concat_lemma: #a:eqtype + -> f:(a -> a -> Tot bool){total_order a f} + -> lo:seq a{sorted f lo} + -> pivot:a + -> hi:seq a{sorted f hi} + -> Lemma (requires (forall y. 
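(* i.e., under the order f, every element of lo is at most the pivot and every element of hi is at least the pivot; e.g. lo = [1;2], pivot = 3, hi = [4;5] *)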
(mem y lo ==> f y pivot) + /\ (mem y hi ==> f pivot y))) + (ensures (sorted f (append lo (cons pivot hi)))) + +val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a)) + (requires True) + (ensures (fun x -> + (length x = 5 + /\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4))))) + /\ equal (index x 0) (slice s 0 i) + /\ equal (index x 1) (slice s i (i+1)) + /\ equal (index x 2) (slice s (i+1) j) + /\ equal (index x 3) (slice s j (j + 1)) + /\ equal (index x 4) (slice s (j + 1) (length s))))) + +val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} + -> i':nat -> j':nat{i' <= j' /\ j'<=length s /\ + (j < i' //high slice + \/ j' <= i //low slice + \/ (i < i' /\ j' <= j)) //mid slice + } + -> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j' + /\ slice s i (i + 1) == slice (swap s i j) j (j + 1) + /\ slice s j (j + 1) == slice (swap s i j) i (i + 1))) + +val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma + (requires True) + (ensures (count x s = count x (swap s i j))) + +type permutation (a:eqtype) (s1:seq a) (s2:seq a) = + (forall i. count i s1 = count i s2) + +val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma + (requires permutation a s1 s1' /\ permutation a s2 s2') + (ensures permutation a (append s1 s2) (append s1' s2')) + +val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s}) + : Lemma (permutation a s (swap s i j)) + +val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} -> + Lemma (requires (permutation a tl (tail s))) + (ensures (permutation a (cons (head s) tl) s)) + +val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a + -> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2))) + +val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s} + -> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j)))) + +val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s} + -> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1))))) + +val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a + -> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv)) + (ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv))) + +val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a + -> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back))) + (ensures ((forall y. 
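(* i.e., the lower bound by pv extends one cell to the left, now covering index back as well *)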
mem y (slice s back len) ==> f pv y))) + +val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s} + -> Lemma (ensures (slice s lo i == slice (swap s i j) lo i)) + +val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s} + -> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i')) + +val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s} + -> Lemma (ensures (slice s k hi == slice (swap s i j) k hi)) + +val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s} + -> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start)))) + +val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s} + -> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len))) + +(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *) +let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)}) +: Tot (seq a) += Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1))) + +(* replace with sub *) +let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a) + = Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s))) + +val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s} + -> Lemma + (ensures (s == splice s i s j)) + +val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s} + -> Lemma + (ensures (swap s i j == splice s start (swap s i j) len)) + +val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1} + -> Lemma + (requires (s1 == (splice s2 i s1 j))) + (ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m))) + +val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1} + -> Lemma + (requires (s1 == (splice s2 m s1 n))) + (ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j))) + +val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s} + -> Lemma + (requires True) + (ensures (tail (slice s i j) == slice s (i + 1) j)) + [SMTPat (tail (slice s i j))] + +val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1} + -> Lemma + (requires (s1 == splice s2 i s1 j)) + (ensures (s1 == splice s2 i s1 k)) + +val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1} + -> Lemma + (requires (s1 == splice s2 j s1 k)) + (ensures (s1 == splice s2 i s1 k)) + +val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1} + -> Lemma + (requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j)) + (ensures (s1 == splice s3 i s1 j)) + +val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> 
i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1} + -> Lemma + (requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k))) + (ensures (permutation a (slice s2 i k) (slice s1 i k))) + +val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1} + -> Lemma + (requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j))) + (ensures (permutation a (slice s2 i k) (slice s1 i k))) + +val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1} + -> Lemma + (requires (permutation a (slice s1 i j) (slice s2 i j) + /\ permutation a (slice s2 i j) (slice s3 i j))) + (ensures (permutation a (slice s1 i j) (slice s3 i j))) + + +(*New additions, please review*) + +let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x) + +val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a) + : Lemma (requires True) + (ensures (Seq.equal (cons hd (snoc s tl)) + (snoc (cons hd s) tl))) + +val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a + -> Lemma (ensures (tail (snoc s x) == snoc (tail s) x)) + +val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a + -> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2))) + (ensures (v1 == v2 /\ equal s1 s2)) + +val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a -> + Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y)) + +let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a) +: Tot (o:option a{Some? o ==> f (Some?.v o)}) + (decreases (Seq.length l)) += if Seq.length l = 0 then None + else if f (head l) then Some (head l) + else find_l f (tail l) + +let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a) +: GTot (o:option a{Some? o ==> f (Some?.v o)}) + (decreases (Seq.length l)) += if Seq.length l = 0 then None + else if f (head l) then Some (head l) + else ghost_find_l f (tail l) + +val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma + (requires (Some? (find_l f s1))) + (ensures (find_l f (append s1 s2) == find_l f s1)) + +val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma + (requires (None? (find_l f s1))) + (ensures (find_l f (append s1 s2) == find_l f s2)) + +val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma + (requires (None? (find_l f s2))) + (ensures (find_l f (append s1 s2) == find_l f s1)) + +val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool) + -> Lemma (ensures (let res = find_l f (snoc s x) in + match res with + | None -> find_l f s == None /\ not (f x) + | Some y -> res == find_l f s \/ (f x /\ x==y))) + +let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a & a){s == snoc (fst r) (snd r)}) = + let s', a = split s (length s - 1) in + assert (Seq.equal (snoc s' (Seq.index a 0)) s); + s', Seq.index a 0 + +val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x)) + +let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a) +: Tot (o:option a{Some? o ==> f (Some?.v o)}) + (decreases (Seq.length l)) += if Seq.length l = 0 then None + else let prefix, last = un_snoc l in + if f last then Some last + else find_r f prefix + +type found (i:nat) = True + +let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l}) +: Pure (option a) + (requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}). 
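(* invariant: no element at index ctr or above satisfies f; the search walks downward from the end of the sequence *)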
+ not (f (Seq.index l i) ))) + (ensures (function + | None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i)) + | Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)} + found i /\ x == Seq.index l i))) += match ctr with + | 0 -> None + | _ -> let i = ctr - 1 in + if f (Seq.index l i) + then ( + cut (found i); + Some (Seq.index l i)) + else seq_find_aux f l i + +let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a) +: Pure (option a) + (requires True) + (ensures (function + | None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i)) + | Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)} + found i /\ x == Seq.index l i))) += seq_find_aux f l (Seq.length l) + +val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x}) + : Lemma (requires (mem x s)) + (ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s)))) + +let for_all + (#a: Type) + (f: (a -> Tot bool)) + (l: seq a) +: Pure bool + (requires True) + (ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true)))) += None? (seq_find (fun i -> not (f i)) l) + +val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} -> + Lemma (requires True) + (ensures (mem (Seq.index s n) s)) + [SMTPat (mem (Seq.index s n) s)] + +module L = FStar.List.Tot + +val lemma_seq_of_list_induction (#a:Type) (l:list a) + :Lemma (requires True) + (ensures (let s = seq_of_list l in + match l with + | [] -> Seq.equal s empty + | hd::tl -> s == cons hd (seq_of_list tl) /\ + head s == hd /\ tail s == (seq_of_list tl))) + +val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma + (requires (True)) + (ensures (seq_of_list (seq_to_list s) == s)) + +val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma + (requires (True)) + (ensures (seq_to_list (seq_of_list l) == l)) + +unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 = + normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s + +let createL (#a:Type0) (l:list a) +: Pure (seq a) + (requires True) + (ensures (fun s -> createL_post #a l s)) += let s = seq_of_list l in + lemma_list_seq_bij l; + s + +val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma + (requires True) + (ensures (L.index (seq_to_list s) i == index s i)) + +//////////////////////////////////////////////////////////////////////////////// +//s `contains` x : Type0 +// An undecidable version of `mem`, +// for when the sequence payload is not an eqtype +//////////////////////////////////////////////////////////////////////////////// +[@@ remove_unused_type_parameters [0; 1; 2]] +val contains (#a:Type) (s:seq a) (x:a) : Tot Type0 + +val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a) + : Lemma (k < Seq.length s /\ Seq.index s k == x + ==> + s `contains` x) + +val contains_elim (#a:Type) (s:seq a) (x:a) + : Lemma (s `contains` x + ==> + (exists (k:nat). k < Seq.length s /\ Seq.index s k == x)) + +val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x)) + +val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x) + +val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a) + : Lemma ((append s1 s2) `contains` x + <==> + (s1 `contains` x \/ s2 `contains` x)) + +val contains_snoc : #a:Type -> s:Seq.seq a -> x:a -> + Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y)) + +val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a) + : Lemma (requires True) (ensures Some? 
(find_l f l) ==> l `contains` (Some?.v (find_l f l))) + +val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a) + : Lemma ((cons hd tl) `contains` x + <==> + (x==hd \/ tl `contains` x)) + +val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a) + : Lemma (Seq.equal (Seq.append u (cons x v)) + (Seq.append (snoc u x) v)) + +val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a) + : Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\ + Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\ + (forall (i:nat) (j:nat). + i <= j /\ j <= Seq.length s2 ==> + Seq.equal (Seq.slice s2 i j) + (Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j)))) + + +val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) : + Lemma (requires (None? (find_l f s))) + (ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i)))) + (decreases (Seq.length s)) + +(** More properties, with new naming conventions *) + +let suffix_of + (#a: Type) + (s_suff s: seq a) += exists s_pref . (s == append s_pref s_suff) + +val cons_head_tail + (#a: Type) + (s: seq a {length s > 0}) +: Lemma + (requires True) + (ensures (s == cons (head s) (tail s))) + [SMTPat (cons (head s) (tail s))] + +val head_cons + (#a: Type) + (x: a) + (s: seq a) +: Lemma + (ensures (head (cons x s) == x)) + +val suffix_of_tail + (#a: Type) + (s: seq a {length s > 0}) +: Lemma + (requires True) + (ensures ((tail s) `suffix_of` s)) + [SMTPat ((tail s) `suffix_of` s)] + +val index_cons_l + (#a: Type) + (c: a) + (s: seq a) +: Lemma + (ensures (index (cons c s) 0 == c)) + +val index_cons_r + (#a: Type) + (c: a) + (s: seq a) + (i: nat {1 <= i /\ i <= length s}) +: Lemma + (ensures (index (cons c s) i == index s (i - 1))) + +val append_cons + (#a: Type) + (c: a) + (s1 s2: seq a) +: Lemma + (ensures (append (cons c s1) s2 == cons c (append s1 s2))) + +val index_tail + (#a: Type) + (s: seq a {length s > 0}) + (i: nat {i < length s - 1} ) +: Lemma + (ensures (index (tail s) i == index s (i + 1))) + +val mem_cons + (#a:eqtype) + (x:a) + (s:seq a) +: Lemma + (ensures (forall y. 
mem y (cons x s) <==> mem y s \/ x=y)) + +val snoc_slice_index + (#a: Type) + (s: seq a) + (i: nat) + (j: nat {i <= j /\ j < length s} ) +: Lemma + (requires True) + (ensures (snoc (slice s i j) (index s j) == slice s i (j + 1))) + [SMTPat (snoc (slice s i j) (index s j))] + +val cons_index_slice + (#a: Type) + (s: seq a) + (i: nat) + (j: nat {i < j /\ j <= length s} ) + (k:nat{k == i+1}) +: Lemma + (requires True) + (ensures (cons (index s i) (slice s k j) == slice s i j)) + [SMTPat (cons (index s i) (slice s k j))] + +val slice_is_empty + (#a: Type) + (s: seq a) + (i: nat {i <= length s}) +: Lemma + (requires True) + (ensures (slice s i i == Seq.empty)) + [SMTPat (slice s i i)] + +val slice_length + (#a: Type) + (s: seq a) +: Lemma + (requires True) + (ensures (slice s 0 (length s) == s)) + [SMTPat (slice s 0 (length s))] + +val slice_slice + (#a: Type) + (s: seq a) + (i1: nat) + (j1: nat {i1 <= j1 /\ j1 <= length s} ) + (i2: nat) + (j2: nat {i2 <= j2 /\ j2 <= j1 - i1} ) +: Lemma + (requires True) + (ensures (slice (slice s i1 j1) i2 j2 == slice s (i1 + i2) (i1 + j2))) + [SMTPat (slice (slice s i1 j1) i2 j2)] + +val lemma_seq_of_list_index (#a:Type) (l:list a) (i:nat{i < List.Tot.length l}) + :Lemma (requires True) + (ensures (index (seq_of_list l) i == List.Tot.index l i)) + [SMTPat (index (seq_of_list l) i)] + +[@@(deprecated "seq_of_list")] +let of_list (#a:Type) (l:list a) :seq a = seq_of_list l + +val seq_of_list_tl + (#a: Type) + (l: list a { List.Tot.length l > 0 } ) +: Lemma + (requires True) + (ensures (seq_of_list (List.Tot.tl l) == tail (seq_of_list l))) + +val mem_seq_of_list + (#a: eqtype) + (x: a) + (l: list a) +: Lemma + (requires True) + (ensures (mem x (seq_of_list l) == List.Tot.mem x l)) + [SMTPat (mem x (seq_of_list l))] + +(** Dealing efficiently with `seq_of_list` by meta-evaluating conjunctions over +an entire list. *) + +let rec explode_and (#a: Type) + (i: nat) + (s: seq a { i <= length s }) + (l: list a { List.Tot.length l + i = length s }): + Tot Type + (decreases (List.Tot.length l)) += match l with + | [] -> True + | hd :: tl -> index s i == hd /\ explode_and (i + 1) s tl + +unfold +let pointwise_and s l = + norm [ iota; zeta; primops; delta_only [ `%(explode_and) ] ] (explode_and 0 s l) + +val intro_of_list': #a:Type -> + i:nat -> + s:seq a -> + l:list a -> + Lemma + (requires ( + List.Tot.length l + i = length s /\ + i <= length s /\ + explode_and i s l)) + (ensures ( + equal (seq_of_list l) (slice s i (length s)))) + +val intro_of_list (#a: Type) (s: seq a) (l: list a): + Lemma + (requires ( + List.Tot.length l = length s /\ + pointwise_and s l)) + (ensures ( + s == seq_of_list l)) + +val elim_of_list': #a:Type -> + i:nat -> + s:seq a -> + l:list a -> + Lemma + (requires ( + List.Tot.length l + i = length s /\ + i <= length s /\ + slice s i (length s) == seq_of_list l)) + (ensures ( + explode_and i s l)) + +val elim_of_list (#a: Type) (l: list a): + Lemma + (ensures ( + let s = seq_of_list l in + pointwise_and s l)) + +(****** sortWith ******) +let sortWith (#a:eqtype) (f:a -> a -> Tot int) (s:seq a) :Tot (seq a) + = seq_of_list (List.Tot.Base.sortWith f (seq_to_list s)) + +val lemma_seq_to_list_permutation (#a:eqtype) (s:seq a) + :Lemma (requires True) (ensures (forall x. count x s == List.Tot.Base.count x (seq_to_list s))) (decreases (length s)) + +val lemma_seq_of_list_permutation (#a:eqtype) (l:list a) + :Lemma (forall x. 
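(* element counts are preserved when a list is turned into a sequence; e.g. 1 occurs twice in both [1;1;2] and seq_of_list [1;1;2] *)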
List.Tot.Base.count x l == count x (seq_of_list l) + +val lemma_seq_of_list_sorted (#a:Type) (f:a -> a -> Tot bool) (l:list a) + :Lemma (requires (List.Tot.Properties.sorted f l)) (ensures (sorted f (seq_of_list l))) + +val lemma_seq_sortwith_correctness (#a:eqtype) (f:a -> a -> Tot int) (s:seq a) + :Lemma (requires (total_order a (List.Tot.Base.bool_of_compare f))) + (ensures (let s' = sortWith f s in sorted (List.Tot.Base.bool_of_compare f) s' /\ permutation a s s')) + +(* sort_lseq: + A wrapper of Seq.sortWith which proves that the output sequence + is a sorted permutation of the input sequence with the same length +*) +let sort_lseq (#a:eqtype) #n (f:tot_ord a) (s:lseq a n) + : s':lseq a n{sorted f s' /\ permutation a s s'} = + lemma_seq_sortwith_correctness (L.compare_of_bool f) s; + let s' = sortWith (L.compare_of_bool f) s in + perm_len s s'; + sorted_feq f (L.bool_of_compare (L.compare_of_bool f)) s'; + s' + +let rec foldr (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a) + : Tot a (decreases (length s)) + = if length s = 0 then init + else f (head s) (foldr f (tail s) init) + +let rec foldr_snoc (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a) + : Tot a (decreases (length s)) + = if length s = 0 then init + else let s, last = un_snoc s in + f last (foldr_snoc f s init) + +(****** Seq map ******) + +val map_seq (#a #b:Type) (f:a -> Tot b) (s:Seq.seq a) : Tot (Seq.seq b) + +val map_seq_len (#a #b:Type) (f:a -> Tot b) (s:Seq.seq a) + : Lemma (ensures Seq.length (map_seq f s) == Seq.length s) + +val map_seq_index (#a #b:Type) (f:a -> Tot b) (s:Seq.seq a) (i:nat{i < Seq.length s}) + : Lemma (ensures (map_seq_len f s; Seq.index (map_seq f s) i == f (Seq.index s i))) + +val map_seq_append (#a #b:Type) (f:a -> Tot b) (s1 s2:Seq.seq a) + : Lemma (ensures (map_seq f (Seq.append s1 s2) == + Seq.append (map_seq f s1) (map_seq f s2))) diff --git a/stage0/ulib/FStar.Seq.Sorted.fst b/stage0/ulib/FStar.Seq.Sorted.fst new file mode 100644 index 00000000000..4289f36638e --- /dev/null +++ b/stage0/ulib/FStar.Seq.Sorted.fst @@ -0,0 +1,128 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Seq.Sorted + +open FStar.Seq + +type sorted_pred (#a:eqtype) (f:tot_ord a) (s:seq a) : Type0 = + forall (i j: (k:nat{k<length s})). i <= j ==> f (index s i) (index s j) + +val sorted_pred_tail : + #a:eqtype -> + f:tot_ord a -> + s:seq a{length s > 0} -> + Lemma (requires (sorted_pred #a f s)) (ensures (sorted_pred #a f (tail s))) +let sorted_pred_tail #a f s = () + +val sorted_pred_sorted_lemma : + #a:eqtype -> + f:tot_ord a -> + s:seq a -> + Lemma (requires (sorted_pred f s)) (ensures (sorted #a f s == true)) (decreases (length s)) +let rec sorted_pred_sorted_lemma #a f s = + if length s <= 1 then () + else begin + assert (f (index s 0) (index s 1)) ; + sorted_pred_tail #a f s; + sorted_pred_sorted_lemma #a f (tail s) + end + +let intro_sorted_pred (#a:eqtype) (f:tot_ord a) (s:seq a) + ($g:(i:nat{i < length s} -> j:nat{j < length s} -> Lemma (requires (i <= j)) (ensures (f (index s i) (index s j))))) + : Lemma (sorted_pred #a f s) += let aux (i j : (k:nat{k < length s})) (p:squash (i <= j)) : GTot (squash (f (index s i) (index s j))) = + FStar.Squash.give_proof p ; + g i j ; + FStar.Squash.get_proof (f (index s i) (index s j)) + in + FStar.Classical.forall_intro_2 (fun (i j:(k:nat{k < length s})) -> + FStar.Classical.give_witness (FStar.Classical.arrow_to_impl (aux i j)) <: Lemma (i <= j ==> f (index s i) (index s j))) + +val sorted_pred_cons_lemma : + #a:eqtype -> + f:tot_ord a -> + s:seq a{length s > 1} -> + Lemma (requires (f (index s 0) (index s 1) /\ sorted_pred #a f (tail s))) (ensures (sorted_pred #a f s)) +let sorted_pred_cons_lemma #a f s = + let aux (i j : (k:nat{k < length s})) : Lemma (requires (i <= j)) (ensures (f (index s i) (index s j))) = + if i = 0 then + if j = 0 then () + else assert (f (index s 0) (index (tail s) 0) /\ f (index (tail s) 0) (index (tail s) (j-1))) + else assert (f (index (tail s) (i - 1)) (index (tail s) (j - 1))) + in + intro_sorted_pred #a f s aux + +val sorted_sorted_pred_lemma : + #a:eqtype -> + f:tot_ord a -> + s:seq a -> + Lemma (requires (sorted #a f s == true)) (ensures (sorted_pred #a f s)) (decreases (length s)) +let rec sorted_sorted_pred_lemma #a f s = + if length s = 0 then () + else if length s = 1 then () + else (sorted_sorted_pred_lemma #a f (tail s) ; sorted_pred_cons_lemma #a f s) + +val sorted_pred_slice_lemma : + #a:eqtype -> + f:tot_ord a -> + s:seq a -> + i:nat{i < length s} -> + j:nat{i <= j /\ j <= length s} -> + Lemma (requires (sorted_pred #a f s)) (ensures (sorted_pred #a f (slice s i j))) +let sorted_pred_slice_lemma #a f s i j = () + +val sorted_slice_lemma : + #a:eqtype -> + f:tot_ord a -> + s:seq a -> + i:nat{i < length s} -> + j:nat{i <= j /\ j <= length s} -> + Lemma (requires (sorted #a f s == true)) (ensures (sorted #a f (slice s i j) == true)) +let sorted_slice_lemma #a f s i j = + sorted_sorted_pred_lemma #a f s ; + sorted_pred_slice_lemma #a f s i j ; + sorted_pred_sorted_lemma #a f (slice s i j) + +val sorted_split_lemma : + #a:eqtype -> + f:tot_ord a -> + s:seq a -> + i:nat{i < length s} -> + Lemma (requires (sorted #a f s == true)) + (ensures (let s1, s2 = split s i in sorted #a f s1 == true /\ sorted #a f s2 == true)) +let sorted_split_lemma #a f s i = + sorted_slice_lemma #a f s 0 i ; + sorted_slice_lemma #a f s i (length s) + +val sorted_pred_append_lemma : + #a:eqtype -> + f:tot_ord a -> + s1:seq a -> + s2:seq a -> + Lemma (requires (sorted_pred #a f s1 /\ sorted_pred #a f s2 /\ (length s1 > 0 /\ length s2 > 0 ==> f (last s1) (head s2)))) + (ensures (sorted_pred #a f (append s1 s2))) +let sorted_pred_append_lemma 
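(* the aux step below case-splits on whether each index falls in s1 or in s2, using the boundary hypothesis f (last s1) (head s2) for the mixed case *)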
#a f s1 s2 = + let s = append s1 s2 in + let aux (i j:(k:nat{k < length s})) : Lemma (requires (i <= j)) (ensures (f (index s i) (index s j))) = + if i < length s1 then + if j < length s1 then + assert (f (index s1 i) (index s1 j)) + else + (assert (f (index s1 i) (last s1)) ; assert (f (head s2) (index s2 (j - length s1)))) + else + (assert (j >= length s1) ; assert (f (index s2 (i - length s1)) (index s2 (j - length s1)))) + in + intro_sorted_pred #a f s aux diff --git a/stage0/ulib/FStar.Seq.fst b/stage0/ulib/FStar.Seq.fst new file mode 100644 index 00000000000..41a285f4f3b --- /dev/null +++ b/stage0/ulib/FStar.Seq.fst @@ -0,0 +1,18 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Seq +include FStar.Seq.Base +include FStar.Seq.Properties diff --git a/stage0/ulib/FStar.Set.fst b/stage0/ulib/FStar.Set.fst new file mode 100644 index 00000000000..eefca405718 --- /dev/null +++ b/stage0/ulib/FStar.Set.fst @@ -0,0 +1,60 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Set +(** Computational sets (on eqtypes): membership is a boolean function *) +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" +open FStar.FunctionalExtensionality +module F = FStar.FunctionalExtensionality + +let set (a:eqtype) = F.restricted_t a (fun _ -> bool) + +let equal (#a:eqtype) (s1:set a) (s2:set a) = F.feq s1 s2 + +(* destructors *) + +let mem #a x s = s x + +(* constructors *) +let empty #a = F.on_dom a (fun x -> false) +let singleton #a x = F.on_dom a (fun y -> y = x) +let union #a s1 s2 = F.on_dom a (fun x -> s1 x || s2 x) +let intersect #a s1 s2 = F.on_dom a (fun x -> s1 x && s2 x) +let complement #a s = F.on_dom a (fun x -> not (s x)) +let intension #a f = F.on_dom a f + +(* Properties *) +let mem_empty #a x = () +let mem_singleton #a x y = () +let mem_union #a x s1 s2 = () +let mem_intersect #a x s1 s2 = () +let mem_complement #a x s = () +let mem_intension #a x f = () +let mem_subset #a s1 s2 = () +let subset_mem #a s1 s2 = () + +(* extensionality *) +let lemma_equal_intro #a s1 s2 = () +let lemma_equal_elim #a s1 s2 = () +let lemma_equal_refl #a s1 s2 = () + +let disjoint_not_in_both (a:eqtype) (s1:set a) (s2:set a) : + Lemma + (requires (disjoint s1 s2)) + (ensures (forall (x:a).{:pattern (mem x s1) \/ (mem x s2)} mem x s1 ==> ~(mem x s2))) + [SMTPat (disjoint s1 s2)] += let f (x:a) : Lemma (~(mem x (intersect s1 s2))) = () in + FStar.Classical.forall_intro f diff --git a/stage0/ulib/FStar.Set.fsti b/stage0/ulib/FStar.Set.fsti new file mode 100644 index 00000000000..c9f3a78e02c --- /dev/null +++ b/stage0/ulib/FStar.Set.fsti @@ -0,0 +1,136 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Set +(** Computational sets (on eqtypes): membership is a boolean function *) +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" + +val set (a:eqtype) + : Type0 + +val equal (#a:eqtype) (s1:set a) (s2:set a) + : Type0 + +(* destructors *) + +val mem (#a:eqtype) (x:a) (s:set a) + : Tot bool + +(* constructors *) +val empty (#a:eqtype) + : Tot (set a) + +val singleton (#a:eqtype) (x:a) + : Tot (set a) + +val union : #a:eqtype -> set a -> set a -> Tot (set a) +val intersect : #a:eqtype -> set a -> set a -> Tot (set a) +val complement : #a:eqtype -> set a -> Tot (set a) +val intension : #a:eqtype -> (a -> Tot bool) -> GTot (set a) + +(* Derived functions *) + +let disjoint (#a:eqtype) (s1: set a) (s2: set a) = + equal (intersect s1 s2) empty + +let subset (#a:eqtype) (s1:set a) (s2:set a) = + forall x. 
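(* every member of s1 is also a member of s2; e.g. subset (singleton 1) (union (singleton 1) (singleton 2)) holds *)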
mem x s1 ==> mem x s2 + +let add (#a:eqtype) (x:a) (s:set a) : set a = + union s (singleton x) + +let remove (#a:eqtype) (x:a) (s:set a) : set a = + intersect s (complement (singleton x)) + +(* Properties *) +val mem_empty: #a:eqtype -> x:a -> Lemma + (requires True) + (ensures (not (mem x empty))) + [SMTPat (mem x empty)] + +val mem_singleton: #a:eqtype -> x:a -> y:a -> Lemma + (requires True) + (ensures (mem y (singleton x) = (x=y))) + [SMTPat (mem y (singleton x))] + +val mem_union: #a:eqtype -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (union s1 s2) = (mem x s1 || mem x s2))) + [SMTPat (mem x (union s1 s2))] + +val mem_intersect: #a:eqtype -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (intersect s1 s2) = (mem x s1 && mem x s2))) + [SMTPat (mem x (intersect s1 s2))] + +val mem_complement: #a:eqtype -> x:a -> s:set a -> Lemma + (requires True) + (ensures (mem x (complement s) = not (mem x s))) + [SMTPat (mem x (complement s))] + +val mem_intension: #a:eqtype -> x:a -> f:(a -> Tot bool) -> Lemma + (requires True) + (ensures (mem x (intension f) = f x)) + +val mem_subset: #a:eqtype -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 ==> mem x s2)) + (ensures (subset s1 s2)) + [SMTPat (subset s1 s2)] + +val subset_mem: #a:eqtype -> s1:set a -> s2:set a -> Lemma + (requires (subset s1 s2)) + (ensures (forall x. mem x s1 ==> mem x s2)) + [SMTPat (subset s1 s2)] + +(* extensionality *) +val lemma_equal_intro: #a:eqtype -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 = mem x s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_elim: #a:eqtype -> s1:set a -> s2:set a -> Lemma + (requires (equal s1 s2)) + (ensures (s1 == s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_refl: #a:eqtype -> s1:set a -> s2:set a -> Lemma + (requires (s1 == s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val disjoint_not_in_both (a:eqtype) (s1:set a) (s2:set a) + : Lemma + (requires (disjoint s1 s2)) + (ensures (forall (x:a).{:pattern (mem x s1) \/ (mem x s2)} mem x s1 ==> ~(mem x s2))) + [SMTPat (disjoint s1 s2)] + +(* Converting lists to sets *) + +(* WHY IS THIS HERE? 
It is not strictly part of the interface *) +#reset-options //restore fuel usage here +let rec as_set' (#a:eqtype) (l:list a) : set a = + match l with + | [] -> empty + | hd::tl -> union (singleton hd) (as_set' tl) + +unfold +let as_set (#a:eqtype) (l:list a) = normalize_term (as_set' l) + +let lemma_disjoint_subset (#a:eqtype) (s1:set a) (s2:set a) (s3:set a) + : Lemma (requires (disjoint s1 s2 /\ subset s3 s1)) + (ensures (disjoint s3 s2)) + = () diff --git a/stage0/ulib/FStar.SizeT.fst b/stage0/ulib/FStar.SizeT.fst new file mode 100644 index 00000000000..dbfb19c327b --- /dev/null +++ b/stage0/ulib/FStar.SizeT.fst @@ -0,0 +1,86 @@ +module FStar.SizeT +open FStar.Ghost +module I64 = FStar.Int64 + +(* This is only intended as a model, but will be extracted natively by Krml + with the correct C semantics *) + +(* We assume the existence of some lower bound on the size, + where the bound is at least 2^16 *) +assume +val bound : x:erased nat { x >= pow2 16 } + +type t : eqtype = | Sz : (x:U64.t { U64.v x < bound }) -> t + +let fits x = + FStar.UInt.fits x U64.n == true /\ + x < bound + +let fits_at_least_16 _ = () + +let v x = + U64.v (Sz?.x x) + +irreducible +let uint_to_t x = + Sz (U64.uint_to_t x) + +let size_v_inj (x: t) = () +let size_uint_to_t_inj (x: nat) = () + + +/// These two predicates are only used for modeling purposes, and their definitions must +/// remain abstract to ensure they can only be introduced through a static assert. +/// We simply define them as True here +let fits_u32 = (reveal bound >= pow2 32) == true +let fits_u64 = (reveal bound == pow2 64) + +let fits_u64_implies_fits_32 () + : Lemma + (requires fits_u64) + (ensures fits_u32) + = () + +let fits_u32_implies_fits (x:nat) + : Lemma + (requires fits_u32 /\ x < pow2 32) + (ensures fits x) + = () + +let fits_u64_implies_fits (x:nat) + : Lemma + (requires fits_u64 /\ x < pow2 64) + (ensures fits x) + = () + +let of_u32 (x: U32.t) + = uint_to_t (U32.v x) + +let of_u64 (x: U64.t) + = uint_to_t (U64.v x) + +let uint16_to_sizet x = uint_to_t (U16.v x) +let uint32_to_sizet x = uint_to_t (U32.v x) +let uint64_to_sizet x = uint_to_t (U64.v x) +let sizet_to_uint32 x = FStar.Int.Cast.uint64_to_uint32 (Sz?.x x) +let sizet_to_uint64 x = (Sz?.x x) + +let fits_lte x y = () + +let add x y = Sz <| U64.add x.x y.x +let sub x y = Sz <| U64.sub x.x y.x +let mul x y = Sz <| U64.mul x.x y.x + +let div x y = + let res_n = U64.div x.x y.x in + FStar.Math.Lib.slash_decr_axiom (U64.v x.x) (U64.v y.x); + assert (U64.v res_n < bound); + let res = Sz res_n in + fits_lte (U64.v res.x) (U64.v x.x); + res + +let rem x y = Sz <| U64.rem x.x y.x +let gt x y = U64.gt x.x y.x +let gte x y = U64.gte x.x y.x +let lt x y = U64.lt x.x y.x +let lte x y = U64.lte x.x y.x diff --git a/stage0/ulib/FStar.SizeT.fsti b/stage0/ulib/FStar.SizeT.fsti new file mode 100644 index 00000000000..4e9f42321db --- /dev/null +++ b/stage0/ulib/FStar.SizeT.fsti @@ -0,0 +1,184 @@ +module FStar.SizeT + +open FStar.Mul + +module U16 = FStar.UInt16 +module U32 = FStar.UInt32 +module U64 = FStar.UInt64 + +new +val t : eqtype + +val fits (x: nat) : Tot prop + +/// According to the C standard, "the bit width of t is not less than 16 since c99" +/// (https://en.cppreference.com/w/c/types/size_t) + +val fits_at_least_16 (x:nat) + : Lemma + (requires x < pow2 16) + (ensures fits x) + [SMTPat (fits x)] + +[@@noextract_to "krml"] +val v (x: t) : Pure nat + (requires True) + (ensures (fun y -> fits y)) + +/// We therefore offer two functions to create a t value. 
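(* Illustration only, not part of the original interface: `answer` and `double_sz` are hypothetical helpers that merely exercise the specifications of `uint_to_t`, `v`, `fits` and `add` declared in this file.
   let answer : t = uint_to_t 42   (* provable: 42 < pow2 16, so fits 42 follows from fits_at_least_16 *)
   let double_sz (x: t)
     : Pure t (requires fits (v x + v x))
              (ensures (fun y -> v y == v x + v x))
     = add x x *)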
+/// Any value that fits in a uint_16 can be cast directly to t +/// Any value that might not fit in a uint_16 needs to satisfy the `fits_u32` +/// or `fits_u64` predicates. These predicates can only be introduced through a +/// stateful function (currently in Steel.ST.HigherArray), which will be extracted +/// to a static_assert by krml +val uint_to_t (x: nat) : Pure t + (requires (fits x)) + (ensures (fun y -> v y == x)) + +/// v and uint_to_t are inverses +val size_v_inj (x: t) + : Lemma + (ensures uint_to_t (v x) == x) + [SMTPat (v x)] + +val size_uint_to_t_inj (x: nat) + : Lemma + (requires fits x) + (ensures v (uint_to_t x) == x) + [SMTPat (uint_to_t x)] + +val fits_u32 : prop +val fits_u64 : prop + +val fits_u64_implies_fits_32 (_:unit) + : Lemma + (requires fits_u64) + (ensures fits_u32) + +val fits_u32_implies_fits (x:nat) + : Lemma + (requires fits_u32 /\ x < pow2 32) + (ensures fits x) + +val fits_u64_implies_fits (x:nat) + : Lemma + (requires fits_u64 /\ x < pow2 64) + (ensures fits x) + +/// Creates a size_t when given a uint32 literal. Note, this will not +/// extract if [x] is not a literal (e.g., 12ul). If you want to do a +/// cast, see `uint32_to_sizet` below +noextract inline_for_extraction +val of_u32 (x: U32.t) : Pure t + (requires fits_u32) + (ensures (fun y -> v y == U32.v x)) + +/// Creates a size_t when given a uint64 literal. Note, this will not +/// extract if [x] is not a literal (e.g., 12uL). If you want to do a +/// cast, see `uint64_to_sizet` below +noextract inline_for_extraction +val of_u64 (x: U64.t) : Pure t + (requires fits_u64) + (ensures (fun y -> v y == U64.v x)) + +val uint16_to_sizet (x:U16.t) : Pure t + (requires True) + (ensures fun y -> v y == U16.v x) + +val uint32_to_sizet (x:U32.t) : Pure t + (requires fits_u32) + (ensures fun y -> v y == U32.v x) + +val uint64_to_sizet (x:U64.t) : Pure t + (requires fits_u64) + (ensures fun y -> v y == U64.v x) + +val sizet_to_uint32 (x:t) : Pure U32.t + (requires True) + (ensures fun y -> U32.v y == v x % pow2 32) + +val sizet_to_uint64 (x:t) : Pure U64.t + (requires True) + (ensures fun y -> U64.v y == v x % pow2 64) + +val fits_lte (x y: nat) : Lemma + (requires (x <= y /\ fits y)) + (ensures (fits x)) + [SMTPat (fits x); SMTPat (fits y)] + +(** Non-overflowing arithmetic operations *) + +val add (x y: t) : Pure t + (requires (fits (v x + v y))) + (ensures (fun z -> v z == v x + v y)) + +val sub (x y: t) : Pure t + (requires (v x >= v y)) + (ensures (fun z -> v z == v x - v y)) + +val mul (x y: t) : Pure t + (requires (fits (v x * v y))) + (ensures (fun z -> v z == v x * v y)) + +(** Euclidean division of [a] and [b], with [b] non-zero *) +val div (a:t) (b:t{v b <> 0}) : Pure t + (requires (True)) + (ensures (fun c -> v a / v b = v c)) + +(** Modulo specification, similar to FStar.UInt.mod *) + +let mod_spec (a:nat{fits a}) (b:nat{fits b /\ b <> 0}) : GTot (n:nat{fits n}) = + let open FStar.Mul in + let res = a - ((a/b) * b) in + fits_lte res a; + res + +(** Euclidean remainder + + The result is the modulus of [a] with respect to a non-zero [b] *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires True) + (ensures (fun c -> mod_spec (v a) (v b) = v c)) + +(** Greater than *) +val gt (x y:t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x > v y))) + +(** Greater than or equal *) +val gte (x y:t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x >= v y))) + +(** Less than *) +val lt (x y:t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x < v y))) + +(** Less than or 
equal *) +val lte (x y: t) : Pure bool + (requires True) + (ensures (fun z -> z == (v x <= v y))) + +(** Infix notations *) + +unfold let ( +^ ) = add +unfold let ( -^ ) = sub +unfold let ( *^ ) = mul +unfold let ( %^ ) = rem +unfold let ( >^ ) = gt +unfold let ( >=^ ) = gte +unfold let ( <^ ) = lt +unfold let ( <=^ ) = lte + +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. +//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __uint_to_t (x:int) : Tot t + = assume (x >= 0 /\ fits x); uint_to_t x diff --git a/stage0/ulib/FStar.Squash.fst b/stage0/ulib/FStar.Squash.fst new file mode 100644 index 00000000000..c29493a6f9e --- /dev/null +++ b/stage0/ulib/FStar.Squash.fst @@ -0,0 +1,53 @@ +(* + Copyright 2008-2024 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Squash +open FStar.IndefiniteDescription + +(* This file shows that there is another natural model for some of the + squash things; for this one it doesn't seem to harm importing this + file (exposing the implementation); it probably doesn't help either *) + +let return_squash (#a:Type) x = () + +let bind_squash (#a:Type) (#b:Type) f g = + g (elim_squash f) + +let push_squash (#a:Type) (#b:(a->Type)) f = + return_squash fun x -> elim_squash (f x) + +let get_proof (p:Type) = () + +let give_proof (#p:Type) _ = () + +let proof_irrelevance (p:Type) x y = () + +let squash_double_arrow #a #p f = + bind_squash f push_squash + +let push_sum (#a:Type) (#b:(a -> Type)) ($p : dtuple2 a (fun (x:a) -> squash (b x))) = + match p with + | Mkdtuple2 x y -> + bind_squash #(b x) #(dtuple2 a b) y (fun y' -> + return_squash (Mkdtuple2 x y')) + +let squash_double_sum (#a:Type) (#b:(a -> Type)) (p : squash (dtuple2 a (fun (x:a) -> squash (b x)))) = + bind_squash p (fun p' -> push_sum p') // Need eta... + +let map_squash (#a:Type) (#b:Type) s f = + bind_squash #a #b s (fun x -> return_squash (f x)) + +let join_squash (#a:Type) (x:squash (squash a)) = + bind_squash x (fun x -> x) diff --git a/stage0/ulib/FStar.Squash.fsti b/stage0/ulib/FStar.Squash.fsti new file mode 100644 index 00000000000..c0563201d88 --- /dev/null +++ b/stage0/ulib/FStar.Squash.fsti @@ -0,0 +1,94 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Squash + +/// The module provides an interface to work with [squash] types, F*'s +/// representation for proof-irrelevant propositions. +/// +/// The type [squash p] is defined in [Prims] as [_:unit{p}]. As such, +/// the [squash] type captures the classical logic used in F*'s +/// refinement types, although the interface in this module isn't +/// specifically classical. The module [FStar.Classical] provides +/// further derived forms to manipulate [squash] types. +/// +/// This is inspired in part by: Quotient Types: A Modular +/// Approach. Aleksey Nogin, TPHOLs 2002. +/// http://www.nuprl.org/documents/Nogin/QuotientTypes_02.pdf +/// +/// Broadly, [squash] is a monad, supporting the usual [return] and +/// [bind] operations. +/// +/// Additionally, it supports a [push_squash] operation that relates +/// arrow types and [squash]. + +(** A proof of [a] can be forgotten to create a squashed proof of [a] + *) +val return_squash (#a: Type) (x: a) : Tot (squash a) + +(** Sequential composition of squashed proofs *) +val bind_squash (#a #b: Type) (x: squash a) (f: (a -> GTot (squash b))) : Tot (squash b) + +(** The [push] operation, together with [bind_squash], allows deriving + some of the other operations, notably [squash_double_arrow]. We + rarely use the [push_squash] operation directly. + + One reading of [push f] is that for a function [f] that builds a + proof-irrelevant proof of [b x] for all [x:a], there exists a + proof-irrelevant proof of [forall (x:a). b x]. + + Note: since [f] is not itself squashed, [push_squash f] is not + equal to [f]. *) +val push_squash (#a: Type) (#b: (a -> Type)) (f: (x: a -> Tot (squash (b x)))) + : Tot (squash (x: a -> GTot (b x))) + +/// The pre- and postconditions of [Pure] are equivalent to +/// squashed arguments and results. + +(** [get_proof p], in a context requiring [p] is equivalent to a proof + of [squash p] *) +val get_proof (p: Type) : Pure (squash p) (requires p) (ensures (fun _ -> True)) + +(** [give_proof x], for [x:squash p] is equivalent to ensuring + [p]. *) +val give_proof (#p: Type) (x: squash p) : Pure unit (requires True) (ensures (fun _ -> p)) + +(** All proofs of [squash p] are equal *) +val proof_irrelevance (p: Type) (x y: squash p) : Tot (squash (x == y)) + +(** Squashing the proof of the co-domain of a squashed universal + quantifier is redundant---[squash_double_arrow] allows removing + it. *) +val squash_double_arrow (#a: Type) (#p: (a -> Type)) ($f: (squash (x: a -> GTot (squash (p x))))) + : GTot (squash (x: a -> GTot (p x))) + +(** The analog of [push_squash] for sums (existential quantification) *) +val push_sum (#a: Type) (#b: (a -> Type)) ($p: (dtuple2 a (fun (x: a) -> squash (b x)))) + : Tot (squash (dtuple2 a b)) + +(** The analog of [squash_double_arrow] for sums (existential quantification) *) +val squash_double_sum + (#a: Type) + (#b: (a -> Type)) + ($p: (squash (dtuple2 a (fun (x: a) -> squash (b x))))) + : Tot (squash (dtuple2 a b)) + +(** [squash] is functorial; a ghost function can be mapped over a squash *) +val map_squash (#a #b: Type) (x: squash a) (f: (a -> GTot b)) : Tot (squash b) + +(** [squash] is a monad: double squashing is redundant and can be removed. 
*) +val join_squash (#a: Type) (x: squash (squash a)) : Tot (squash a) + diff --git a/stage0/ulib/FStar.SquashProperties.fst b/stage0/ulib/FStar.SquashProperties.fst new file mode 100644 index 00000000000..d8b9e6664f6 --- /dev/null +++ b/stage0/ulib/FStar.SquashProperties.fst @@ -0,0 +1,176 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.SquashProperties + +open FStar.Constructive + +open FStar.Squash + +val join_squash : #a:Type -> squash (squash a) -> GTot (squash a) +let join_squash #a s = bind_squash #(squash a) #a s (fun x -> x) + +val squash_arrow : #a:Type -> #p:(a -> Type) -> + $f:(x:a -> GTot (squash (p x))) -> GTot (squash (x:a -> GTot (p x))) +let squash_arrow #a #p f = squash_double_arrow (return_squash f) + +val forall_intro : #a:Type -> #p:(a -> Type) -> + $f:(x:a -> Lemma (p x)) -> Lemma (x:a -> GTot (p x))(* (forall (x:a). p x) *) +let forall_intro #a #p f = + let ff : (x:a -> GTot (squash (p x))) = (fun x -> f x; get_proof (p x)) in + give_proof #(x:a -> GTot (p x)) (squash_arrow #a #p ff) + + +// currently unused +// val squash_elim : a:Type -> #b:Type -> t1:b -> t2:b -> +// ( a -> Tot (ceq t1 t2)) -> +// Tot (squash a -> Tot (ceq t1 t2)) + +(* assume val tt (t:Type) : squash t *) + +(* assume val squash_mem_elim : a:Type -> #b:Type -> t1:b -> t2:b -> *) +(* (x:squash a -> t:(squash a -> Type) -> Tot (t ())) -> *) +(* Tot (x:squash a -> t:(squash a -> Type) -> Tot (t x)) *) + +(* get_proof and give_proof are phrased in terms of squash *) + +(* The whole point of defining squash is to soundly allow define excluded_middle; + here this follows from get_proof and give_proof *) + +val bool_of_or : #p:Type -> #q:Type -> Prims.sum p q -> + Tot (b:bool{(b ==> p) /\ (not(b) ==> q)}) +let bool_of_or #p #q t = + match t with + | Prims.Left _ -> true + | Prims.Right _ -> false + +val excluded_middle : p:Type -> GTot (squash (b:bool{b <==> p})) +let excluded_middle (p:Type) = map_squash (join_squash (get_proof (p \/ (~p)))) bool_of_or + + +val excluded_middle_squash : p:Type0 -> GTot (p \/ ~p) +let excluded_middle_squash p = + bind_squash (excluded_middle p) (fun x -> + if x then + map_squash (get_proof p) (Prims.Left #p) + else + return_squash (Prims.Right #_ #(~p) (return_squash (fun (h:p) -> + give_proof (return_squash h); + false_elim #False ())))) + +(* we thought we might prove proof irrelevance by Berardi ... 
but didn't manage *) + +(* Conditional on any Type -- unused below *) +val ifProp: #p:Type0 -> b:Type0 -> e1:squash p -> e2:squash p -> GTot (squash p) +let ifProp #p b e1 e2 = + bind_squash (excluded_middle_squash b) + (fun (x:Prims.sum b (~ b)) -> + match x with + | Prims.Left _ -> e1 + | Prims.Right _ -> e2) + +(* The powerset operator *) +type pow (p:Type) = p -> GTot bool + +noeq type retract 'a 'b : Type = + | MkR: i:('a -> GTot 'b) -> + j:('b -> GTot 'a) -> + inv:(x:'a -> GTot (ceq (j (i x)) x)) -> + retract 'a 'b + +noeq type retract_cond 'a 'b : Type = + | MkC: i2:('a -> GTot 'b) -> + j2:('b -> GTot 'a) -> + inv2:(retract 'a 'b -> x:'a -> GTot (ceq (j2 (i2 x)) x)) -> + retract_cond 'a 'b + +(* unused below *) +val ac: r:retract_cond 'a 'b -> retract 'a 'b -> x:'a -> + GTot (ceq ((MkC?.j2 r) (MkC?.i2 r x)) x) +let ac (MkC _ _ inv2) = inv2 + +let false_elim (#a:Type) (f:False) : Tot a + = match f with + +val l1: (a:Type0) -> (b:Type0) -> GTot (squash (retract_cond (pow a) (pow b))) +let l1 (a:Type) (b:Type) = + bind_squash (excluded_middle_squash (retract (pow a) (pow b))) + (fun (x:Prims.sum (retract (pow a) (pow b)) (~ (retract (pow a) (pow b)))) -> + match x with + | Prims.Left (MkR f0 g0 e) -> + return_squash (MkC f0 g0 (fun _ -> e)) + | Prims.Right nr -> + let f0 (x:pow a) (y:b) = false in + let g0 (x:pow b) (y:a) = false in + map_squash nr (fun (f:(retract (pow a) (pow b) -> GTot False)) -> + MkC f0 g0 (fun r x -> false_elim (f r)))) + +(* The paradoxical set *) +type u = p:Type -> Tot (squash (pow p)) + +(* NS: FAILS TO CHECK BEYOND HERE ... TODO, revisit *) + +(* Bijection between U and (pow U) *) +assume val f : u -> Tot (squash (pow u)) +#set-options "--print_universes" +(* let f x = x u *) //fails here without a means of denoting universes + +// val g : squash (pow U) -> Tot U +// let g sh = fun (x:Type) -> +// let (slX:squash (pow U -> Tot (pow x))) = map_squash (l1 x U) MkC?.j2 in +// let (srU:squash (pow U -> Tot (pow U))) = map_squash (l1 U U) MkC?.i2 in +// bind_squash srU (fun rU -> +// bind_squash slX (fun lX -> +// bind_squash sh (fun h -> +// return_squash (lX (rU h))))) + +// (* This only works if importing FStar.All.fst, which is nonsense *) +// val r : U +// let r = +// let ff : (U -> Tot (squash bool)) = +// (fun (u:U) -> map_squash (u U) (fun uu -> not (uu u))) in +// g (squash_arrow ff) + +(* CH: stopped here *) +(* val not_has_fixpoint : squash (ceq (r U r) (not (r U r))) *) +(* let not_has_fixpoint = Refl #bool #(r U r) *) + + +(* otherwise we could assume proof irrelevance as an axiom; + note that proof relevance shouldn't be derivable for squash types *) +(* val not_provable : unit -> *) +(* Tot (cnot (ceq (return_squash true) (return_squash false))) *) +(* val not_provable : unit -> *) +(* Tot (squash (cnot (ceq (return_squash true) (return_squash false)))) *) + +// type cheq (#a:Type) (x:a) : #b:Type -> b -> Type = +// | HRefl : cheq #a x #a x + +(* val not_provable : unit -> *) +(* Tot (cimp (cheq (return_squash #(b:bool{b=true}) true) *) +(* (return_squash #(b:bool{b=false}) false)) (squash cfalse)) *) +(* let not_provable () = *) +(* (fun h -> match h with *) +(* | HRefl -> *) +(* assert(return_squash #(b:bool{b=true}) true == *) +(* return_squash #(b:bool{b=false}) false); *) +(* bind_squash (return_squash #(b:bool{b=true}) true) (fun btrue -> *) +(* bind_squash (return_squash #(b:bool{b=false}) false) (fun bfalse -> *) +(* assert (btrue <> bfalse); magic()))) *) + +(* TODO: + - play with this a bit more; try out some examples + + search for 
give_proof / get_proof +*) diff --git a/stage0/ulib/FStar.String.fsti b/stage0/ulib/FStar.String.fsti new file mode 100644 index 00000000000..ba6245670af --- /dev/null +++ b/stage0/ulib/FStar.String.fsti @@ -0,0 +1,148 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.String +open FStar.List.Tot +(* String is a primitive type in F*. + + Most of the functions in this interface have a special status in + that they are: + + 1. All the total functions in this module are handled by F*'s + normalizers and can be reduced during typechecking + + 2. All the total functions, plus two functions in the ML effect, + have native OCaml implementations in FStar_String.ml + + These functions are, however, not suitable for use in Low* code, + since many of them incur implicit allocations that must be garbage + collected. + + For strings in Low*, see LowStar.String, LowStar.Literal etc. +*) + +type char = FStar.Char.char + +/// `list_of_string` and `string_of_list`: A pair of coercions to +/// expose and pack a string as a list of characters +val list_of_string : string -> Tot (list char) +val string_of_list : list char -> Tot string + +/// A pair +val string_of_list_of_string (s:string) + : Lemma (string_of_list (list_of_string s) == s) +val list_of_string_of_list (l:list char) + : Lemma (list_of_string (string_of_list l) == l) + +/// `strlen s` counts the number of utf8 values in a string +/// It is not the byte length of a string +let strlen s = List.length (list_of_string s) + +/// `length`, an alias for `strlen` +unfold +let length s = strlen s + +/// `maxlen`: When applied to a literal s of less than n characters, +/// `maxlen s n` reduces to `True` before going to the SMT solver. +/// Otherwise, the left disjunct reduces partially but the right +/// disjunct remains as is, allowing to keep `strlen s <= n` in the +/// context. 
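+/// For instance (an illustrative client, not part of this interface):
+///
+///   let ex_lit : squash (maxlen "hello" 10) = ()
+///   let ex_hyp (s:string{maxlen s 10}) : n:nat{n <= 10} = strlen s
+///
+/// For the literal, the left disjunct reduces to `True` during
+/// typechecking; for the non-literal `s`, it is the retained hypothesis
+/// `strlen s <= 10` that lets `ex_hyp` go through.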
+unfold +let maxlen s n = b2t (normalize_term (strlen s <= n)) \/ strlen s <= n + +/// `make l c`: builds a string of length `l` with each character set +/// to `c` +val make: l:nat -> char -> Tot (s:string {length s = l}) + +/// `string_of_char`: A convenient abbreviation for `make 1 c` +let string_of_char (c:char) : Tot string = make 1 c + +/// `split cs s`: splits the string by delimiters in `cs` +val split: list char -> string -> Tot (list string) + +/// `concat s l` concatentates the strings in `l` delimited by `s` +val concat: string -> list string -> Tot string + +/// `compare s0 s1`: lexicographic ordering on strings +val compare: string -> string -> Tot int + +/// `lowercase`: transform each character to its lowercase variant +val lowercase: string -> Tot string + +/// `uppercase`: transform each character to its uppercase variant +val uppercase: string -> Tot string + +/// `index s n`: returns the nth character in `s` +val index: s:string -> n:nat {n < length s} -> Tot char + +/// `index_of s c`: +/// The first index of `c` in `s` +/// returns -1 if the char is not found, for compatibility with C +val index_of: string -> char -> Tot int + +/// `sub s i len` +/// Second argument is a length, not an index. +/// Returns a substring of length `len` beginning at `i` +val sub: s:string -> i:nat -> l:nat{i + l <= length s} -> Tot (r: string {length r = l}) + +/// `collect f s`: maps `f` over each character of `s` +/// from left to right, appending and flattening the result +[@@(deprecated "FStar.String.collect can be defined using list_of_string and List.collect")] +val collect: (char -> FStar.All.ML string) -> string -> FStar.All.ML string + +/// `substring s i len` +/// A partial variant of `sub s i len` without bounds checks. +/// May fail with index out of bounds +val substring: string -> int -> int -> Ex string + +/// `get s i`: Similar to `index` except it may fail +/// if `i` is out of bounds +val get: string -> int -> Ex char + + +/// Some lemmas (admitted for now as we don't have a model) +val concat_length (s1 s2: string): Lemma + (ensures length (s1 ^ s2) = length s1 + length s2) + +val list_of_concat (s1 s2: string): Lemma + (ensures list_of_string (s1 ^ s2) == list_of_string s1 @ list_of_string s2) + +val index_string_of_list (l:list char) (i : nat{i < List.Tot.length l}) : + Lemma ( + (**) list_of_string_of_list l; // necessary to get equality between the lengths + index (string_of_list l) i == List.Tot.index l i) + +let index_list_of_string (s:string) (i : nat{i < length s}) : + Lemma (List.Tot.index (list_of_string s) i == index s i) = + index_string_of_list (list_of_string s) i; + string_of_list_of_string s + +let concat_injective (s0 s0':string) + (s1 s1':string) + : Lemma + (s0 ^ s1 == s0' ^ s1' /\ + (length s0 == length s0' \/ + length s1 == length s1') ==> + s0 == s0' /\ s1 == s1') + = list_of_concat s0 s1; + list_of_concat s0' s1'; + append_injective (list_of_string s0) + (list_of_string s0') + (list_of_string s1) + (list_of_string s1'); + string_of_list_of_string s0; + string_of_list_of_string s0'; + string_of_list_of_string s1; + string_of_list_of_string s1' diff --git a/stage0/ulib/FStar.StrongExcludedMiddle.fst b/stage0/ulib/FStar.StrongExcludedMiddle.fst new file mode 100644 index 00000000000..3c21369d254 --- /dev/null +++ b/stage0/ulib/FStar.StrongExcludedMiddle.fst @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.StrongExcludedMiddle + +let strong_excluded_middle : p:Type0 -> GTot (b:bool{b = true <==> p}) = + IndefiniteDescription.strong_excluded_middle diff --git a/stage0/ulib/FStar.Stubs.Errors.Msg.fsti b/stage0/ulib/FStar.Stubs.Errors.Msg.fsti new file mode 100644 index 00000000000..536dc55220d --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Errors.Msg.fsti @@ -0,0 +1,54 @@ +module FStar.Stubs.Errors.Msg + +(* Implemented in src/, allows constructing structured pretty-printed error messages. *) + +open FStar.Pprint + +(* An error message is a list of documents. This allows us to print errors like +these: + +* Error 19 at tests/error-messages/Bug1997.fst(92,19-92,49): + - Assertion failed + - The SMT solver could not prove the query. Use --query_stats for more details. + - Also see: Prims.fst(96,32-96,42) + +The header is taken from the code and range, and then the documents are rendered +in order. + +`empty` documents in the list are skipped. +*) +type error_message = list document + +(* A helper for creating errors from strings, only to be used for text. +This will split the string into words and format is a paragraph. + +If you call this with a string containing a pretty-printed term (or +anything else) all its formatting will be lost. You should instead use +[term_to_doc] or similar to work with the documents directly, or as a +last resort use doc_of_string. *) +val text : string -> document + +(* Makes an indented sublist using bullet as a header for each list element. *) +val sublist : bullet:document -> elems:list document -> document + +(* == sublist (doc_of_string "- ") *) +val bulleted : list document -> document + +(* Create a simple error message from a string. If the string is just +text and can be long, please use [text] instead. On the other hand, if +you need to respect indentation/spacing in the string, then use this +one, but if that's the case it's probably better to build a doc instead +of lifting from a string. NB: mkmsg s is equal to [doc_of_string s]. *) +val mkmsg : string -> error_message + +(* A nested document that can be concatenated with another one *) +val subdoc : document -> document + +(* Only to be used by FStar.Errors *) +val renderdoc : document -> string + +(* Returns a document with the current stack trace *) +val backtrace_doc : unit -> document + +(* Render an error message as a string. *) +val rendermsg : error_message -> string diff --git a/stage0/ulib/FStar.Stubs.Reflection.Types.fsti b/stage0/ulib/FStar.Stubs.Reflection.Types.fsti new file mode 100644 index 00000000000..ff9504bee98 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Reflection.Types.fsti @@ -0,0 +1,43 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.Reflection.Types + +include FStar.Range +include FStar.Sealed + +assume new type namedv // named fresh variable +assume new type bv // db variable +assume new type binder + +assume new type term +assume new type env +assume new type fv +assume new type comp +assume new type sigelt // called `def` in the paper, but we keep the internal name here +assume new type ctx_uvar_and_subst +assume new type letbinding + +assume new type ident +assume new type universe_uvar +assume new type universe + +type name : eqtype = list string + +type univ_name = ident +type typ = term +type binders = list binder + +type decls = list sigelt diff --git a/stage0/ulib/FStar.Stubs.Reflection.V1.Builtins.fsti b/stage0/ulib/FStar.Stubs.Reflection.V1.Builtins.fsti new file mode 100644 index 00000000000..0acc9a7ad06 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Reflection.V1.Builtins.fsti @@ -0,0 +1,142 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.Reflection.V1.Builtins + +open FStar.Order +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V1.Data +open FStar.Stubs.VConfig + +(*** Views ***) + +(* NOTE: You probably want inspect/pack from FStar.Tactics, which work + * over a fully named representation. If you use these, you have to + * work with de Bruijn indices (using Tv_BVar). The only reason these + * two exists is that they can be made Tot, and hence can be used in + * specifications. *) + +(** "Inspecting" a term: reveal one level of its syntax via the type +term_view. + +Crucially, this function guarantees that the result "precedes" the +argument, since it is morally exposing the subterms of [t] in the view. +This can be justified by this model of [term] and [term_view]: + + type term = | Pack of term_view + let pack_ln = Pack + let inspect_ln (Pack tv) = tv + +Where inspect_ln would give exactly this guarantee on its result. Of +course, the [term] type is actually implemented via internal F* terms, +but the inspect and pack should be consistent with this model. +*) + +(* Views *) +val inspect_ln : (t:term) -> tv:term_view{tv << t} +val pack_ln : term_view -> term + +val inspect_comp : (c:comp) -> cv:comp_view{cv << c} +val pack_comp : comp_view -> comp + +val inspect_sigelt : sigelt -> sigelt_view +val pack_sigelt : sigelt_view -> sigelt + +val inspect_fv : fv -> name +val pack_fv : name -> fv + +val inspect_bv : v:bv -> bvv:bv_view {bvv << v} +val pack_bv : bv_view -> bv + +val inspect_lb : lb:letbinding -> lbv:lb_view {lbv << lb} +val pack_lb : lb_view -> letbinding + +val inspect_binder : b:binder -> bv:binder_view {bv << b} +val pack_binder : binder_view -> binder + +val inspect_universe : u:universe -> uv:universe_view{uv << u} +val pack_universe : universe_view -> universe + +(* The bijection lemmas: the view exposes all details of terms. *) +val pack_inspect_inv : (t:term) -> Lemma (~(Tv_Unsupp? 
(inspect_ln t)) ==> pack_ln (inspect_ln t) == t) +val inspect_pack_inv : (tv:term_view) -> Lemma (inspect_ln (pack_ln tv) == tv) + +val pack_inspect_comp_inv : (c:comp) -> Lemma (pack_comp (inspect_comp c) == c) +val inspect_pack_comp_inv : (cv:comp_view) -> Lemma (inspect_comp (pack_comp cv) == cv) + +val inspect_pack_bv (xv:bv_view) : Lemma (inspect_bv (pack_bv xv) == xv) +val pack_inspect_bv (x:bv) : Lemma (pack_bv (inspect_bv x) == x) + +val inspect_pack_binder (bview:binder_view) : Lemma (inspect_binder (pack_binder bview) == bview) +val pack_inspect_binder (b:binder) : Lemma (pack_binder (inspect_binder b) == b) + +val pack_inspect_fv (fv:fv) : Lemma (ensures pack_fv (inspect_fv fv) == fv) +val inspect_pack_fv (nm:name) : Lemma (ensures inspect_fv (pack_fv nm) == nm) + +val pack_inspect_universe (u:universe) : Lemma (pack_universe (inspect_universe u) == u) +val inspect_pack_universe (uv:universe_view) : Lemma (inspect_universe (pack_universe uv) == uv) + +(** These are equivalent to [String.concat "."], [String.split ['.']] + * and [String.compare]. We're only taking them as primitives to break + * the dependency of Reflection/Tactics into * FStar.String, which + * pulls a LOT of modules. *) +val implode_qn : list string -> string +val explode_qn : string -> list string +val compare_string : s1:string -> s2:string -> x:int{x == 0 <==> s1 == s2} + +(** Primitives & helpers *) +val lookup_typ : env -> name -> option sigelt +val compare_bv : bv -> bv -> order +val binders_of_env : env -> binders +val moduleof : env -> name +val lookup_attr : term -> env -> list fv +val all_defs_in_env : env -> list fv +val defs_in_module : env -> name -> list fv +val term_eq : term -> term -> bool +val env_open_modules : env -> list name + +(** [push_binder] extends the environment with a single binder. + This is useful as one traverses the syntax of a term, + pushing binders as one traverses a binder in a lambda, + match, etc. TODO: Should this really be push_bv? *) +val push_binder : env -> binder -> env + +(** Attributes are terms, not to be confused with Prims.attribute. *) +val sigelt_attrs : sigelt -> list term +val set_sigelt_attrs : list term -> sigelt -> sigelt + +(** Setting and reading qualifiers from sigelts *) +val sigelt_quals : sigelt -> list qualifier +val set_sigelt_quals : list qualifier -> sigelt -> sigelt + +(** Obtains the vconfig under which a particular sigelt was typechecked. + This function returns None if "--record_options" was not on when + typechecking the sigelt. *) +val sigelt_opts : sigelt -> option vconfig + +(** Embed a vconfig as a term, for instance to use it with the check_with attribute *) +val embed_vconfig : vconfig -> term + +(** Substitute an open bv (a name) by a term [t1] in a term [t2]. *) +val subst : bv -> t1:term -> t2:term -> term + +(** Close an open binder in a term (i.e. turn the name into a de Bruijn index. *) +val close_term : binder -> term -> term + +(** Get the range of a term, i.e., the source location where it was defined. *) +val range_of_term : term -> range + +(** Get the range of a sigelt, i.e., the source location where it was defined. 
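+    (Illustrative use, not prescribed by this interface: together with
+    [lookup_typ], a metaprogram can report the definition site of a
+    top-level name it has resolved in the environment.)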
*) +val range_of_sigelt : sigelt -> range diff --git a/stage0/ulib/FStar.Stubs.Reflection.V1.Data.fsti b/stage0/ulib/FStar.Stubs.Reflection.V1.Data.fsti new file mode 100644 index 00000000000..edca6b450ec --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Reflection.V1.Data.fsti @@ -0,0 +1,200 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.Reflection.V1.Data + +(* What's this!? Well, the compiler now works fully with V2, so whenever +we need to reason/extract a term, we need things like V2.Tv_App and V2.pack_ln +in scope. So, force them into scope here. *) +open FStar.Stubs.Reflection.V2.Data {} +open FStar.Stubs.Reflection.V2.Builtins {} + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Syntax.Syntax + +(* V1 does not really use the primitive ident type, but this +explicit pair of string & range *) +type ident = string & range +type univ_name = ident + +noeq +type vconst = + | C_Unit : vconst + | C_Int : int -> vconst // Not exposing the full details of our integer repr. + | C_True : vconst + | C_False : vconst + | C_String : string -> vconst + | C_Range : range -> vconst + | C_Reify : vconst + | C_Reflect : name -> vconst + (* TODO: complete *) + +type universes = list universe + +// This is shadowing `pattern` from Prims (for smt_pats) +noeq +type pattern = + | Pat_Constant : vconst -> pattern // A built-in constant + | Pat_Cons : fv -> option universes -> list (pattern & bool) -> pattern + // A fully applied constructor, each boolean marks + // whether the argument was an explicitly-provided + // implicit argument + | Pat_Var : bv -> sealed typ -> pattern // Pattern bound variable + | Pat_Dot_Term : option term -> pattern // Dot pattern: resolved by other elements in the pattern and type + +type branch = pattern & term // | pattern -> term + +noeq +type aqualv = + | Q_Implicit + | Q_Explicit + | Q_Meta of term + +type argv = term & aqualv + +(* The type of a string observable only with a tactic. 
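+   (In practice a metaprogram can observe the underlying string, e.g.
+   via the [unseal] primitive from FStar.Stubs.Tactics.Unseal.)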
+ All values of type ppname_t are provably equal *) +let ppname_t = FStar.Sealed.Inhabited.sealed "" +let as_ppname (x:string) : ppname_t = FStar.Sealed.Inhabited.seal x + +noeq +type bv_view = { + bv_ppname : ppname_t; + bv_index : nat; +} + +noeq +type binder_view = { + binder_bv : bv; + binder_qual : aqualv; + binder_attrs : list term; + binder_sort : typ; +} + +noeq +type universe_view = + | Uv_Zero : universe_view + | Uv_Succ : universe -> universe_view + | Uv_Max : universes -> universe_view + | Uv_BVar : nat -> universe_view + | Uv_Name : univ_name -> universe_view + | Uv_Unif : universe_uvar -> universe_view + | Uv_Unk : universe_view + +noeq +type term_view = + | Tv_Var : v:bv -> term_view + | Tv_BVar : v:bv -> term_view + | Tv_FVar : v:fv -> term_view + | Tv_UInst : v:fv -> us:universes -> term_view + | Tv_App : hd:term -> a:argv -> term_view + | Tv_Abs : bv:binder -> body:term -> term_view + | Tv_Arrow : bv:binder -> c:comp -> term_view + | Tv_Type : universe -> term_view + | Tv_Refine : bv:bv -> sort:typ -> ref:term -> term_view + | Tv_Const : vconst -> term_view + | Tv_Uvar : nat -> ctx_uvar_and_subst -> term_view + | Tv_Let : recf:bool -> attrs:(list term) -> bv:bv -> ty:typ -> def:term -> body:term -> term_view + | Tv_Match : scrutinee:term -> ret:option match_returns_ascription -> brs:(list branch) -> term_view + | Tv_AscribedT : e:term -> t:term -> tac:option term -> use_eq:bool -> term_view + | Tv_AscribedC : e:term -> c:comp -> tac:option term -> use_eq:bool -> term_view + | Tv_Unknown : term_view // An underscore: _ + | Tv_Unsupp : term_view // failed to inspect, not supported + +let notAscription (tv:term_view) : bool = + not (Tv_AscribedT? tv) && not (Tv_AscribedC? tv) + +// Very basic for now +noeq +type comp_view = + | C_Total : ret:typ -> comp_view + | C_GTotal : ret:typ -> comp_view + | C_Lemma : term -> term -> term -> comp_view // pre, post, patterns + | C_Eff : us:universes -> + eff_name:name -> + result:term -> + eff_args:(list argv) -> + decrs:list term -> + comp_view + +(* Constructor for an inductive type. See explanation in +[Sg_Inductive] below. *) +type ctor = name & typ + +noeq +type lb_view = { + lb_fv : fv; + lb_us : list univ_name; + lb_typ : typ; + lb_def : term +} + +noeq +type sigelt_view = + | Sg_Let : + (r:bool) -> + (lbs:list letbinding) -> + sigelt_view + + // Sg_Inductive basically coalesces the Sig_bundle used internally, + // where the type definition and its constructors are split. + // While that might be better for typechecking, this is probably better for metaprogrammers + // (no mutually defined types for now) + | Sg_Inductive : + (nm:name) -> // name of the inductive type being defined + (univs:list univ_name) -> // universe variables + (params:binders) -> // parameters + (typ:typ) -> // the type annotation for the inductive, i.e., indices -> Type #u + (cts:list ctor) -> // the constructors, opened with univs and applied to params already + sigelt_view + + | Sg_Val : + (nm:name) -> + (univs:list univ_name) -> + (typ:typ) -> + sigelt_view + + | Unk + +(* Qualifiers for sigelts, see src/FStar.Syntax.Syntax for an explanation. 
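+   Most constructors below mirror the surface-syntax qualifiers of the
+   same name (e.g. [private], [noeq], [inline_for_extraction]).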
*) +noeq +type qualifier = + | Assumption + | InternalAssumption + | New + | Private + | Unfold_for_unification_and_vcgen + | Visible_default + | Irreducible + | Inline_for_extraction + | NoExtract + | Noeq + | Unopteq + | TotalEffect + | Logic + | Reifiable + | Reflectable of name + | Discriminator of name + | Projector of name & ident + | RecordType of list ident & list ident + | RecordConstructor of list ident & list ident + | Action of name + | ExceptionConstructor + | HasMaskedEffect + | Effect + | OnlyName + +(* Should remove, but there are clients using it. *) +let var : eqtype = nat diff --git a/stage0/ulib/FStar.Stubs.Reflection.V2.Builtins.fsti b/stage0/ulib/FStar.Stubs.Reflection.V2.Builtins.fsti new file mode 100644 index 00000000000..ba1254f927c --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Reflection.V2.Builtins.fsti @@ -0,0 +1,204 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.Reflection.V2.Builtins + +open FStar.Order +open FStar.Stubs.Syntax.Syntax +open FStar.Stubs.VConfig +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data + +(*** Views ***) + +(* NOTE: You probably want inspect/pack from FStar.Tactics, which work + * over a fully named representation. If you use these, you have to + * work with de Bruijn indices (using Tv_BVar). The only reason these + * two exists is that they can be made Tot, and hence can be used in + * specifications. *) + +(** "Inspecting" a term: reveal one level of its syntax via the type +term_view. + +Crucially, this function guarantees that the result "precedes" the +argument, since it is morally exposing the subterms of [t] in the view. +This can be justified by this model of [term] and [term_view]: + + type term = | Pack of term_view + let pack_ln = Pack + let inspect_ln (Pack tv) = tv + +Where inspect_ln would give exactly this guarantee on its result. Of +course, the [term] type is actually implemented via internal F* terms, +but the inspect and pack should be consistent with this model. +*) + +(* Views *) +val inspect_ln : (t:term) -> tv:term_view{tv << t} +val pack_ln : term_view -> term + +val inspect_comp : (c:comp) -> cv:comp_view{cv << c} +val pack_comp : comp_view -> comp + +val inspect_sigelt : sigelt -> sigelt_view +val pack_sigelt : sv:sigelt_view{~(Unk? 
sv)} -> sigelt + +val inspect_fv : fv -> name +val pack_fv : name -> fv + +val inspect_namedv : v:namedv -> nv:namedv_view {nv << v} +val pack_namedv : namedv_view -> namedv + +val inspect_bv : v:bv -> bvv:bv_view {bvv << v} +val pack_bv : bv_view -> bv + +val inspect_lb : lb:letbinding -> lbv:lb_view {lbv << lb} +val pack_lb : lb_view -> letbinding + +val inspect_binder : b:binder -> bv:binder_view {bv << b} +val pack_binder : binder_view -> binder + +val inspect_universe : u:universe -> uv:universe_view{uv << u} +val pack_universe : universe_view -> universe + +val inspect_ident : i:ident -> iv:ident_view{iv << i} +val pack_ident : ident_view -> ident + +(* The bijection lemmas: the view exposes all details of terms. *) +val pack_inspect_inv : (t:term) -> Lemma (~(Tv_Unsupp? (inspect_ln t)) ==> pack_ln (inspect_ln t) == t) +val inspect_pack_inv : (tv:term_view) -> Lemma (inspect_ln (pack_ln tv) == tv) + +val pack_inspect_comp_inv : (c:comp) -> Lemma (pack_comp (inspect_comp c) == c) +val inspect_pack_comp_inv : (cv:comp_view) -> Lemma (inspect_comp (pack_comp cv) == cv) + +val inspect_pack_namedv (xv:namedv_view) : Lemma (inspect_namedv (pack_namedv xv) == xv) +val pack_inspect_namedv (x:namedv) : Lemma (pack_namedv (inspect_namedv x) == x) + +val inspect_pack_bv (xv:bv_view) : Lemma (inspect_bv (pack_bv xv) == xv) +val pack_inspect_bv (x:bv) : Lemma (pack_bv (inspect_bv x) == x) + +val inspect_pack_binder (bview:binder_view) : Lemma (inspect_binder (pack_binder bview) == bview) +val pack_inspect_binder (b:binder) : Lemma (pack_binder (inspect_binder b) == b) + +val pack_inspect_fv (fv:fv) : Lemma (ensures pack_fv (inspect_fv fv) == fv) +val inspect_pack_fv (nm:name) : Lemma (ensures inspect_fv (pack_fv nm) == nm) + +val pack_inspect_universe (u:universe) : Lemma (pack_universe (inspect_universe u) == u) +val inspect_pack_universe (uv:universe_view) : Lemma (inspect_universe (pack_universe uv) == uv) + +val pack_inspect_ident (u:ident) : Lemma (pack_ident (inspect_ident u) == u) +val inspect_pack_ident (uv:ident_view) : Lemma (inspect_ident (pack_ident uv) == uv) + +val pack_inspect_lb (lb:letbinding) : Lemma (pack_lb (inspect_lb lb) == lb) +val inspect_pack_lb (lbv:lb_view) : Lemma (inspect_lb (pack_lb lbv) == lbv) + +val pack_inspect_sigelt (se:sigelt) : Lemma ((~(Unk? (inspect_sigelt se))) ==> pack_sigelt (inspect_sigelt se) == se) +val inspect_pack_sigelt (sev:sigelt_view { ~ (Unk? sev) }) : Lemma (inspect_sigelt (pack_sigelt sev) == sev) + + +val simple_binder_defn (b:binder) : + Lemma (binder_is_simple b <==> + Q_Explicit? (inspect_binder b).qual /\ Nil? (inspect_binder b).attrs) + [SMTPat (binder_is_simple b)] + +(** These are equivalent to [String.concat "."], [String.split ['.']] + * and [String.compare]. We're only taking them as primitives to break + * the dependency of Reflection/Tactics into FStar.String, which + * pulls a LOT of modules. *) +val implode_qn : list string -> string +val explode_qn : string -> list string +val compare_string : s1:string -> s2:string -> x:int{x == 0 <==> s1 == s2} + +(** Lookup a top-level definition in the environment (not necessarily +a type) *) +val lookup_typ : env -> name -> option sigelt + +(** Compare two bvs by their index. This should really not be a +primitive, and just use inspect to compare the index field. *) +[@@(deprecated "Use FStar.Reflection.V2.Derived.compare_bv")] +val compare_bv : bv -> bv -> order + +(** (As above.) Compare two namedv by their uniq. 
This should really not +be a primitive, and just use inspect to compare the uniq field. *) +[@@(deprecated "Use FStar.Reflection.V2.Derived.compare_namedv")] +val compare_namedv : namedv -> namedv -> order + +(** Returns all bindings in an environment *) +val vars_of_env : env -> list binding + +(** Returns the current module of an environment. *) +val moduleof : env -> name + +(** Returns all top-level sigelts marked with a given attribute. The +criterion used is that the [attr] attribute MUST be a top-level name +(Tv_FVar) and any sigelt that has an attribute with [attr] (possibly +applied) is returned. The sigelt can then be inspect to find the +arguments to the attribute, if needed. + +Used e.g. to find all typeclass instances, and read their functional +dependencies. *) +val lookup_attr_ses : attr:term -> env -> list sigelt + +(** As [lookup_attr_ses], but just returns the name associated +to the sigelts. *) +val lookup_attr : term -> env -> list fv + +(** Returns all top-level names in an environment. *) +val all_defs_in_env : env -> list fv + +(** Returns all top-level names in a given module. *) +val defs_in_module : env -> name -> list fv + +(** Compare two terms for equality using the internal implementation. +Deprecated, we should use the userland version instead. *) +[@@(deprecated "Use FStar.Reflection.V2.TermEq.term_eq")] +val term_eq : term -> term -> bool + +(** Return all module names which are opened in the given scope. *) +val env_open_modules : env -> list name + +(** [push_binding] extends the environment with a single binding. +This is useful as one traverses the syntax of a term, +pushing bindings as one traverses (and opens) a binder in a lambda, +match, etc. *) +val push_namedv : env -> namedv -> env + +(** Attributes are terms, not to be confused with Prims.attribute. *) +val sigelt_attrs : sigelt -> list term +val set_sigelt_attrs : list term -> sigelt -> sigelt + +(** Setting and reading qualifiers from sigelts *) +val sigelt_quals : sigelt -> list qualifier +val set_sigelt_quals : list qualifier -> sigelt -> sigelt + +(** Obtains the vconfig under which a particular sigelt was typechecked. + This function returns None if "--record_options" was not on when + typechecking the sigelt. *) +val sigelt_opts : sigelt -> option vconfig + +(** Embed a vconfig as a term, for instance to use it with the check_with attribute *) +val embed_vconfig : vconfig -> term + +(** Apply a substitution on a term *) +val subst_term : subst_t -> term -> term + +(** Apply a substitution on a computation. TODO: userspace? *) +val subst_comp : subst_t -> comp -> comp + +(** Get the range of a term, i.e., the source location where it was defined. *) +val range_of_term : term -> range + +(** Get the range of a sigelt, i.e., the source location where it was defined. *) +val range_of_sigelt : sigelt -> range diff --git a/stage0/ulib/FStar.Stubs.Reflection.V2.Data.fsti b/stage0/ulib/FStar.Stubs.Reflection.V2.Data.fsti new file mode 100644 index 00000000000..a03dcb9ab26 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Reflection.V2.Data.fsti @@ -0,0 +1,243 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.Reflection.V2.Data + +include FStar.Stubs.Syntax.Syntax +open FStar.Stubs.Reflection.Types + +(* The type of a string observable only with a tactic. + All values of type ppname_t are provably equal *) +let ppname_t = FStar.Sealed.Inhabited.sealed "" +let as_ppname (x:string) : ppname_t = FStar.Sealed.Inhabited.seal x + +noeq +type vconst = + | C_Unit : vconst + | C_Int : int -> vconst // Not exposing the full details of our integer repr. + | C_True : vconst + | C_False : vconst + | C_String : string -> vconst + | C_Range : range -> vconst + | C_Reify : vconst + | C_Reflect : name -> vconst + | C_Real : string -> vconst (* Real literals are represented as a string e.g. "1.2" *) + (* TODO: complete *) + +type universes = list universe + +type ident_view = string & range + +noeq +type pattern = + // A built-in constant + | Pat_Constant : + c : vconst -> + pattern + + // A fully applied constructor, each boolean marks whether the + // argument was an explicitly-provided implicit argument + | Pat_Cons : + head : fv -> + univs : option universes -> + subpats : list (pattern & bool) -> + pattern + + // A pattern-bound variable. It has a sealed sort in it. + // This sort is ignored by the typechecker, but may be useful + // for metaprogram to look at heuristically. There is nothing + // else here but a ppname, the variable is referred to by its DB index. + // This means all Pat_Var are provably equal. + | Pat_Var : + sort : sealed term -> + ppname : ppname_t -> + pattern + + // Dot pattern: resolved by other elements in the pattern and type + | Pat_Dot_Term : + t : option term -> + pattern + +type branch = pattern & term // | pattern -> term + +noeq +type aqualv = + | Q_Implicit + | Q_Explicit + | Q_Equality + | Q_Meta of term + +type argv = term & aqualv + +(* A named variable, with a unique identifier *) +noeq +type namedv_view = { + uniq : nat; + sort : sealed typ; // REMOVE? + ppname : ppname_t; +} + +(* A bound variable, with a de Bruijn index *) +noeq +type bv_view = { + index : nat; + sort : sealed typ; // REMOVE? + ppname : ppname_t; +} + +(* Binders consist of a type, qualifiers, and attributes. There is also +a sealed name. *) +noeq +type binder_view = { + sort : typ; + qual : aqualv; + attrs : list term; + ppname : ppname_t; +} + +(* A binding is a variable in the environment. It is like a namedv, but has +an explicit (unsealed) sort *) +noeq +type binding = { + uniq : nat; + sort : typ; + ppname : ppname_t; +} +type bindings = list binding + +(** We use the binder type for letbindings and refinements, +but no qualifiers nor attributes can appear there. We call these +binders simple. This module assumes an abstract predicate +for them, which is later assumed to be equivalent to being a binder +without qualifiers nor attributes (once inspect_binder is in scope). 
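+    Illustratively (a hypothetical view value, not part of this
+    interface): a binder whose view is
+    [{ sort = t; qual = Q_Explicit; attrs = []; ppname = as_ppname "x" }]
+    is simple, whereas one with [qual = Q_Implicit] or a non-empty
+    [attrs] is not.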
*) +val binder_is_simple : binder -> Tot bool + +type simple_binder = b:binder{binder_is_simple b} + +noeq +type universe_view = + | Uv_Zero : universe_view + | Uv_Succ : universe -> universe_view + | Uv_Max : universes -> universe_view + | Uv_BVar : nat -> universe_view + | Uv_Name : univ_name -> universe_view + | Uv_Unif : universe_uvar -> universe_view + | Uv_Unk : universe_view + +noeq +type term_view = + | Tv_Var : v:namedv -> term_view + | Tv_BVar : v:bv -> term_view + | Tv_FVar : v:fv -> term_view + | Tv_UInst : v:fv -> us:universes -> term_view + | Tv_App : hd:term -> a:argv -> term_view + | Tv_Abs : bv:binder -> body:term -> term_view + | Tv_Arrow : bv:binder -> c:comp -> term_view + | Tv_Type : universe -> term_view + | Tv_Refine : b:simple_binder -> ref:term -> term_view + | Tv_Const : vconst -> term_view + | Tv_Uvar : nat -> ctx_uvar_and_subst -> term_view + | Tv_Let : recf:bool -> attrs:(list term) -> b:simple_binder -> def:term -> body:term -> term_view + | Tv_Match : scrutinee:term -> ret:option match_returns_ascription -> brs:(list branch) -> term_view + | Tv_AscribedT : e:term -> t:term -> tac:option term -> use_eq:bool -> term_view + | Tv_AscribedC : e:term -> c:comp -> tac:option term -> use_eq:bool -> term_view + | Tv_Unknown : term_view // An underscore: _ + | Tv_Unsupp : term_view // failed to inspect, not supported + +let notAscription (tv:term_view) : bool = + not (Tv_AscribedT? tv) && not (Tv_AscribedC? tv) + +// Very basic for now +noeq +type comp_view = + | C_Total : ret:typ -> comp_view + | C_GTotal : ret:typ -> comp_view + | C_Lemma : term -> term -> term -> comp_view // pre, post, patterns + | C_Eff : us:universes -> + eff_name:name -> + result:term -> + eff_args:(list argv) -> + decrs:list term -> + comp_view + +(* Constructor for an inductive type. See explanation in +[Sg_Inductive] below. *) +type ctor = name & typ + +noeq +type lb_view = { + lb_fv : fv; + lb_us : list univ_name; + lb_typ : typ; + lb_def : term +} + +noeq +type sigelt_view = + | Sg_Let : + (r:bool) -> + (lbs:list letbinding) -> + sigelt_view + + // Sg_Inductive basically coalesces the Sig_bundle used internally, + // where the type definition and its constructors are split. + // While that might be better for typechecking, this is probably better for metaprogrammers + // (no mutually defined types for now) + | Sg_Inductive : + (nm:name) -> // name of the inductive type being defined + (univs:list univ_name) -> // universe variables + (params:binders) -> // parameters + (typ:typ) -> // the type annotation for the inductive, i.e., indices -> Type #u + (cts:list ctor) -> // the constructors, opened with univs and applied to params already + sigelt_view + + | Sg_Val : + (nm:name) -> + (univs:list univ_name) -> + (typ:typ) -> + sigelt_view + + | Unk + +(* Qualifiers for sigelts, see src/FStar.Syntax.Syntax for an explanation. *) +noeq +type qualifier = + | Assumption + | InternalAssumption + | New + | Private + | Unfold_for_unification_and_vcgen + | Visible_default + | Irreducible + | Inline_for_extraction + | NoExtract + | Noeq + | Unopteq + | TotalEffect + | Logic + | Reifiable + | Reflectable of name + | Discriminator of name + | Projector of name & ident + | RecordType of list ident & list ident + | RecordConstructor of list ident & list ident + | Action of name + | ExceptionConstructor + | HasMaskedEffect + | Effect + | OnlyName + +(* Should remove, but there are clients using it. 
*) +let var : eqtype = nat diff --git a/stage0/ulib/FStar.Stubs.Syntax.Syntax.fsti b/stage0/ulib/FStar.Stubs.Syntax.Syntax.fsti new file mode 100644 index 00000000000..951e963e348 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Syntax.Syntax.fsti @@ -0,0 +1,20 @@ +module FStar.Stubs.Syntax.Syntax +open FStar.Stubs.Reflection.Types + +noeq +type subst_elt = + | DB : int -> namedv -> subst_elt + | DT : int -> term -> subst_elt + | NM : namedv -> int -> subst_elt + | NT : namedv -> term -> subst_elt + | UN : int -> universe -> subst_elt + | UD : ident -> int -> subst_elt +type subst_t = list subst_elt + + +(* + * match e as binder returns t|C + * + * the bool says whether returns (bool = false) or returns$ (bool = true, use type equality + *) +type match_returns_ascription = binder & (either term comp & option term & bool) diff --git a/stage0/ulib/FStar.Stubs.Tactics.Common.fsti b/stage0/ulib/FStar.Stubs.Tactics.Common.fsti new file mode 100644 index 00000000000..019ceaa6181 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Tactics.Common.fsti @@ -0,0 +1,14 @@ +module FStar.Stubs.Tactics.Common + +include FStar.Stubs.Errors.Msg + +(* This module is realized by FStar.Tactics.Common in the F* sources. +Any change must be reflected there. *) + +exception NotAListLiteral + +(* We should attempt to not use this one and define more exceptions +above. *) +exception TacticFailure of error_message & option FStar.Range.range + +exception SKIP diff --git a/stage0/ulib/FStar.Stubs.Tactics.Result.fsti b/stage0/ulib/FStar.Stubs.Tactics.Result.fsti new file mode 100644 index 00000000000..a0a64dee7cb --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Tactics.Result.fsti @@ -0,0 +1,28 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.Tactics.Result + +// This file is never extracted. It's a copy of the one with the same name in +// the compiler. It lives here so that one doesn't need to adjust their load +// path to use tactics from ulib. + +open FStar.Stubs.Tactics.Types + +noeq type __result a = + | Success : v:a -> ps:proofstate -> __result a + | Failed : exn:exn (* Error *) + -> ps:proofstate (* The proofstate at time of failure *) + -> __result a diff --git a/stage0/ulib/FStar.Stubs.Tactics.Types.fsti b/stage0/ulib/FStar.Stubs.Tactics.Types.fsti new file mode 100644 index 00000000000..d3898c04d25 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Tactics.Types.fsti @@ -0,0 +1,74 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Stubs.Tactics.Types + +open FStar.Stubs.Reflection.Types +include FStar.Stubs.Tactics.Common +include FStar.Stubs.TypeChecker.Core + +assume new type goal +assume new type proofstate + +(* Returns the active goals *) +val goals_of : proofstate -> list goal +(* Returns the goals marked for SMT *) +val smt_goals_of : proofstate -> list goal + +(* Inspecting a goal *) +val goal_env : goal -> env +val goal_type : goal -> typ +val goal_witness : goal -> term +val is_guard : goal -> bool (* A bit of helper info: did this goal come from a VC guard? *) + +val get_label : goal -> string +val set_label : string -> goal -> goal + +(* Tracing *) +val incr_depth : proofstate -> proofstate +val decr_depth : proofstate -> proofstate +(* [tracepoint] always returns true. We do not use unit to prevent +erasure. *) +val tracepoint : proofstate -> b:bool{b == true} +val set_proofstate_range : proofstate -> FStar.Range.range -> proofstate + +type direction = + | TopDown + | BottomUp + +type ctrl_flag = + | Continue + | Skip + | Abort + +type guard_policy = + | Goal // Add guards as (normal) goals + | SMT // Add guards as SMT goals + | SMTSync // Send guards to SMT immediately, will *log* errors (not raise) if anything fails + | Force // Force guards without SMT, immediately. Raises an exception on failure. + | ForceSMT // Force guards with SMT, immediately. Raises an exception on failure. + | Drop // Drop guards, clearly unsound! careful! + +(* Typing reflection *) +val non_informative_token (g:env) (t:typ) : Type0 +val subtyping_token (g:env) (t0 t1:typ) : Type0 +val equiv_token (g:env) (t0 t1:typ) : Type0 +val typing_token (g:env) (e:term) (c:tot_or_ghost & typ) : Type0 + +(* Must be inline, this definition is not there in src/FStar.Tactics.Types *) +inline_for_extraction +let issues = list FStar.Issue.issue + +val tref (a:Type) : Type0 diff --git a/stage0/ulib/FStar.Stubs.Tactics.Unseal.fsti b/stage0/ulib/FStar.Stubs.Tactics.Unseal.fsti new file mode 100644 index 00000000000..db321b34fe4 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Tactics.Unseal.fsti @@ -0,0 +1,25 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** +The [unseal] primitive to observe sealed values. +*) +module FStar.Stubs.Tactics.Unseal + +open FStar.Sealed +open FStar.Tactics.Effect + +(** Observe a sealed value. See Sealed.seal too. *) +val unseal : #a:Type -> sealed a -> Tac a diff --git a/stage0/ulib/FStar.Stubs.Tactics.V1.Builtins.fsti b/stage0/ulib/FStar.Stubs.Tactics.V1.Builtins.fsti new file mode 100644 index 00000000000..de18e6838f6 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Tactics.V1.Builtins.fsti @@ -0,0 +1,451 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** +Every tactic primitive, i.e., those built into the compiler +@summary Tactic primitives +*) +module FStar.Stubs.Tactics.V1.Builtins + +open FStar.Stubs.VConfig +open FStar.Stubs.Reflection.V1.Builtins +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V1.Data +open FStar.Reflection.Const +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.Types +include FStar.Stubs.Tactics.Unseal + +(** [top_env] returns the environment where the tactic started running. + * This works even if no goals are present. *) +val top_env : unit -> Tac env + +(** [fresh ()] returns a fresh integer. It does not get reset when +catching a failure. *) +val fresh : unit -> Tac int + +(** [refine_intro] will turn a goal of shape [w : x:t{phi}] +into [w : t] and [phi{w/x}] *) +val refine_intro : unit -> Tac unit + +(** [tc] returns the type of a term in [env], +or fails if it is untypeable. *) +val tc : env -> term -> Tac term + +(** [tcc] like [tc], but returns the full computation type +with the effect label and its arguments (WPs, etc) as well *) +val tcc : env -> term -> Tac comp + +(** [unshelve] creates a goal from a term for its given type. +It can be used when the system decided not to present a goal, but +you want one anyway. For example, if you request a uvar through +[uvar_env] or [fresh_uvar], you might want to instantiate it +explicitly. *) +val unshelve : term -> Tac unit + +(** [unquote t] with turn a quoted term [t] into an actual value, of +any type. This will fail at tactic runtime if the quoted term does not +typecheck to type [a]. *) +val unquote : #a:Type -> term -> Tac a + +(** [catch t] will attempt to run [t] and allow to recover from a +failure. If [t] succeeds with return value [a], [catch t] returns [Inr +a]. On failure, it returns [Inl msg], where [msg] is the error [t] +raised, and all unionfind effects are reverted. See also [recover] and +[or_else]. *) +val catch : #a:Type -> (unit -> Tac a) -> TacS (either exn a) + +(** Like [catch t], but will not discard unionfind effects on failure. *) +val recover : #a:Type -> (unit -> Tac a) -> TacS (either exn a) + +(** [norm steps] will call the normalizer on the current goal's +type and witness, with its reduction behaviour parameterized +by the flags in [steps]. +Currently, the flags (provided in FStar.Pervasives) are +[simpl] (do logical simplifications) +[whnf] (only reduce until weak head-normal-form) +[primops] (performing primitive reductions, such as arithmetic and +string operations) +[delta] (unfold names) +[zeta] (unroll let rec bindings, but with heuristics to avoid loops) +[zeta_full] (unroll let rec bindings fully) +[iota] (reduce match statements over constructors) +[delta_only] (restrict delta to only unfold this list of fully-qualified identifiers) +*) +val norm : list norm_step -> Tac unit + +(** [norm_term_env e steps t] will call the normalizer on the term [t] +using the list of steps [steps], over environment [e]. The list has the same meaning as for [norm]. 
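+    For instance (an illustrative call, not prescribed here),
+    [norm_term_env e [primops] (`(1 + 1))] is expected to return a term
+    for [2], while an empty step list leaves the term essentially
+    unchanged.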
*) +val norm_term_env : env -> list norm_step -> term -> Tac term + +(** [norm_binder_type steps b] will call the normalizer on the type of the [b] +binder for the current goal. Notably, this cannot be done via binder_retype and norm, +because of uvars being resolved to lambda-abstractions. *) +val norm_binder_type : list norm_step -> binder -> Tac unit + +(** [intro] pushes the first argument of an arrow goal into the +environment, turning [Gamma |- ?u : x:a -> b] into [Gamma, x:a |- ?u' : b]. +Note that this does not work for logical implications/forall. See +FStar.Tactics.Logic for that. +*) +val intro : unit -> Tac binder + +(** Similar to intros, but allows to build a recursive function. +Currently broken (c.f. issue #1103) +*) +val intro_rec : unit -> Tac (binder & binder) + +(** [rename_to b nm] will rename the binder [b] to [nm] in +the environment, goal, and witness in a safe manner. The only use of this +is to make goals and terms more user readable. The primitive returns +the new binder, since the old one disappears from the context. *) +val rename_to : binder -> string -> Tac binder + +(** [revert] pushes out a binder from the environment into the goal type, +so a behaviour opposite to [intros]. +*) +val revert : unit -> Tac unit + +(** [binder_retype] changes the type of a binder in the context. After calling it +with a binder of type `t`, the user is presented with a goal of the form `t == ?u` +to be filled. The original goal (following that one) has the type of `b` in the +context replaced by `?u`. +*) +val binder_retype : binder -> Tac unit + +(** [clear_top] will drop the outermost binder from the environment. +Can only be used if the goal does not at all depend on it. +*) +val clear_top : unit -> Tac unit + +(** [clear] will drop the given binder from the context, is +nothing depends on it. +*) +val clear : binder -> Tac unit + +(** If [b] is a binder of type [v == r], [rewrite b] will rewrite +the variable [v] for [r] everywhere in the current goal type and witness/ +*) +val rewrite : binder -> Tac unit + +(** First boolean is whether to attempt to introduce a refinement +before solving. In that case, a goal for the refinement formula will be +added. Second boolean is whether to set the expected type internally. +Just use `exact` from FStar.Tactics.Derived if you don't know what's up +with all this. *) +val t_exact : maybe_refine:bool -> set_expected_typ:bool -> term -> Tac unit + +(** Inner primitive for [apply], takes a boolean specifying whether +to not ask for implicits that appear free in posterior goals, a +boolean specifying whether it's forbidden to instantiate uvars in the +goal, and a boolean specifying whether uvars resolved during unification +of the goal to the term should be typechecked as part of t_apply + +If the third boolean is false, those uvars will be typechecked at the +end by the tactics engine. + +Example: when [uopt] is true, applying transitivity to [|- a = c] +will give two goals, [|- a = ?u] and [|- ?u = c] without asking to +instantiate [?u] since it will most likely be constrained later by +solving these goals. In any case, we track [?u] and will fail if it's +not solved later. + +Example: when [noinst] is true, applying a function returning +[1 = 2] will fail on a goal of the shape [1 = ?u] since it must +instantiate [?u]. We use this in typeclass resolution. + +You may want [apply] from FStar.Tactics.Derived, or one of +the other user facing variants. 
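+Those derived combinators are thin wrappers around this primitive that
+fix the three booleans to sensible defaults.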
+*) +val t_apply : uopt:bool -> noinst:bool -> tc_resolved_uvars:bool -> term -> Tac unit + +(** [t_apply_lemma ni nilhs l] will solve a goal of type [squash phi] +when [l] is a Lemma ensuring [phi]. The arguments to [l] and its +requires clause are introduced as new goals. As a small optimization, +[unit] arguments are discharged by the engine. For the meaning of +the [noinst] boolean arg see [t_apply], briefly, it does not allow to +instantiate uvars in the goal. The [noinst_lhs] flag is similar, it +forbids instantiating uvars *but only on the LHS of the goal*, provided +the goal is an equality. It is meant to be useful for rewrite-goals, of +the shape [X = ?u]. Setting [noinst] means [noinst_lhs] is ignored. *) +val t_apply_lemma : noinst:bool -> noinst_lhs:bool -> term -> Tac unit +// TODO: do the unit thing too for [apply]. + +(** [print str] has no effect on the proofstate, but will have the side effect +of printing [str] on the compiler's standard output. *) +val print : string -> Tac unit + +(** [debugging ()] returns true if the current module has the debug flag +on, i.e. when [--debug Tac] was passed in. *) +val debugging : unit -> Tac bool + +(** Similar to [print], but will dump a text representation of the proofstate +along with the message. *) +val dump : string -> Tac unit + +(** Similar to [dump], but will print *every* unsolved implicit +in the proofstate, not only the visible/focused goals. When the +[print_resolved] boolean is true, it will also print every solved goal. +Warning, these can be a *lot*. *) +val dump_all : print_resolved:bool -> string -> Tac unit + +(** Will print a goal for every unresolved implicit in the provided goal. *) +val dump_uvars_of : goal -> string -> Tac unit + +(** Solves a goal [Gamma |= squash (l == r)] by attempting to unify +[l] with [r]. This currently only exists because of some universe +problems when trying to [apply] a reflexivity lemma. When [allow_guards] +is [true], it is allowed that (some) guards are raised during the +unification process and added as a single goal to be discharged later. +Currently, the only guards allowed here are for equating refinement +types (e.g. [x:int{x>0}] and [x:int{0 Tac unit + +(** Provides a proof for the equality +[(match e with ... | pi -> ei ...) a1 .. an + == (match e with ... | pi -> e1 a1 .. an)]. +This is particularly useful to rewrite the expression on the left to the +one on the right when the RHS is actually a unification variable. *) +val t_commute_applied_match : unit -> Tac unit + +(** In case there are goals that are already solved which have + non-trivial typing guards, make those guards as explicit proof + obligations in the tactic state, solving any trivial ones by simplification. + See tests/bug-reports/Bug2635.fst for some examples + + Update 11/14/2022: with the introduction of the core typechecker, + this primitive should no longer be necessary. Try using the compat pre core + flags, or `with_compat_pre_core` primitive if your code breaks without + this.*) +[@@deprecated "This will soon be removed, please use compat pre core settings if needed"] +val gather_or_solve_explicit_guards_for_resolved_goals : unit -> Tac unit + +(** [ctrl_rewrite] will traverse the current goal, and call [ctrl] + * repeatedly on subterms. When [ctrl t] returns [(true, _)], the + * tactic will call [rw] with a goal of type [t = ?u], which once + * solved will rewrite [t] to the solution of [?u]. No goal is + * made if [ctrl t] returns [(false, _)]. 
+ * + * The second component of the return value of [ctrl] specifies + * whether for continue descending in the goal or not. It can + * either be: + * - Continue: keep on with further subterms + * - Skip: stop descending in this tree + * - Abort: stop everything + * + * The first argument is the direction, [TopDown] or [BottomUp]. It + * specifies how the AST of the goal is traversed (preorder or postorder). + * + * Note: for [BottomUp] a [Skip] means to stop trying to rewrite everything + * from the current node up to the root, but still consider siblings. This + * means that [ctrl_rewrite BottomUp (fun _ -> (true, Skip)) t] will call [t] + * for every leaf node in the AST. + * + * See [pointwise] and [topdown_rewrite] for more friendly versions. + *) +val ctrl_rewrite : + direction -> + (ctrl : term -> Tac (bool & ctrl_flag)) -> + (rw:unit -> Tac unit) -> + Tac unit + +(** Given the current goal [Gamma |- w : t], +[dup] will turn this goal into +[Gamma |- ?u : t] and +[Gamma |= ?u == w]. It can thus be used to change +a goal's witness in any way needed, by choosing +some [?u] (possibly with exact) and then solving the other goal. +*) +val dup : unit -> Tac unit + +// Proof namespace management +(** [prune "A.B.C"] will mark all top-level definitions in module +[A.B.C] (and submodules of it) to not be encoded to the SMT, for the current goal. +The string is a namespace prefix. [prune ""] will prune everything, but note +that [prune "FStar.S"] will not prune ["FStar.Set"]. *) +val prune : string -> Tac unit + +(** The opposite operation of [prune]. The latest one takes precedence. *) +val addns : string -> Tac unit + +(** Destruct a value of an inductive type by matching on it. The generated +match has one branch for each constructor and is therefore trivially +exhaustive, no VC is generated for that purpose. It returns a list +with the fvars of each constructor and their arities, in the order +they appear as goals. *) +val t_destruct : term -> Tac (list (fv & nat)) + +(** Set command line options for the current goal. Mostly useful to +change SMT encoding options such as [set_options "--z3rlimit 20"]. *) +val set_options : string -> Tac unit + +(** Creates a new, unconstrained unification variable in environment +[env]. The type of the uvar can optionally be provided in [o]. If not +provided, a second uvar is created for the type. *) +val uvar_env : env -> option typ -> Tac term + +(** Creates a new, unconstrained unification variable in environment +[env]. The type of the uvar must be provided and the uvar can be solved +to a Ghost term of that type *) +val ghost_uvar_env : env -> typ -> Tac term + +(** Creates a new, unconstrained universe unification variable. +The returned term is Type (U_Unif ?u). *) +val fresh_universe_uvar : unit -> Tac term + +(** Call the unifier on two terms. The returned boolean specifies +whether unification was possible. When the tactic returns true, the +terms have been unified, instantiating uvars as needed. When false, +unification was not possible and no change to uvars occurs. *) +val unify_env : env -> t1:term -> t2:term -> Tac bool + +(** Similar to [unify_env], but allows for some guards to be raised +during unification (see [t_trefl] for an explanation). Will add a new +goal with the guard. *) +val unify_guard_env : env -> t1:term -> t2:term -> Tac bool + +(** Check if [t1] matches [t2], i.e., whether [t2] can have its uvars +instantiated into unifying with [t1]. 
When the tactic returns true, the +terms have been unified, instantiating uvars as needed. When false, +matching was not possible and no change to uvars occurs. *) +val match_env : env -> t1:term -> t2:term -> Tac bool + +(** Launches an external process [prog] with arguments [args] and input +[input] and returns the output. For security reasons, this can only be +performed when the `--unsafe_tactic_exec` options was provided for the +current F* invocation. The tactic will fail if this is not so. *) +val launch_process : string -> list string -> string -> Tac string + +(** Get a fresh bv of some name and type. The name is only useful +for pretty-printing, since there is a fresh inaccessible integer within +the bv too. *) +val fresh_bv_named : string -> Tac bv + +(** Change the goal to another type, given that it is convertible +to the current type. *) +val change : typ -> Tac unit + +(** Get the current guard policy. The guard policy specifies what should +be done when a VC arises internally from the tactic engine. Options +are SMT (mark it as an SMT goal), Goal (add it as an extra goal) +and Force (only allow trivial guards, that need no SMT. *) +val get_guard_policy : unit -> Tac guard_policy + +(** Set the current guard policy. See [get_guard_policy} for an explanation *) +val set_guard_policy : guard_policy -> Tac unit + +(** [lax_on] returns true iff the current environment has the +`--admit_smt_queries true` option set, and thus drops all verification conditions. *) +val lax_on : unit -> Tac bool + +(** Admit the current goal and set the witness to the given term. +Absolutely unsafe. Raises a warning. *) +val tadmit_t : term -> Tac unit + +(** View a term in a fully-named representation *) +[@@coercion] +val inspect : term -> Tac term_view + +(** Pack a term view on a fully-named representation back into a term. +Note: calling this with Tv_Unsupp will raise an exception. *) +[@@coercion] +val pack : term_view -> Tac term + +(** Similar to [pack] above, but does not flatten arrows, it leaves + then in a curried form instead *) +val pack_curried : term_view -> Tac term + +(** Join the first two goals, which must be irrelevant, in a single +one by finding a maximal prefix of their environment and reverting +appropriately. Useful to minimize SMT queries that share internal +obligations. *) +val join : unit -> Tac unit + +(* Local metastate via a string-keyed map. [lget] fails if the +found element is not typeable at the requested type. *) +val lget : #a:Type -> string -> Tac a +val lset : #a:Type -> string -> a -> Tac unit + +(** Set the current set of active goals at will. Obligations remain +in the implicits. *) +val set_goals : list goal -> Tac unit + +(** Set the current set of SMT goals at will. Obligations remain in the +implicits. TODO: This is a really bad name, there's no special "SMT" +about these goals. *) +val set_smt_goals : list goal -> Tac unit + +(** [curms ()] returns the current (wall) time in milliseconds *) +val curms : unit -> Tac int + +(** [set_urgency u] sets the urgency of error messages. Usually set just +before raising an exception (see e.g. [fail_silently]). *) +val set_urgency : int -> TacS unit + +(** [string_to_term e s] runs the F* parser on the string [s] in the +environment [e], and produces a term. *) +val string_to_term : env -> string -> Tac term + +(** [push_bv_dsenv e id] pushes a identifier [id] into the parsing +environment of [e] an environment. 
It returns a new environment that +has the identifier [id] along with its corresponding bounded +variable. *) +val push_bv_dsenv : env -> string -> Tac (env & bv) + +(** Print a term via the pretty printer. This is considered effectful +since 1) setting options can change the behavior of this function, and +hence is not really pure; and 2) this function could expose details of +the term representation that do not show up in the view, invalidating +the pack_inspect_inv/inspeck_pack_inv lemmas. *) +val term_to_string : term -> Tac string + +(** Print a computation type via the pretty printer. See comment +on [term_to_string]. *) +val comp_to_string : comp -> Tac string + +(** Print a source range as a string *) +val range_to_string : range -> Tac string + +(** A variant of Reflection.term_eq that may inspect more underlying +details of terms. This function could distinguish two _otherwise equal +terms_, but that distinction cannot leave the Tac effect. + +This is only exposed as a migration path. Please use +[Reflection.term_eq] instead. *) +[@@deprecated "Use Reflection.term_eq instead"] +val term_eq_old : term -> term -> Tac bool + +(** Runs the input tactic `f` with compat pre core setting `n`. +It is an escape hatch for maintaining backward compatibility +for code that breaks with the core typechecker. *) +val with_compat_pre_core : #a:Type -> n:int -> f:(unit -> Tac a) -> Tac a + +(** Get the [vconfig], including fuel, ifuel, rlimit, etc, +associated with the current goal. *) +val get_vconfig : unit -> Tac vconfig + +(** Set the [vconfig], including fuel, ifuel, rlimit, etc, associated +with the current goal. This vconfig will be used if the goal is +attempted by SMT at the end of a tactic run. *) +val set_vconfig : vconfig -> Tac unit + +(** Attempt to solve the current goal with SMT immediately, and fail +if it cannot be solved. The vconfig specifies fuels, limits, etc. The +current goal's vconfig is ignored in favor of this one. *) +val t_smt_sync : vconfig -> Tac unit + +(** This returns the free uvars that appear in a term. This is not +a reflection primitive as it depends on the state of the UF graph. *) +val free_uvars : term -> Tac (list int) diff --git a/stage0/ulib/FStar.Stubs.Tactics.V2.Builtins.fsti b/stage0/ulib/FStar.Stubs.Tactics.V2.Builtins.fsti new file mode 100644 index 00000000000..a277a35e88d --- /dev/null +++ b/stage0/ulib/FStar.Stubs.Tactics.V2.Builtins.fsti @@ -0,0 +1,629 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** +Every tactic primitive, i.e., those built into the compiler +@summary Tactic primitives +*) +module FStar.Stubs.Tactics.V2.Builtins + +open FStar.Stubs.VConfig +open FStar.Stubs.Reflection.Types +open FStar.Reflection.Const +open FStar.Stubs.Reflection.V2.Data +open FStar.Stubs.Reflection.V2.Builtins +open FStar.Tactics.Effect +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.Types +include FStar.Stubs.Tactics.Unseal + +(** Resolve unification variable indirections at the top of the term. 
*) +val compress : term -> Tac term + +(** [top_env] returns the environment where the tactic started running. + * This works even if no goals are present. *) +val top_env : unit -> Tac env + +(** [fresh ()] returns a fresh number generated by incrementing an +internal counter. The counter does not get reset when catching a +failure. *) +val fresh : unit -> Tac nat + +(** [refine_intro] will turn a goal of shape [w : x:t{phi}] +into [w : t] and [phi{w/x}] *) +val refine_intro : unit -> Tac unit + +(** [tc] returns the type of a term in [env], +or fails if it is untypeable. *) +val tc : env -> term -> Tac term + +(** [tcc] like [tc], but returns the full computation type +with the effect label and its arguments (WPs, etc) as well *) +val tcc : env -> term -> Tac comp + +(** [unshelve] creates a goal from a term for its given type. +It can be used when the system decided not to present a goal, but +you want one anyway. For example, if you request a uvar through +[uvar_env] or [fresh_uvar], you might want to instantiate it +explicitly. *) +val unshelve : term -> Tac unit + +(** [unquote t] with turn a quoted term [t] into an actual value, of +any type. This will fail at tactic runtime if the quoted term does not +typecheck to type [a]. *) +val unquote : #a:Type -> term -> Tac a + +(** [catch t] will attempt to run [t] and allow to recover from a +failure. If [t] succeeds with return value [a], [catch t] returns [Inr +a]. On failure, it returns [Inl msg], where [msg] is the error [t] +raised, and all unionfind effects are reverted. See also [recover] and +[or_else]. *) +val catch : #a:Type -> (unit -> Tac a) -> TacS (either exn a) + +(** Like [catch t], but will not discard unionfind effects on failure. *) +val recover : #a:Type -> (unit -> Tac a) -> TacS (either exn a) + +(** [norm steps] will call the normalizer on the current goal's +type and witness, with its reduction behaviour parameterized +by the flags in [steps]. +Currently, the flags (provided in FStar.Pervasives) are +[simpl] (do logical simplifications) +[whnf] (only reduce until weak head-normal-form) +[primops] (performing primitive reductions, such as arithmetic and +string operations) +[delta] (unfold names) +[zeta] (unroll let rec bindings, but with heuristics to avoid loops) +[zeta_full] (unroll let rec bindings fully) +[iota] (reduce match statements over constructors) +[delta_only] (restrict delta to only unfold this list of fully-qualified identifiers) +*) +val norm : list norm_step -> Tac unit + +(** [norm_term_env e steps t] will call the normalizer on the term [t] +using the list of steps [steps], over environment [e]. The list has the same meaning as for [norm]. *) +val norm_term_env : env -> list norm_step -> term -> Tac term + +(** [norm_binding_type steps b] will call the normalizer on the type of the [b] +binding for the current goal. Notably, this cannot be done via var_retype and norm, +because of uvars being resolved to lambda-abstractions. *) +val norm_binding_type : list norm_step -> binding -> Tac unit + +(** [intro] pushes the first argument of an arrow goal into the +environment, turning [Gamma |- ?u : x:a -> b] into [Gamma, x:a |- ?u' : b]. +Note that this does not work for logical implications/forall. See +FStar.Tactics.Logic for that. +*) +val intro : unit -> Tac binding + +(** Like [intro] but pushes many binders at once. The goal has to be +a literal arrow, we will not normalize it nor unfold it. 
This can be +faster than repeatedly calling intros, and furthermore it will solve the +witness to a flat abstraction instead of many nested ones. + +[max] is an upper bound on the amount of binders this will introduce, +only considered when [max >= 0] (so use [-1] for no limit). *) +val intros (max:int) : Tac (list binding) + +(** Similar to intros, but allows to build a recursive function. +Currently broken (c.f. issue #1103) +*) +val intro_rec : unit -> Tac (binding & binding) + +(** [rename_to b nm] will rename the binder [b] to [nm] in +the environment, goal, and witness in a safe manner. The only use of this +is to make goals and terms more user readable. The primitive returns +the new binder, since the old one disappears from the context. *) +val rename_to : binding -> string -> Tac binding + +(** [revert] pushes out a binder from the environment into the goal type, +so a behaviour opposite to [intros]. +*) +val revert : unit -> Tac unit + +(** [var_retype] changes the type of a variable in the context. After +calling it with a binding of type `t`, the user is presented with a goal +of the form `t == ?u` to be filled. The original goal (following that +one) has the type of `b` in the context replaced by `?u`. *) +val var_retype : binding -> Tac unit + +(** [clear_top] will drop the outermost binder from the environment. Can +only be used if the goal does not at all depend on it. *) +val clear_top : unit -> Tac unit + +(** [clear] will drop the given binder from the context, is nothing +depends on it. *) +val clear : binding -> Tac unit + +(** If [b] is a binding of type [v == r], [rewrite b] will rewrite the +variable [v] for [r] everywhere in the current goal type and witness. *) +val rewrite : binding -> Tac unit + +(** [grewrite t1 t2] will rewrite [t1] anywhere it appears in the goal +for [t2]. It will add a goal (after the current one) for [t1 == t2]. *) +val grewrite (t1 t2 : term) : Tac unit + +(** First boolean is whether to attempt to introduce a refinement +before solving. In that case, a goal for the refinement formula will be +added. Second boolean is whether to set the expected type internally. +Just use `exact` from FStar.Tactics.Derived if you don't know what's up +with all this. *) +val t_exact : maybe_refine:bool -> set_expected_typ:bool -> term -> Tac unit + +(** Inner primitive for [apply], takes a boolean specifying whether +to not ask for implicits that appear free in posterior goals, a +boolean specifying whether it's forbidden to instantiate uvars in the +goal, and a boolean specifying whether uvars resolved during unification +of the goal to the term should be typechecked as part of t_apply + +If the third boolean is false, those uvars will be typechecked at the +end by the tactics engine. + +Example: when [uopt] is true, applying transitivity to [|- a = c] +will give two goals, [|- a = ?u] and [|- ?u = c] without asking to +instantiate [?u] since it will most likely be constrained later by +solving these goals. In any case, we track [?u] and will fail if it's +not solved later. + +Example: when [noinst] is true, applying a function returning +[1 = 2] will fail on a goal of the shape [1 = ?u] since it must +instantiate [?u]. We use this in typeclass resolution. + +You may want [apply] from FStar.Tactics.Derived, or one of +the other user facing variants. +*) +val t_apply : uopt:bool -> noinst:bool -> tc_resolved_uvars:bool -> term -> Tac unit + +(** [t_apply_lemma ni nilhs l] will solve a goal of type [squash phi] +when [l] is a Lemma ensuring [phi]. 
The arguments to [l] and its +requires clause are introduced as new goals. As a small optimization, +[unit] arguments are discharged by the engine. For the meaning of +the [noinst] boolean arg see [t_apply], briefly, it does not allow to +instantiate uvars in the goal. The [noinst_lhs] flag is similar, it +forbids instantiating uvars *but only on the LHS of the goal*, provided +the goal is an equality. It is meant to be useful for rewrite-goals, of +the shape [X = ?u]. Setting [noinst] means [noinst_lhs] is ignored. *) +val t_apply_lemma : noinst:bool -> noinst_lhs:bool -> term -> Tac unit +// TODO: do the unit thing too for [apply]. + +(** [print str] has no effect on the proofstate, but will have the side effect +of printing [str] on the compiler's standard output. *) +val print : string -> Tac unit + +(** [debugging ()] returns true if the current module has the debug flag +on, i.e. when [--debug Tac] was passed in. *) +val debugging : unit -> Tac bool + +(** [ide ()] return true if F* is running in interactive mode. This is +useful to only print diagnostics messages in interactive mode but not in +batch. *) +val ide : unit -> Tac bool + +(** Similar to [print], but will dump a text representation of the proofstate +along with the message. *) +val dump : string -> Tac unit + +(** Similar to [dump], but will print *every* unsolved implicit +in the proofstate, not only the visible/focused goals. When the +[print_resolved] boolean is true, it will also print every solved goal. +Warning, these can be a *lot*. *) +val dump_all : print_resolved:bool -> string -> Tac unit + +(** Will print a goal for every unresolved implicit in the provided goal. *) +val dump_uvars_of : goal -> string -> Tac unit + +(** Solves a goal [Gamma |= squash (l == r)] by attempting to unify +[l] with [r]. This currently only exists because of some universe +problems when trying to [apply] a reflexivity lemma. When [allow_guards] +is [true], it is allowed that (some) guards are raised during the +unification process and added as a single goal to be discharged later. +Currently, the only guards allowed here are for equating refinement +types (e.g. [x:int{x>0}] and [x:int{0 Tac unit + +(** Provides a proof for the equality +[(match e with ... | pi -> ei ...) a1 .. an + == (match e with ... | pi -> e1 a1 .. an)]. +This is particularly useful to rewrite the expression on the left to the +one on the right when the RHS is actually a unification variable. *) +val t_commute_applied_match : unit -> Tac unit + +(** In case there are goals that are already solved which have + non-trivial typing guards, make those guards as explicit proof + obligations in the tactic state, solving any trivial ones by simplification. + See tests/bug-reports/Bug2635.fst for some examples + + Update 11/14/2022: with the introduction of the core typechecker, + this primitive should no longer be necessary. Try using the compat pre core + flags, or `with_compat_pre_core` primitive if your code breaks without + this.*) +[@@deprecated "This will soon be removed, please use compat pre core settings if needed"] +val gather_or_solve_explicit_guards_for_resolved_goals : unit -> Tac unit + +(** [ctrl_rewrite] will traverse the current goal, and call [ctrl] + * repeatedly on subterms. When [ctrl t] returns [(true, _)], the + * tactic will call [rw] with a goal of type [t = ?u], which once + * solved will rewrite [t] to the solution of [?u]. No goal is + * made if [ctrl t] returns [(false, _)]. 
+ * + * The second component of the return value of [ctrl] specifies + * whether for continue descending in the goal or not. It can + * either be: + * - Continue: keep on with further subterms + * - Skip: stop descending in this tree + * - Abort: stop everything + * + * The first argument is the direction, [TopDown] or [BottomUp]. It + * specifies how the AST of the goal is traversed (preorder or postorder). + * + * Note: for [BottomUp] a [Skip] means to stop trying to rewrite everything + * from the current node up to the root, but still consider siblings. This + * means that [ctrl_rewrite BottomUp (fun _ -> (true, Skip)) t] will call [t] + * for every leaf node in the AST. + * + * See [pointwise] and [topdown_rewrite] for more friendly versions. + *) +val ctrl_rewrite : + direction -> + (ctrl : term -> Tac (bool & ctrl_flag)) -> + (rw:unit -> Tac unit) -> + Tac unit + +(** Given the current goal [Gamma |- w : t], +[dup] will turn this goal into +[Gamma |- ?u : t] and +[Gamma |= ?u == w]. It can thus be used to change +a goal's witness in any way needed, by choosing +some [?u] (possibly with exact) and then solving the other goal. +*) +val dup : unit -> Tac unit + +// Proof namespace management +(** [prune "A.B.C"] will mark all top-level definitions in module +[A.B.C] (and submodules of it) to not be encoded to the SMT, for the current goal. +The string is a namespace prefix. [prune ""] will prune everything, but note +that [prune "FStar.S"] will not prune ["FStar.Set"]. *) +val prune : string -> Tac unit + +(** The opposite operation of [prune]. The latest one takes precedence. *) +val addns : string -> Tac unit + +(** Destruct a value of an inductive type by matching on it. The generated +match has one branch for each constructor and is therefore trivially +exhaustive, no VC is generated for that purpose. It returns a list +with the fvars of each constructor and their arities, in the order +they appear as goals. *) +val t_destruct : term -> Tac (list (fv & nat)) + +(** Set command line options for the current goal. Mostly useful to +change SMT encoding options such as [set_options "--z3rlimit 20"]. *) +val set_options : string -> Tac unit + +(** Creates a new, unconstrained unification variable in environment +[env]. The type of the uvar can optionally be provided in [o]. If not +provided, a second uvar is created for the type. *) +val uvar_env : env -> option typ -> Tac term + +(** Creates a new, unconstrained unification variable in environment +[env]. The type of the uvar must be provided and the uvar can be solved +to a Ghost term of that type *) +val ghost_uvar_env : env -> typ -> Tac term + +(** Creates a new, unconstrained universe unification variable. +The returned term is Type (U_Unif ?u). *) +val fresh_universe_uvar : unit -> Tac term + +(** Call the unifier on two terms. The returned boolean specifies +whether unification was possible. When the tactic returns true, the +terms have been unified, instantiating uvars as needed. When false, +unification was not possible and no change to uvars occurs. *) +val unify_env : env -> t1:term -> t2:term -> Tac bool + +(** Similar to [unify_env], but allows for some guards to be raised +during unification (see [t_trefl] for an explanation). Will add a new +goal with the guard. *) +val unify_guard_env : env -> t1:term -> t2:term -> Tac bool + +(** Check if [t1] matches [t2], i.e., whether [t2] can have its uvars +instantiated into unifying with [t1]. 
When the tactic returns true, the
+terms have been unified, instantiating uvars as needed. When false,
+matching was not possible and no change to uvars occurs. *)
+val match_env : env -> t1:term -> t2:term -> Tac bool
+
+(** Launches an external process [prog] with arguments [args] and input
+[input] and returns the output. For security reasons, this can only be
+performed when the `--unsafe_tactic_exec` option was provided for the
+current F* invocation. The tactic will fail if this is not so. *)
+val launch_process : string -> list string -> string -> Tac string
+
+(** Get a fresh bv of some name and type. The name is only useful
+for pretty-printing, since there is a fresh inaccessible integer within
+the bv too. *)
+(* val fresh_bv_named : string -> Tac bv *)
+
+(** Change the goal to another type, given that it is convertible
+to the current type. *)
+val change : typ -> Tac unit
+
+(** Get the current guard policy. The guard policy specifies what should
+be done when a VC arises internally from the tactic engine. Options
+are SMT (mark it as an SMT goal), Goal (add it as an extra goal)
+and Force (only allow trivial guards, that need no SMT). *)
+val get_guard_policy : unit -> Tac guard_policy
+
+(** Set the current guard policy. See [get_guard_policy] for an explanation *)
+val set_guard_policy : guard_policy -> Tac unit
+
+(** [lax_on] returns true iff the current environment has the
+`--admit_smt_queries true` option set, and thus drops all verification conditions. *)
+val lax_on : unit -> Tac bool
+
+(** Admit the current goal and set the witness to the given term.
+Absolutely unsafe. Raises a warning. *)
+val tadmit_t : term -> Tac unit
+
+(** Join the first two goals, which must be irrelevant, in a single
+one by finding a maximal prefix of their environment and reverting
+appropriately. Useful to minimize SMT queries that share internal
+obligations. *)
+val join : unit -> Tac unit
+
+(* Local metastate via a string-keyed map. [lget] fails if the
+found element is not typeable at the requested type. *)
+val lget : #a:Type -> string -> Tac a
+val lset : #a:Type -> string -> a -> Tac unit
+
+(** Set the current set of active goals at will. Obligations remain
+in the implicits. *)
+val set_goals : list goal -> Tac unit
+
+(** Set the current set of SMT goals at will. Obligations remain in the
+implicits. TODO: This is a really bad name, there's no special "SMT"
+about these goals. *)
+val set_smt_goals : list goal -> Tac unit
+
+(** [curms ()] returns the current (wall) time in milliseconds *)
+val curms : unit -> Tac int
+
+(** [set_urgency u] sets the urgency of error messages. Usually set just
+before raising an exception (see e.g. [fail_silently]). *)
+val set_urgency : int -> TacS unit
+
+(** [set_dump_on_failure b] controls whether the engine will dump out
+the proofstate if a tactic fails with an exception. This is true by
+default, but can be disabled to get less verbosity. *)
+val set_dump_on_failure : bool -> TacS unit
+
+(** [string_to_term e s] runs the F* parser on the string [s] in the
+environment [e], and produces a term. *)
+val string_to_term : env -> string -> Tac term
+
+(** [push_bv_dsenv e id] pushes an identifier [id] into the parsing
+environment of the environment [e]. It returns a new environment that
+has the identifier [id] along with its corresponding binding. *)
+val push_bv_dsenv : env -> string -> Tac (env & binding)
+
+(** Print a term via the pretty printer.
This is considered effectful +since 1) setting options can change the behavior of this function, and +hence is not really pure; and 2) this function could expose details of +the term representation that do not show up in the view, invalidating +the pack_inspect_inv/inspeck_pack_inv lemmas. *) +val term_to_string : term -> Tac string + +(** Print a computation type via the pretty printer. See comment +on [term_to_string]. *) +val comp_to_string : comp -> Tac string + +(** Like term_to_string, but returns an unrendered pretty-printing +document *) +val term_to_doc : term -> Tac Pprint.document + +(** Like comp_to_string, but returns an unrendered pretty-printing +document *) +val comp_to_doc : comp -> Tac Pprint.document + +(** Print a source range as a string *) +val range_to_string : range -> Tac string + +(** A variant of Reflection.term_eq that may inspect more underlying +details of terms. This function could distinguish two _otherwise equal +terms_, but that distinction cannot leave the Tac effect. + +This is only exposed as a migration path. Please use +[Reflection.term_eq] instead. *) +[@@deprecated "Use Reflection.term_eq instead"] +val term_eq_old : term -> term -> Tac bool + +(** Runs the input tactic `f` with compat pre core setting `n`. +It is an escape hatch for maintaining backward compatibility +for code that breaks with the core typechecker. *) +val with_compat_pre_core : #a:Type -> n:int -> f:(unit -> Tac a) -> Tac a + +(** Get the [vconfig], including fuel, ifuel, rlimit, etc, +associated with the current goal. *) +val get_vconfig : unit -> Tac vconfig + +(** Set the [vconfig], including fuel, ifuel, rlimit, etc, associated +with the current goal. This vconfig will be used if the goal is +attempted by SMT at the end of a tactic run. *) +val set_vconfig : vconfig -> Tac unit + +(** Attempt to solve the current goal with SMT immediately, and fail +if it cannot be solved. The vconfig specifies fuels, limits, etc. The +current goal's vconfig is ignored in favor of this one. *) +val t_smt_sync : vconfig -> Tac unit + +(** This returns the free uvars that appear in a term. This is not +a reflection primitive as it depends on the state of the UF graph. *) +val free_uvars : term -> Tac (list int) + +(* Return all k/v pairs in the state. The order is unspecified, +do not rely on it. *) +val all_ext_options : unit -> Tac (list (string & string)) + +(* Lookup a k/v pair in the --ext option state. The empty string +is returned if the key was unset. *) +val ext_getv (k:string) : Tac string + +(* Return all k/v pairs in the state which are within +the given namespace. *) +val ext_getns (ns:string) : Tac (list (string & string)) + +(** The following primitives provide support for local state + during execution of a tactic. + The local state is monotonic, it is not + reverted when the tactic backtracks (using catch e.g.) + *) +val alloc (#a:Type) (x:a) : Tac (tref a) +val read (#a:Type) (r:tref a) : Tac a +val write (#a:Type) (r:tref a) (x:a) : Tac unit + +(***** APIs used in the meta DSL framework *****) + +(** Meta DSL framework is an experimental feature + See examples/dsls/ for more details + Following APIs are part of the framework *) + +(** TODO: maybe the equiv APIs should require typing of the arguments? 
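+
+For orientation, a hypothetical caller (the helper below is invented
+for illustration) would inspect the [option] component of the returned
+pair and ignore the issues:
+
+  let nat_sub_int (g:env) : Tac bool =
+    match check_subtyping g (`nat) (`int) with
+    | (Some _, _) -> true
+    | (None, _) -> false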
*) + +unfold +let ret_t (a:Type) = option a & issues + +val is_non_informative (g:env) (t:typ) + : Tac (ret_t (non_informative_token g t)) + +val check_subtyping (g:env) (t0 t1:typ) + : Tac (ret_t (subtyping_token g t0 t1)) + +val t_check_equiv (smt_ok:bool) (unfolding_ok:bool) (g:env) (t0 t1:typ) + : Tac (ret_t (equiv_token g t0 t1)) + +// +// Compute the type of e using the core typechecker +// +val core_compute_term_type (g:env) (e:term) + : Tac (ret_t (r:(tot_or_ghost & typ){typing_token g e r})) + +// +// Check that e:eff t using the core typechecker +// +val core_check_term (g:env) (e:term) (t:typ) (eff:tot_or_ghost) + : Tac (ret_t (typing_token g e (eff, t))) + +// +// Return eff s.t. e:eff t using the core typechecker +// +val core_check_term_at_type (g:env) (e:term) (t:typ) + : Tac (ret_t (eff:tot_or_ghost{typing_token g e (eff, t)})) + +// +// Instantiate the implicits in e and compute its type +// +val tc_term (g:env) (e:term) + : Tac (ret_t (r:(term & (tot_or_ghost & typ)){typing_token g (fst r) (snd r)})) + +val universe_of (g:env) (e:term) + : Tac (ret_t (u:universe{typing_token g e (E_Total, pack_ln (Tv_Type u))})) + +type prop_validity_token (g:env) (t:term) = + Ghost.erased ( + e:term{typing_token g t (E_Total, pack_ln (Tv_FVar (pack_fv prop_qn))) /\ + typing_token g e (E_Total, t)} + ) + +val check_prop_validity (g:env) (t:term) + : Tac (ret_t (prop_validity_token g t)) + +// Can't immediately move to FStar.Tactics.Types since pattern is not in scope there +val match_complete_token (g:env) (sc:term) (t:typ) (pats:list pattern) (bnds:list (list binding)) + : Type0 + +// Returns elaborated patterns, the bindings for each one, and a token +val check_match_complete (g:env) (sc:term) (t:typ) (pats:list pattern) + : Tac (option (pats_bnds:(list pattern & list (list binding)) + {match_complete_token g sc t (fst pats_bnds) (snd pats_bnds) + /\ List.Tot.length (fst pats_bnds) == List.Tot.length (snd pats_bnds) + /\ List.Tot.length (fst pats_bnds) == List.Tot.length pats})) + +// +// Instantiate implicits in t +// +// When the return value is Some (l, t', ty), +// l is the list of fresh names (fresh w.r.t. g) and types, +// that represent implicits, of corresponding types, the API could not solve for +// +// t' is the elaborated t, and ty is its type +// +val instantiate_implicits (g:env) (t:term) (expected_typ : option term) + : Tac (ret_t (list (namedv & typ) & term & typ)) + +// +// Tries to find substitutions for the names in uvs +// by unifying t0 and t1 +// +// Internally, it creates fresh unification variables for uvs, +// substitutes them in t0 and t1, +// and tries to unify them +// +// The returned list contains uvs that could be solved +// +// uvs names are oldest binding first (i.e., most recent binding at the end of the list) +// +// and under the environment (g, (rev uvs)), t0 and t1 should be well-typed +// +// The API does not provide any guarantees, the caller should typecheck +// that the solutions are well-typed +// +val try_unify (g:env) (uvs:list (namedv & typ)) (t0 t1:term) + : Tac (ret_t (list (namedv & term))) + +val maybe_relate_after_unfolding (g:env) (t1 t2:term) + : Tac (ret_t unfold_side) + +val maybe_unfold_head (g:env) (t0:term) + : Tac (ret_t (t1:term{equiv_token g t0 t1})) + +(** [norm_well_typed_term e steps t] will call the normalizer on the +term [t] using the list of steps [steps], over environment [e]. 
It +differs from norm_term_env in that it will not attempt to typecheck t +(so there is an implicit well-typing precondition for t, which we are +not strcitly requiring yet in reflection primitives) and it will also +return a token for the equivalence between t and t'. *) +val norm_well_typed_term + (g:env) (steps : list norm_step) (t:term) + : Tac (t':term{equiv_token g t t'}) + +val push_open_namespace (g:env) (ns:name) + : Tac env + +val push_module_abbrev (g:env) (n:string) (m:name) + : Tac env + +val resolve_name (g:env) (n:name) + : Tac (option (either bv fv)) + +val log_issues (issues:list FStar.Issue.issue) + : Tac unit + +(* Reentrancy: a metaprogram can spawn a sub-metaprogram to +solve a goal, starting from a clean state, and obtain the witness +that solves it. *) +val call_subtac (g:env) (t : unit -> Tac unit) (u:universe) + (goal_ty : term{typing_token g goal_ty (E_Total, pack_ln (Tv_Type u))}) + : Tac (ret_t (w:term{typing_token g w (E_Total, goal_ty)})) + +val call_subtac_tm + (g:env) (t : term) (u:universe) + (goal_ty : term{typing_token g goal_ty (E_Total, pack_ln (Tv_Type u))}) + : Tac (ret_t (w:term{typing_token g w (E_Total, goal_ty)})) diff --git a/stage0/ulib/FStar.Stubs.TypeChecker.Core.fsti b/stage0/ulib/FStar.Stubs.TypeChecker.Core.fsti new file mode 100644 index 00000000000..e9e96aa1a8e --- /dev/null +++ b/stage0/ulib/FStar.Stubs.TypeChecker.Core.fsti @@ -0,0 +1,30 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.TypeChecker.Core + +// +// A stub for using some type definition from FStar.TypeChecker.Core +// + +type tot_or_ghost = + | E_Total + | E_Ghost + +type unfold_side = + | Left + | Right + | Both + | Neither diff --git a/stage0/ulib/FStar.Stubs.VConfig.fsti b/stage0/ulib/FStar.Stubs.VConfig.fsti new file mode 100644 index 00000000000..5260dbcc515 --- /dev/null +++ b/stage0/ulib/FStar.Stubs.VConfig.fsti @@ -0,0 +1,57 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Stubs.VConfig + +(** This type represents the set of verification-relevant options used + to check a particular definition. It can be read from tactics via + sigelt_opts and set via the check_with attribute. + + This type, and the whole module, mirror FStar.VConfig in F* sources. 
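+
+    A hypothetical sketch of the intended use (the record value
+    [my_vcfg] below is invented for illustration and is not defined in
+    this module):
+
+      [@@check_with my_vcfg]
+      let lemma_example () : Lemma True = ()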
+ *) +type vconfig = { + initial_fuel : int; + max_fuel : int; + initial_ifuel : int; + max_ifuel : int; + detail_errors : bool; + detail_hint_replay : bool; + no_smt : bool; + quake_lo : int; + quake_hi : int; + quake_keep : bool; + retry : bool; + smtencoding_elim_box : bool; + smtencoding_nl_arith_repr : string; + smtencoding_l_arith_repr : string; + smtencoding_valid_intro : bool; + smtencoding_valid_elim : bool; + tcnorm : bool; + no_plugins : bool; + no_tactics : bool; + z3cliopt : list string; + z3smtopt : list string; + z3refresh : bool; + z3rlimit : int; + z3rlimit_factor : int; + z3seed : int; + z3version : string; + trivial_pre_for_unannotated_effectful_fns : bool; + reuse_hint_for : option string; +} + +(** Marker to check a sigelt with a particular vconfig *) +irreducible +let check_with (vcfg : vconfig) : unit = () diff --git a/stage0/ulib/FStar.TSet.fst b/stage0/ulib/FStar.TSet.fst new file mode 100644 index 00000000000..e84f7246884 --- /dev/null +++ b/stage0/ulib/FStar.TSet.fst @@ -0,0 +1,91 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +(** Propositional sets (on any types): membership is a predicate *) +module FStar.TSet + +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" +module P = FStar.PropositionalExtensionality +module F = FStar.FunctionalExtensionality + +(* + * AR: mark it must_erase_for_extraction temporarily until CMI comes in + *) +[@@erasable] +let set a = F.restricted_t a (fun _ -> prop) + +let equal #_ s1 s2 = forall x. 
s1 x <==> s2 x + +let mem x s = s x + +let empty #a = F.on_dom a #(fun _ -> prop) (fun x -> False) +let singleton #a x = F.on_dom a #(fun _ -> prop) (fun y -> y == x) +let union #a s1 s2 = F.on_dom a #(fun _ -> prop) (fun x -> s1 x \/ s2 x) +let intersect #a s1 s2 = F.on_dom a #(fun _ -> prop) (fun x -> s1 x /\ s2 x) +let complement #a s = F.on_dom a #(fun _ -> prop) (fun x -> ~ (s x)) +let intension #a f = F.on_dom a #(fun _ -> prop) f + +let mem_empty #_ _ = () +let mem_singleton #_ _ _ = () +let mem_union #_ _ _ _ = () +let mem_intersect #_ _ _ _ = () +let mem_complement #_ _ _ = () +let mem_subset #_ _ _ = () +let subset_mem #_ _ _ = () +let mem_intension #_ _ _ = () + +let lemma_equal_intro #_ _ _ = () +let lemma_equal_elim #a s1 s2 = PredicateExtensionality.predicateExtensionality a s1 s2 +let lemma_equal_refl #_ _ _ = () + +let tset_of_set #a s = + F.on_dom a #(fun _ -> prop) (fun (x:a) -> squash (b2t (Set.mem x s))) + +#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true" +private let lemma_mem_tset_of_set_l (#a:eqtype) (s:Set.set a) (x:a) + :Lemma (requires True) + (ensures (mem x (tset_of_set s) ==> Set.mem x s)) + = if FStar.StrongExcludedMiddle.strong_excluded_middle (mem x (tset_of_set s)) then + let t1 = mem x (tset_of_set s) in + let t2 = b2t (Set.mem x s) in + let u:(squash t1) = FStar.Squash.get_proof t1 in + let u:(squash (squash t2)) = u in + let u:squash t2 = FStar.Squash.join_squash u in + FStar.Squash.give_proof u + else () +#pop-options + +private let lemma_mem_tset_of_set_r (#a:eqtype) (s:Set.set a) (x:a) + :Lemma (requires True) + (ensures (Set.mem x s ==> mem x (tset_of_set s))) + = if Set.mem x s then + let u:squash (b2t (Set.mem x s)) = () in + let _ = assert (mem x (tset_of_set s) == squash (b2t (Set.mem x s))) in + FStar.Squash.give_proof u + else () + +let lemma_mem_tset_of_set #a s x = lemma_mem_tset_of_set_l #a s x; lemma_mem_tset_of_set_r #a s x + +let filter #a f s = F.on_dom a #(fun _ -> prop) (fun (x:a) -> f x /\ s x) + +let lemma_mem_filter #a f s x = () + +let exists_y_in_s (#a:Type) (#b:Type) (s:set a) (f:a -> Tot b) (x:b) : Tot prop = + exists (y:a). mem y s /\ x == f y + +let map #_ #b f s = F.on_dom b (exists_y_in_s s f) + +let lemma_mem_map #_ #_ _ _ _ = () diff --git a/stage0/ulib/FStar.TSet.fsti b/stage0/ulib/FStar.TSet.fsti new file mode 100644 index 00000000000..47f99d567c9 --- /dev/null +++ b/stage0/ulib/FStar.TSet.fsti @@ -0,0 +1,131 @@ +(* + Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, + Microsoft Research, University of Maryland + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +(** Propositional sets (on any types): membership is a predicate *) +module FStar.TSet + +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" + +(* + * AR: mark it must_erase_for_extraction temporarily until CMI comes in + *) +[@@must_erase_for_extraction; erasable] +val set (a:Type u#a) : Type u#(max 1 a) + +val equal (#a:Type) (s1:set a) (s2:set a) : prop + +(* destructors *) + +val mem : 'a -> set 'a -> prop + +(* constructors *) +val empty : #a:Type -> Tot (set a) +val singleton : #a:Type -> x:a -> Tot (set a) +val union : #a:Type -> x:set a -> y:set a -> Tot (set a) +val intersect : #a:Type -> x:set a -> y:set a -> Tot (set a) +val complement : #a:Type -> x:set a -> Tot (set a) +val intension : #a:Type -> (a -> prop) -> Tot (set a) + +(* ops *) +let subset (#a:Type) (s1:set a) (s2:set a) : Type0 = forall x. mem x s1 ==> mem x s2 + +(* Properties *) +val mem_empty: #a:Type -> x:a -> Lemma + (requires True) + (ensures (~ (mem x empty))) + [SMTPat (mem x empty)] + +val mem_singleton: #a:Type -> x:a -> y:a -> Lemma + (requires True) + (ensures (mem y (singleton x) <==> (x==y))) + [SMTPat (mem y (singleton x))] + +val mem_union: #a:Type -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (union s1 s2) == (mem x s1 \/ mem x s2))) + [SMTPat (mem x (union s1 s2))] + +val mem_intersect: #a:Type -> x:a -> s1:set a -> s2:set a -> Lemma + (requires True) + (ensures (mem x (intersect s1 s2) == (mem x s1 /\ mem x s2))) + [SMTPat (mem x (intersect s1 s2))] + +val mem_complement: #a:Type -> x:a -> s:set a -> Lemma + (requires True) + (ensures (mem x (complement s) == ~(mem x s))) + [SMTPat (mem x (complement s))] + +val mem_subset: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 ==> mem x s2)) + (ensures (subset s1 s2)) + [SMTPat (subset s1 s2)] + +val subset_mem: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (subset s1 s2)) + (ensures (forall x. mem x s1 ==> mem x s2)) + [SMTPat (subset s1 s2)] + +val mem_intension (#a:Type) (x:a) (f:(a -> prop)) +: Lemma + (ensures (mem x (intension f) == f x)) + [SMTPat (mem x (intension f))] + +(* extensionality *) + +val lemma_equal_intro: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (forall x. mem x s1 <==> mem x s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_elim: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (equal s1 s2)) + (ensures (s1 == s2)) + [SMTPat (equal s1 s2)] + +val lemma_equal_refl: #a:Type -> s1:set a -> s2:set a -> Lemma + (requires (s1 == s2)) + (ensures (equal s1 s2)) + [SMTPat (equal s1 s2)] + +val tset_of_set (#a:eqtype) (s:Set.set a) : Tot (set a) + +val lemma_mem_tset_of_set (#a:eqtype) (s:Set.set a) (x:a) + :Lemma (requires True) + (ensures (Set.mem x s <==> mem x (tset_of_set s))) + [SMTPat (mem x (tset_of_set s))] + +val filter (#a:Type) (f:a -> Type0) (s:set a) : Tot (set a) + +val lemma_mem_filter (#a:Type) (f:(a -> Type0)) (s:set a) (x:a) + :Lemma (requires True) + (ensures (mem x (filter f s) <==> mem x s /\ f x)) + [SMTPat (mem x (filter f s))] + +val map (#a:Type) (#b:Type) (f:a -> Tot b) (s:set a) : Tot (set b) + +val lemma_mem_map (#a:Type) (#b:Type) (f:(a -> Tot b)) (s:set a) (x:b) + :Lemma ((exists (y:a). 
{:pattern (mem y s)} mem y s /\ x == f y) <==> mem x (map f s)) + [SMTPat (mem x (map f s))] + +#reset-options +let rec as_set' (#a:Type) (l:list a) : Tot (set a) = + match l with + | [] -> empty + | hd::tl -> union (singleton hd) (as_set' tl) + + +(* unfold let as_set (#a:Type) (l:list a) : set a = *) +(* Prims.norm [zeta; iota; delta_only ["FStar.TSet.as_set'"]] (as_set' l) *) diff --git a/stage0/ulib/FStar.Tactics.Arith.fst b/stage0/ulib/FStar.Tactics.Arith.fst new file mode 100644 index 00000000000..fb5c0e654fd --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Arith.fst @@ -0,0 +1,52 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Arith + +open FStar.Tactics.V2.Bare +open FStar.Reflection.V2.Formula +open FStar.Reflection.V2.Arith + +// decide if the current goal is arith, drop the built representation of it +let is_arith_goal () : Tac bool = + let g = cur_goal () in + match run_tm (is_arith_prop g) with + | Inr _ -> true + | _ -> false + +val split_arith : unit -> Tac unit +let rec split_arith () = + if is_arith_goal () then + begin + prune ""; + addns "Prims"; + smt () + end + else begin + let g = cur_goal () in + match term_as_formula g with + | True_ -> + trivial () + | And l r -> + seq FStar.Tactics.split split_arith + | Implies p q -> + let _ = implies_intro () in + seq split_arith l_revert + | Forall _x _sort _p -> + let bs = forall_intros () in + seq split_arith (fun () -> l_revert_all bs) + | _ -> + () + end diff --git a/stage0/ulib/FStar.Tactics.BV.Lemmas.fst b/stage0/ulib/FStar.Tactics.BV.Lemmas.fst new file mode 100644 index 00000000000..f1162ad1514 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.BV.Lemmas.fst @@ -0,0 +1,38 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.BV.Lemmas + +open FStar.BV +open FStar.UInt + +// using uint_t' instead of uint_t breaks the tactic (goes to inl). 
+ +(* Congruence lemmas *) +let cong_bvand #n #w #x #y #z pf1 pf2 = () +let cong_bvxor #n #w #x #y #z pf1 pf2 = () +let cong_bvor #n #w #x #y #z pf1 pf2 = () +let cong_bvshl #n #w #x #y pf = () +let cong_bvshr #n #w #x #y pf = () +let cong_bvdiv #n #w #x #y pf = () +let cong_bvmod #n #w #x #y pf = () +let cong_bvmul #n #w #x #y pf = () +let cong_bvadd #n #w #x #y #z pf1 pf2 = () +let cong_bvsub #n #w #x #y #z pf1 pf2 = () +let eq_to_bv #n #x #y pf = int2bv_lemma_2 #n x y +let lt_to_bv #n #x #y pf = int2bv_lemma_ult_2 #n x y +let trans #n #x #y #z #w pf1 pf2 pf3 = () +let trans_lt #n #x #y #z #w pf1 pf2 pf3 = () +let trans_lt2 #n #x #y #z #w pf1 pf2 pf3 = int2bv_lemma_ult_2 x y \ No newline at end of file diff --git a/stage0/ulib/FStar.Tactics.BV.Lemmas.fsti b/stage0/ulib/FStar.Tactics.BV.Lemmas.fsti new file mode 100644 index 00000000000..bd5bde0903d --- /dev/null +++ b/stage0/ulib/FStar.Tactics.BV.Lemmas.fsti @@ -0,0 +1,90 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.BV.Lemmas + +open FStar.BV +open FStar.UInt + +// using uint_t' instead of uint_t breaks the tactic (goes to inl). + +(* Congruence lemmas *) +val cong_bvand : #n:pos -> (#w:bv_t n) -> (#x:bv_t n) -> + (#y:bv_t n) -> (#z:bv_t n) -> + squash (w == y) -> squash (x == z) -> + Lemma (bvand #n w x == bvand #n y z) + +val cong_bvxor : #n:pos -> (#w:bv_t n) -> (#x:bv_t n) -> + (#y:bv_t n) -> (#z:bv_t n) -> + squash (w == y) -> squash (x == z) -> + Lemma (bvxor w x == bvxor y z) + +val cong_bvor : #n:pos -> (#w:bv_t n) -> (#x:bv_t n) -> + (#y:bv_t n) -> (#z:bv_t n) -> + squash (w == y) -> squash (x == z) -> + Lemma (bvor w x == bvor y z) + +val cong_bvshl : #n:pos -> (#w:bv_t n) -> (#x:uint_t n) -> + (#y:bv_t n) -> squash (w == y) -> + Lemma (bvshl w x == bvshl y x) + +val cong_bvshr : #n:pos -> #w:bv_t n -> (#x:uint_t n) -> + #y:bv_t n -> squash (w == y) -> + Lemma (bvshr #n w x == bvshr #n y x) + +val cong_bvdiv : #n:pos -> #w:bv_t n -> (#x:uint_t n{x <> 0}) -> + #y:bv_t n -> squash (w == y) -> + Lemma (bvdiv #n w x == bvdiv #n y x) + +val cong_bvmod : #n:pos -> #w:bv_t n -> (#x:uint_t n{x <> 0}) -> + #y:bv_t n -> squash (w == y) -> + Lemma (bvmod #n w x == bvmod #n y x) + +val cong_bvmul : #n:pos -> #w:bv_t n -> (#x:uint_t n) -> + #y:bv_t n -> squash (w == y) -> + Lemma (bvmul #n w x == bvmul #n y x) + +val cong_bvadd : #n:pos -> (#w:bv_t n) -> (#x:bv_t n) -> + (#y:bv_t n) -> (#z:bv_t n) -> + squash (w == y) -> squash (x == z) -> + Lemma (bvadd w x == bvadd y z) + +val cong_bvsub : #n:pos -> (#w:bv_t n) -> (#x:bv_t n) -> + (#y:bv_t n) -> (#z:bv_t n) -> + squash (w == y) -> squash (x == z) -> + Lemma (bvsub w x == bvsub y z) + +(* Used to reduce the initial equation to an equation on bitvectors*) +val eq_to_bv: #n:pos -> (#x:uint_t n) -> (#y:uint_t n) -> + squash (int2bv #n x == int2bv #n y) -> Lemma (x == y) + +val lt_to_bv: #n:pos -> (#x:uint_t n) -> (#y:uint_t n) -> + (b2t (bvult #n (int2bv #n x) (int2bv #n y))) -> Lemma (x < y) + +(* Creates two fresh variables and 
two equations of the form int2bv + x = z /\ int2bv y = w. The above lemmas transform these two + equations before finally instantiating them through reflexivity, + leaving Z3 to solve z = w *) +val trans: #n:pos -> (#x:bv_t n) -> (#y:bv_t n) -> (#z:bv_t n) -> (#w:bv_t n) -> + squash (x == z) -> squash (y == w) -> squash (z == w) -> + Lemma (x == y) + +val trans_lt: #n:pos -> (#x:bv_t n) -> (#y:bv_t n) -> (#z:bv_t n) -> (#w:bv_t n) -> + (eq2 #(bv_t n) x z) -> (eq2 #(bv_t n) y w) -> squash (bvult #n z w) -> + Lemma (bvult #n x y) + +val trans_lt2: #n:pos -> (#x:uint_t n) -> (#y:uint_t n) -> (#z:bv_t n) -> (#w:bv_t n) -> + squash (int2bv #n x == z) -> squash (int2bv #n y == w) -> squash (bvult #n z w) -> + Lemma (x < y) \ No newline at end of file diff --git a/stage0/ulib/FStar.Tactics.BV.fst b/stage0/ulib/FStar.Tactics.BV.fst new file mode 100644 index 00000000000..ffc88862410 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.BV.fst @@ -0,0 +1,124 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.BV + +open FStar.Tactics.V2.Bare +open FStar.Tactics.MApply0 +open FStar.Reflection.V2.Formula +open FStar.Reflection.V2.Arith +open FStar.BV +open FStar.UInt +open FStar.Tactics.BV.Lemmas + +// using uint_t' instead of uint_t breaks the tactic (goes to inl). 
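+
+(* Illustrative use of [bv_tac] (a sketch only, not part of this
+   module: [uint_t] and [logand] come from FStar.UInt, and additional
+   options may be needed in practice):
+
+     let and_comm (x y : uint_t 64)
+       : Lemma (logand x y == logand y x)
+       = assert (logand x y == logand y x) by (bv_tac ())
+*)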
+ +let rec arith_expr_to_bv (e:expr) : Tac unit = + match e with + | NatToBv (MulMod e1 _) | MulMod e1 _ -> + apply_lemma (`int2bv_mul); + apply_lemma (`cong_bvmul); + arith_expr_to_bv e1 + | NatToBv (Umod e1 _) | Umod e1 _ -> + apply_lemma (`int2bv_mod); + apply_lemma (`cong_bvmod); + arith_expr_to_bv e1 + | NatToBv (Udiv e1 _) | Udiv e1 _ -> + apply_lemma (`int2bv_div); + apply_lemma (`cong_bvdiv); + arith_expr_to_bv e1 + | NatToBv (Shl e1 _) | Shl e1 _ -> + apply_lemma (`int2bv_shl); + apply_lemma (`cong_bvshl); + arith_expr_to_bv e1 + | NatToBv (Shr e1 _) | Shr e1 _ -> + apply_lemma (`int2bv_shr); + apply_lemma (`cong_bvshr); + arith_expr_to_bv e1 + | NatToBv (Land e1 e2) | (Land e1 e2) -> + apply_lemma (`int2bv_logand); + apply_lemma (`cong_bvand); + arith_expr_to_bv e1; + arith_expr_to_bv e2 + | NatToBv (Lxor e1 e2) | (Lxor e1 e2) -> + apply_lemma (`int2bv_logxor); + apply_lemma (`cong_bvxor); + arith_expr_to_bv e1; + arith_expr_to_bv e2 + | NatToBv (Lor e1 e2) | (Lor e1 e2) -> + apply_lemma (`int2bv_logor); + apply_lemma (`cong_bvor); + arith_expr_to_bv e1; + arith_expr_to_bv e2 + | NatToBv (Ladd e1 e2) | (Ladd e1 e2) -> + apply_lemma (`int2bv_add); + apply_lemma (`cong_bvadd); + arith_expr_to_bv e1; + arith_expr_to_bv e2 + | NatToBv (Lsub e1 e2) | (Lsub e1 e2) -> + apply_lemma (`int2bv_sub); + apply_lemma (`cong_bvsub); + arith_expr_to_bv e1; + arith_expr_to_bv e2 + | _ -> + trefl () + +let arith_to_bv_tac () : Tac unit = focus (fun () -> + norm [delta_only ["FStar.BV.bvult"]]; + let g = cur_goal () in + let f = term_as_formula g in + match f with + | Comp (Eq _) l r -> + begin match run_tm (as_arith_expr l) with + | Inl s -> + dump s; + trefl () + | Inr e -> + // dump "inr arith_to_bv"; + seq (fun () -> arith_expr_to_bv e) trefl + end + | _ -> + fail ("arith_to_bv_tac: unexpected: " ^ term_to_string g) +) + +(* As things are right now, we need to be able to parse NatToBv +too. This can be useful, if we have mixed expressions so I'll leave it +as is for now *) +let bv_tac () = focus (fun () -> + mapply0 (`eq_to_bv); + mapply0 (`trans); + arith_to_bv_tac (); + arith_to_bv_tac (); + set_options "--smtencoding.elim_box true"; + norm [delta] ; + smt () +) + +let bv_tac_lt n = focus (fun () -> + let nn = pack (Tv_Const (C_Int n)) in + let t = mk_app (`trans_lt2) [(nn, Q_Implicit)] in + apply_lemma t; + arith_to_bv_tac (); + arith_to_bv_tac (); + set_options "--smtencoding.elim_box true"; + smt () +) + +let to_bv_tac () = focus (fun () -> + apply_lemma (`eq_to_bv); + apply_lemma (`trans); + arith_to_bv_tac (); + arith_to_bv_tac () +) diff --git a/stage0/ulib/FStar.Tactics.BV.fsti b/stage0/ulib/FStar.Tactics.BV.fsti new file mode 100644 index 00000000000..eb6f09cdd72 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.BV.fsti @@ -0,0 +1,31 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Tactics.BV + +open FStar.Tactics.Effect +open FStar.Tactics.BV.Lemmas {} (* bring into tc scope, since the tactic calls the lemmas *) + +[@@plugin] +val arith_to_bv_tac () : Tac unit + +[@@plugin] +val bv_tac () : Tac unit + +[@@plugin] +val bv_tac_lt (n:int) : Tac unit + +[@@plugin] +val to_bv_tac () : Tac unit \ No newline at end of file diff --git a/stage0/ulib/FStar.Tactics.BreakVC.fst b/stage0/ulib/FStar.Tactics.BreakVC.fst new file mode 100644 index 00000000000..a4e4af39ca6 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.BreakVC.fst @@ -0,0 +1,34 @@ +module FStar.Tactics.BreakVC + +open FStar.Tactics.V2 + +let mono_lem () : Lemma (tac_wp_monotonic #unit break_wp') = + assert (tac_wp_monotonic #unit break_wp') by begin + norm [delta]; + l_to_r [`spinoff_eq] + end + +let squash_p_impl_p (p:pure_post unit) : squash (squash (p ()) ==> p ()) = () + +#push-options "--no_tactics" // don't process `with_tactic` markers + +let (==>>) = (==>) // Working around #3173 and #3175 + +let aux (ps:proofstate) (p : __result unit -> Type0) +: Lemma (break_wp ps p ==> tac_return_wp () ps p) += calc (==>>) { + break_wp ps p; + == {} + spinoff (squash (p (Success () ps))); + <==> { spinoff_equiv (squash (p (Success () ps))) } + squash (p (Success () ps)); + ==>> { squash_p_impl_p _ } + p (Success () ps); + ==> { () } + tac_return_wp () ps p; + } + +let break_vc () : TAC unit break_wp = + Classical.forall_intro_2 aux; + () +#pop-options diff --git a/stage0/ulib/FStar.Tactics.BreakVC.fsti b/stage0/ulib/FStar.Tactics.BreakVC.fsti new file mode 100644 index 00000000000..0ef1fd151a9 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.BreakVC.fsti @@ -0,0 +1,22 @@ +module FStar.Tactics.BreakVC + +open FStar.Tactics +open FStar.Stubs.Tactics.Result + +let break_wp' : tac_wp_t0 unit = + fun ps p -> spinoff (squash (p (Success () ps))) + +val mono_lem () : Lemma (tac_wp_monotonic #unit break_wp') + +private +let post () : Tac unit = + norm [delta_fully [`%mono_lem; `%break_wp']]; + trefl() + +[@@postprocess_with post] +unfold +let break_wp : tac_wp_t unit = + let _ = mono_lem () in + break_wp' + +val break_vc () : TAC unit break_wp diff --git a/stage0/ulib/FStar.Tactics.Builtins.fsti b/stage0/ulib/FStar.Tactics.Builtins.fsti new file mode 100644 index 00000000000..f852a1c1fa2 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Builtins.fsti @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Builtins + +(* This module is a temporary for Meta-F* migration *) +include FStar.Stubs.Tactics.V1.Builtins diff --git a/stage0/ulib/FStar.Tactics.Canon.Lemmas.fst b/stage0/ulib/FStar.Tactics.Canon.Lemmas.fst new file mode 100644 index 00000000000..8c35cb2c801 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Canon.Lemmas.fst @@ -0,0 +1,38 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Canon.Lemmas + +open FStar.Mul + +let distr #x #y #z = () +let distl #x #y #z = () +let ass_plus_l #x #y #z = () +let ass_mult_l #x #y #z = () +let comm_plus #x #y = () +let sw_plus #x #y #z = () +let sw_mult #x #y #z = () +let comm_mult #x #y = () +let trans #a #x #z #y e1 e2 = () +let cong_plus #w #x #y #z p q = () +let cong_mult #w #x #y #z p q = () +let neg_minus_one #x = () +let x_plus_zero #x = () +let zero_plus_x #x = () +let x_mult_zero #x = () +let zero_mult_x #x = () +let x_mult_one #x = () +let one_mult_x #x = () +let minus_is_plus #x #y = () \ No newline at end of file diff --git a/stage0/ulib/FStar.Tactics.Canon.Lemmas.fsti b/stage0/ulib/FStar.Tactics.Canon.Lemmas.fsti new file mode 100644 index 00000000000..7b573543497 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Canon.Lemmas.fsti @@ -0,0 +1,61 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Tactics.Canon.Lemmas + +open FStar.Mul + +val distr : (#x : int) -> (#y : int) -> (#z : int) -> Lemma (x * (y + z) == x * y + x * z) + +val distl : (#x : int) -> (#y : int) -> (#z : int) -> Lemma ((x + y) * z == x * z + y * z) + +val ass_plus_l : (#x : int) -> (#y : int) -> (#z : int) -> Lemma (x + (y + z) == (x + y) + z) + +val ass_mult_l : (#x : int) -> (#y : int) -> (#z : int) -> Lemma (x * (y * z) == (x * y) * z) + +val comm_plus : (#x : int) -> (#y : int) -> Lemma (x + y == y + x) + +val sw_plus : (#x : int) -> (#y : int) -> (#z : int) -> Lemma ((x + y) + z == (x + z) + y) + +val sw_mult : (#x : int) -> (#y : int) -> (#z : int) -> Lemma ((x * y) * z == (x * z) * y) + +val comm_mult : (#x : int) -> (#y : int) -> Lemma (x * y == y * x) + +val trans : (#a:Type) -> (#x:a) -> (#z:a) -> (#y:a) -> + squash (x == y) -> squash (y == z) -> Lemma (x == z) + +val cong_plus : (#w:int) -> (#x:int) -> (#y:int) -> (#z:int) -> + squash (w == y) -> squash (x == z) -> + Lemma (w + x == y + z) + +val cong_mult : (#w:int) -> (#x:int) -> (#y:int) -> (#z:int) -> + squash (w == y) -> squash (x == z) -> + Lemma (w * x == y * z) + +val neg_minus_one : (#x:int) -> Lemma (-x == (-1) * x) + +val x_plus_zero : (#x:int) -> Lemma (x + 0 == x) + +val zero_plus_x : (#x:int) -> Lemma (0 + x == x) + +val x_mult_zero : (#x:int) -> Lemma (x * 0 == 0) + +val zero_mult_x : (#x:int) -> Lemma (0 * x == 0) + +val x_mult_one : (#x:int) -> Lemma (x * 1 == x) + +val one_mult_x : (#x:int) -> Lemma (1 * x == x) + +val minus_is_plus : (#x : int) -> (#y : int) -> Lemma (x - y == x + (-y)) diff --git a/stage0/ulib/FStar.Tactics.Canon.fst b/stage0/ulib/FStar.Tactics.Canon.fst new file mode 100644 index 00000000000..61a7ef8f431 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Canon.fst @@ -0,0 +1,179 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Tactics.Canon + +open FStar.Reflection.V2 +open FStar.Tactics.V2.Bare +open FStar.Reflection.V2.Arith +open FStar.Mul +module O = FStar.Order +open FStar.Tactics.Canon.Lemmas + +let step (t : unit -> Tac unit) : Tac unit = + apply_lemma (`trans); + t () + +let step_lemma (lem : term) : Tac unit = + step (fun () -> apply_lemma lem) + +val canon_point : expr -> Tac expr +let rec canon_point e = + let skip () : Tac expr = + trefl (); e + in + match e with + // Evaluate constants + | Plus (Lit a) (Lit b) -> + norm [primops]; + trefl (); + Lit (a + b) + + | Mult (Lit a) (Lit b) -> + norm [delta; primops]; // Need delta to turn op_Star into op_Multiply, as there's no primop for it + trefl (); + Lit (a * b) + + // Forget about negations + | Neg e -> + step_lemma (`neg_minus_one); + canon_point (Mult (Lit (-1)) e) + + // Distribute + | Mult a (Plus b c) -> + step_lemma (`distr); + step_lemma (`cong_plus); + let l = canon_point (Mult a b) in + let r = canon_point (Mult a c) in + canon_point (Plus l r) + + | Mult (Plus a b) c -> + step_lemma (`distl); + step_lemma (`cong_plus); + let l = canon_point (Mult a c) in + let r = canon_point (Mult b c) in + canon_point (Plus l r) + + // Associate to the left + | Mult a (Mult b c) -> + step_lemma (`ass_mult_l); + step_lemma (`cong_mult); + let l = canon_point (Mult a b) in + let r = canon_point c in + canon_point (Mult l r) + + | Plus a (Plus b c) -> + step_lemma (`ass_plus_l); + step_lemma (`cong_plus); + let l = canon_point (Plus a b) in + let r = canon_point c in + canon_point (Plus l r) + + | Plus (Plus a b) c -> + if O.gt (compare_expr b c) + then begin + step_lemma (`sw_plus); + apply_lemma (`cong_plus); + let l = canon_point (Plus a c) in + trefl() ; + Plus l b + end + else skip () + + | Mult (Mult a b) c -> + if O.gt (compare_expr b c) + then begin + step_lemma (`sw_mult); + apply_lemma (`cong_mult); + let l = canon_point (Mult a c) in + trefl (); + Mult l b + end + else skip () + + | Plus a (Lit 0) -> + apply_lemma (`x_plus_zero); + a + + | Plus (Lit 0) b -> + apply_lemma (`zero_plus_x); + b + + | Plus a b -> + if O.gt (compare_expr a b) + then (apply_lemma (`comm_plus); Plus b a) + else skip () + + | Mult (Lit 0) _ -> + apply_lemma (`zero_mult_x); + Lit 0 + + | Mult _ (Lit 0) -> + apply_lemma (`x_mult_zero); + Lit 0 + + | Mult (Lit 1) r -> + apply_lemma (`one_mult_x); + r + + | Mult l (Lit 1) -> + apply_lemma (`x_mult_one); + l + + | Mult a b -> + if O.gt (compare_expr a b) + then (apply_lemma (`comm_mult); Mult b a) + else skip () + + // Forget about subtraction + | Minus a b -> + step_lemma (`minus_is_plus); + step_lemma (`cong_plus); + trefl (); + let negb = match b with | Lit n -> Lit (-n) | _ -> Neg b in + // ^ We need to take care wrt literals, since an application (- N) + // will get reduced to the literal -N and then neg_minus_one will not + // apply. + let r = canon_point negb in + canon_point (Plus a r) + + | _ -> + skip () + +// On canon_point_entry, we interpret the LHS of the goal as an +// arithmetic expression, of which we keep track in canon_point so we +// avoid reinterpreting the goal, which gives a good speedup. +// +// However, we are repeating work between canon_point_entry calls, since +// in (L + R), we are called once for L, once for R, and once for the +// sum which traverses both (their canonized forms, actually). 
+// +// The proper way to solve this is have some state-passing in pointwise, +// maybe having the inner tactic be of type (list a -> tactic a), where +// the list is the collected results for all child calls. +let canon_point_entry () : Tac unit = + norm [primops]; + let g = cur_goal () in + match term_as_formula g with + | Comp (Eq _) l r -> + begin match run_tm (is_arith_expr l) with + | Inr e -> (let _e = canon_point e in ()) + | Inl _ -> trefl () + end + | _ -> + fail ("impossible: " ^ term_to_string g) + +let canon () : Tac unit = + pointwise canon_point_entry diff --git a/stage0/ulib/FStar.Tactics.Canon.fsti b/stage0/ulib/FStar.Tactics.Canon.fsti new file mode 100644 index 00000000000..88e00ee66bf --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Canon.fsti @@ -0,0 +1,22 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Canon + +open FStar.Tactics.Effect +open FStar.Tactics.Canon.Lemmas {} (* bring lemmas into tc scope *) + +[@@plugin] +val canon () : Tac unit diff --git a/stage0/ulib/FStar.Tactics.CanonCommMonoid.fst b/stage0/ulib/FStar.Tactics.CanonCommMonoid.fst new file mode 100644 index 00000000000..18553f1ca9f --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CanonCommMonoid.fst @@ -0,0 +1,485 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.CanonCommMonoid + +open FStar.Algebra.CommMonoid +open FStar.List +open FStar.Reflection.V2 +open FStar.Tactics.V2 +open FStar.Classical +open FStar.Tactics.CanonCommSwaps + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +(* An expression canonizer for commutative monoids. 
+ Inspired by: + - http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html + - http://poleiro.info/posts/2015-04-13-writing-reflective-tactics.html +*) + +(* Only dump when debugging is on *) +private let dump m = if debugging () then dump m + +(***** Expression syntax *) + +let var : eqtype = nat + +type exp : Type = + | Unit : exp + | Var : var -> exp + | Mult : exp -> exp -> exp + +let rec exp_to_string (e:exp) : string = + match e with + | Unit -> "Unit" + | Var x -> "Var " ^ string_of_int (x <: var) + | Mult e1 e2 -> "Mult (" ^ exp_to_string e1 + ^ ") (" ^ exp_to_string e2 ^ ")" + +(***** Expression denotation *) + +// Use a map that stores for each variable +// (1) its denotation that should be treated abstractly (type a) and +// (2) user-specified extra information depending on its term (type b) + +let vmap (a b:Type) = list (var & (a&b)) & (a & b) +let const (#a #b:Type) (xa:a) (xb:b) : vmap a b = [], (xa,xb) +let select (#a #b:Type) (x:var) (vm:vmap a b) : Tot a = + match assoc #var #(a & b) x (fst vm) with + | Some (a, _) -> a + | _ -> fst (snd vm) +let select_extra (#a #b:Type) (x:var) (vm:vmap a b) : Tot b = + match assoc #var #(a & b) x (fst vm) with + | Some (_, b) -> b + | _ -> snd (snd vm) +let update (#a #b:Type) (x:var) (xa:a) (xb:b) (vm:vmap a b) : vmap a b = + (x, (xa, xb))::fst vm, snd vm + +let rec mdenote (#a #b:Type) (m:cm a) (vm:vmap a b) (e:exp) : Tot a = + match e with + | Unit -> CM?.unit m + | Var x -> select x vm + | Mult e1 e2 -> CM?.mult m (mdenote m vm e1) (mdenote m vm e2) + +let rec xsdenote (#a #b:Type) (m:cm a) (vm:vmap a b) (xs:list var) : Tot a = + match xs with + | [] -> CM?.unit m + | [x] -> select x vm + | x::xs' -> CM?.mult m (select x vm) (xsdenote m vm xs') + +(***** Flattening expressions to lists of variables *) + +let rec flatten (e:exp) : list var = + match e with + | Unit -> [] + | Var x -> [x] + | Mult e1 e2 -> flatten e1 @ flatten e2 + +let rec flatten_correct_aux (#a #b:Type) (m:cm a) (vm:vmap a b) + (xs1 xs2:list var) : + Lemma (xsdenote m vm (xs1 @ xs2) == CM?.mult m (xsdenote m vm xs1) + (xsdenote m vm xs2)) = + match xs1 with + | [] -> CM?.identity m (xsdenote m vm xs2) + | [x] -> if (Nil? xs2) then right_identity m (select x vm) + | x::xs1' -> (CM?.associativity m (select x vm) + (xsdenote m vm xs1') (xsdenote m vm xs2); + flatten_correct_aux m vm xs1' xs2) + +let rec flatten_correct (#a #b:Type) (m:cm a) (vm:vmap a b) (e:exp) : + Lemma (mdenote m vm e == xsdenote m vm (flatten e)) = + match e with + | Unit | Var _ -> () + | Mult e1 e2 -> flatten_correct_aux m vm (flatten e1) (flatten e2); + flatten_correct m vm e1; flatten_correct m vm e2 + +(***** Permuting the lists of variables + by swapping adjacent elements *) + +(* The user has control over the permutation. He can store extra + information in the vmap and use that for choosing the + permutation. This means that permute has access to the vmap. 
*) + +let permute (b:Type) = a:Type -> vmap a b -> list var -> list var + +// high-level correctness criterion for permutations +let permute_correct (#b:Type) (p:permute b) = + #a:Type -> m:cm a -> vm:vmap a b -> xs:list var -> + Lemma (xsdenote m vm xs == xsdenote m vm (p a vm xs)) + +// sufficient condition: +// permutation has to be expressible as swaps of adjacent list elements + +let rec apply_swap_aux_correct (#a #b:Type) (n:nat) (m:cm a) (vm:vmap a b) + (xs:list var) (s:swap (length xs + n)) : + Lemma (requires True) + (ensures (xsdenote m vm xs == xsdenote m vm (apply_swap_aux n xs s))) + (decreases xs) = + match xs with + | [] | [_] -> () + | x1 :: x2 :: xs' -> + if n = (s <: nat) + then (// x1 + (x2 + xs') =a (x1 + x2) + xs' + // =c (x2 + x1) + xs' = a x2 + (x1 + xs') + let a = CM?.associativity m in + a (select x1 vm) (select x2 vm) (xsdenote m vm xs'); + a (select x2 vm) (select x1 vm) (xsdenote m vm xs'); + CM?.commutativity m (select x1 vm) (select x2 vm)) + else apply_swap_aux_correct (n+1) m vm (x2 :: xs') s + +let apply_swap_correct (#a #b:Type) (m:cm a) (vm:vmap a b) + (xs:list var) (s:swap (length xs)): + Lemma (requires True) + (ensures (xsdenote m vm xs == xsdenote m vm (apply_swap xs s))) + (decreases xs) = apply_swap_aux_correct 0 m vm xs s + +let rec apply_swaps_correct (#a #b:Type) (m:cm a) (vm:vmap a b) + (xs:list var) (ss:list (swap (length xs))): + Lemma (requires True) + (ensures (xsdenote m vm xs == xsdenote m vm (apply_swaps xs ss))) + (decreases ss) = + match ss with + | [] -> () + | s::ss' -> apply_swap_correct m vm xs s; + apply_swaps_correct m vm (apply_swap xs s) ss' + +let permute_via_swaps (#b:Type) (p:permute b) = + (#a:Type) -> (vm:vmap a b) -> xs:list var -> + Lemma (exists ss. p a vm xs == apply_swaps xs ss) + +let permute_via_swaps_correct_aux + (#b:Type) (p:permute b) (pvs:permute_via_swaps p) + (#a:Type) (m:cm a) (vm:vmap a b) (xs:list var) : + Lemma (xsdenote m vm xs == xsdenote m vm (p a vm xs)) = + pvs vm xs; + assert(exists ss. p a vm xs == apply_swaps xs ss); + exists_elim (xsdenote m vm xs == xsdenote m vm (p a vm xs)) + (() <: squash (exists ss. p a vm xs == apply_swaps xs ss)) + (fun ss -> apply_swaps_correct m vm xs ss) + +let permute_via_swaps_correct + (#b:Type) (p:permute b) (pvs:permute_via_swaps p) : permute_correct p = + permute_via_swaps_correct_aux p pvs + +(***** Sorting variables is a correct permutation + (since it can be done by swaps) *) + +// Here we sort without associating any extra information with the +// variables and only look at the actual identifiers + +let sort : permute unit = + (fun a vm -> List.Tot.Base.sortWith #nat (compare_of_bool (<))) + +let sortWith (#b:Type) (f:nat -> nat -> int) : permute b = + (fun a vm -> List.Tot.Base.sortWith #nat f) + +let sort_via_swaps (#a:Type) (vm : vmap a unit) (xs:list var) : + Lemma (exists ss. sort a vm xs == apply_swaps xs ss) = + List.Tot.Properties.sortWith_permutation #nat (compare_of_bool (<)) xs; + let ss = equal_counts_implies_swaps #nat xs (sort a vm xs) in + assert (sort a vm xs == apply_swaps xs ss) + +let sortWith_via_swaps (#a #b:Type) (f:nat -> nat -> int) + (vm : vmap a b) (xs:list var) : + Lemma (exists ss. 
sortWith #b f a vm xs == apply_swaps xs ss) = + List.Tot.Properties.sortWith_permutation #nat f xs; + let ss = equal_counts_implies_swaps #nat xs (sortWith #b f a vm xs) in + assert (sortWith #b f a vm xs == apply_swaps xs ss) + +let sort_correct_aux (#a:Type) (m:cm a) (vm:vmap a unit) (xs:list var) : + Lemma (xsdenote m vm xs == xsdenote m vm (sort a vm xs)) = + permute_via_swaps_correct #unit sort sort_via_swaps m vm xs + +let sortWith_correct_aux (#a #b:Type) (f:nat -> nat -> int) (m:cm a) (vm:vmap a b) (xs:list var) : + Lemma (xsdenote m vm xs == xsdenote m vm (sortWith #b f a vm xs)) = + permute_via_swaps_correct (sortWith f) (fun #a -> sortWith_via_swaps f) m vm xs + +let sort_correct : permute_correct #unit sort = sort_correct_aux + +let sortWith_correct (#b:Type) (f:nat -> nat -> int) : + permute_correct #b (sortWith #b f) = + (fun #a -> sortWith_correct_aux #a #b f) + +(***** Canonicalization tactics *) + +let canon (#a #b:Type) (vm:vmap a b) (p:permute b) (e:exp) = p a vm (flatten e) + +let canon_correct (#a #b:Type) (p:permute b) (pc:permute_correct p) + (m:cm a) (vm:vmap a b) (e:exp) : + Lemma (mdenote m vm e == xsdenote m vm (canon vm p e)) = + flatten_correct m vm e; pc m vm (flatten e) + +let monoid_reflect (#a #b:Type) (p:permute b) (pc:permute_correct p) + (m:cm a) (vm:vmap a b) (e1 e2:exp) + (_ : squash (xsdenote m vm (canon vm p e1) == + xsdenote m vm (canon vm p e2))) + : squash (mdenote m vm e1 == mdenote m vm e2) = + canon_correct p pc m vm e1; canon_correct p pc m vm e2 + +(* Finds the position of first occurrence of x in xs. + This is now specialized to terms and their funny term_eq. *) +let rec where_aux (n:nat) (x:term) (xs:list term) : + Tac (option nat) = + match xs with + | [] -> None + | x'::xs' -> if term_eq_old x x' then Some n else where_aux (n+1) x xs' +let where = where_aux 0 + +// This expects that mult, unit, and t have already been normalized +let rec reification_aux (#a #b:Type) (unquotea:term->Tac a) (ts:list term) + (vm:vmap a b) (f:term->Tac b) + (mult unit t : term) : Tac (exp & list term & vmap a b) = + let hd, tl = collect_app_ref t in + let fvar (t:term) (ts:list term) (vm:vmap a b) : Tac (exp & list term & vmap a b) = + match where t ts with + | Some v -> (Var v, ts, vm) + | None -> let vfresh = length ts in let z = unquotea t in + (Var vfresh, ts @ [t], update vfresh z (f t) vm) + in + match inspect hd, list_unref tl with + | Tv_FVar fv, [(t1, Q_Explicit) ; (t2, Q_Explicit)] -> + if term_eq_old (pack (Tv_FVar fv)) mult + then (let (e1,ts,vm) = reification_aux unquotea ts vm f mult unit t1 in + let (e2,ts,vm) = reification_aux unquotea ts vm f mult unit t2 in + (Mult e1 e2, ts, vm)) + else fvar t ts vm + | _, _ -> + if term_eq_old t unit + then (Unit, ts, vm) + else fvar t ts vm + +// TODO: could guarantee same-length lists +let reification (b:Type) (f:term->Tac b) (def:b) (#a:Type) + (unquotea:term->Tac a) (quotea:a -> Tac term) (tmult tunit:term) (munit:a) + (ts:list term) : + Tac (list exp & vmap a b) = + let tmult: term = norm_term [delta;zeta;iota] tmult in + let tunit: term = norm_term [delta;zeta;iota] tunit in + let ts = Tactics.Util.map (norm_term [delta;zeta;iota]) ts in + // dump ("mult = " ^ term_to_string mult ^ + // "; unit = " ^ term_to_string unit ^ + // "; t = " ^ term_to_string t); + let (es,_, vm) = + Tactics.Util.fold_left + (fun (es,vs,vm) t -> + let (e,vs,vm) = reification_aux unquotea vs vm f tmult tunit t + in (e::es,vs,vm)) + ([],[], const munit def) ts + in (List.Tot.Base.rev es,vm) + +val term_mem: term -> list term 
-> Tac bool +let rec term_mem x = function + | [] -> false + | hd::tl -> if term_eq_old hd x then true else term_mem x tl + +let unfold_topdown (ts: list term) = + let should_rewrite (s:term) : Tac (bool & int) = + (term_mem s ts, 0) + in + let rewrite () : Tac unit = + norm [delta]; + trefl() + in + topdown_rewrite should_rewrite rewrite + +let rec quote_list (#a:Type) (ta:term) (quotea:a->Tac term) (xs:list a) : + Tac term = + match xs with + | [] -> mk_app (`Nil) [(ta, Q_Implicit)] + | x::xs' -> mk_app (`Cons) [(ta, Q_Implicit); + (quotea x, Q_Explicit); + (quote_list ta quotea xs', Q_Explicit)] + +let quote_vm (#a #b:Type) (ta tb: term) + (quotea:a->Tac term) (quoteb:b->Tac term) (vm:vmap a b) : Tac term = + let quote_pair (p:a&b) : Tac term = + mk_app (`Mktuple2) [(ta, Q_Implicit); (tb, Q_Implicit); + (quotea (fst p), Q_Explicit); (quoteb (snd p), Q_Explicit)] in + let t_a_star_b = mk_e_app (`tuple2) [ta;tb] in + let quote_map_entry (p:(nat&(a&b))) : Tac term = + mk_app (`Mktuple2) [(`nat, Q_Implicit); (t_a_star_b, Q_Implicit); + (pack (Tv_Const (C_Int (fst p))), Q_Explicit); + (quote_pair (snd p), Q_Explicit)] in + let tyentry = mk_e_app (`tuple2) [(`nat); t_a_star_b] in + let tlist = quote_list tyentry quote_map_entry (fst vm) in + (* dump (term_to_string (tc tlist)); *) + let tpair = quote_pair (snd vm) in + (* dump (term_to_string (tc tpair)); *) + let tylist = mk_e_app (`list) [tyentry] in + (* dump (term_to_string (tc tylist)); *) + mk_app (`Mktuple2) [(tylist, Q_Implicit); (t_a_star_b, Q_Implicit); + (tlist, Q_Explicit); (tpair, Q_Explicit)] + +let rec quote_exp (e:exp) : Tac term = + match e with + | Unit -> `Unit + | Var x -> mk_e_app (`Var) [pack (Tv_Const (C_Int x))] + | Mult e1 e2 -> mk_e_app (`Mult) [quote_exp e1; quote_exp e2] + +(* [@@plugin] *) +let canon_monoid_aux + (a b: Type) (ta: term) (unquotea: term -> Tac a) (quotea: a -> Tac term) + (tm tmult tunit: term) (munit: a) (tb: term) (quoteb:b->Tac term) + (f:term->Tac b) (def:b) (tp:term) (tpc:term): Tac unit = + norm []; + match term_as_formula (cur_goal ()) with + | Comp (Eq (Some t)) t1 t2 -> + // dump ("t1 =" ^ term_to_string t1 ^ + // "; t2 =" ^ term_to_string t2); + if term_eq_old t ta then + match reification b f def unquotea quotea tmult tunit munit [t1;t2] with + | [r1;r2], vm -> + // dump ("r1=" ^ exp_to_string r1 ^ + // "; r2=" ^ exp_to_string r2); + // dump ("vm =" ^ term_to_string (quote vm)); + + // change_sq (quote (mdenote m vm r1 == mdenote m vm r2)); + // TODO: quasi-quotes would help at least for splicing in the vm r1 r2 + let tvm = quote_vm ta tb quotea quoteb vm in + let tr1 = quote_exp r1 in + let tr2 = quote_exp r2 in + let teq:term = mk_app (`eq2) + [(ta, Q_Implicit); + (mk_app (`mdenote) [(ta,Q_Implicit); (tb,Q_Implicit); + (tm,Q_Explicit); (tvm,Q_Explicit); (tr1,Q_Explicit)], Q_Explicit); + (mk_app (`mdenote) [(ta,Q_Implicit); (tb,Q_Implicit); + (tm,Q_Explicit); (tvm,Q_Explicit); (tr2,Q_Explicit)], Q_Explicit)] in + change_sq teq; + + // dump ("before =" ^ term_to_string (norm_term [delta;primops] + // (quote (mdenote m vm r1 == mdenote m vm r2)))); + // dump ("expected after =" ^ term_to_string (norm_term [delta;primops] + // (quote (xsdenote m vm (canon vm p r1) == + // xsdenote m vm (canon vm p r2))))); + // mapply0 (quote (monoid_reflect #a #b p pc)); + mapply0 (mk_app (`monoid_reflect) [(ta, Q_Implicit); + (tb, Q_Implicit); + (tp, Q_Explicit); + (tpc, Q_Explicit)]); + (* dump ("before unfold, tp = " ^ term_to_string tp); *) + unfold_topdown [(`canon); (`xsdenote); tp]; + (* dump 
("after unfold"); *) + // would like to do only this norm [primops] but ... + // for now having to do all this mess + norm [delta_only [// term_to_string tp; + `%canon; + `%xsdenote; + `%flatten; + `%select; + `%select_extra; + `%quote_list; + `%quote_vm; + `%quote_exp; + + (* Defined ahead *) + "FStar.Tactics.CanonCommMonoid.const_compare"; + "FStar.Tactics.CanonCommMonoid.special_compare"; + + `%FStar.Pervasives.Native.fst; + `%FStar.Pervasives.Native.snd; + `%FStar.Pervasives.Native.__proj__Mktuple2__item___1; + `%FStar.Pervasives.Native.__proj__Mktuple2__item___2; + + `%FStar.List.Tot.assoc; + `%FStar.List.Tot.op_At; + `%FStar.List.Tot.append; + + (* TODO: the rest is a super brittle stop-gap, know thy instances *) + "SL.AutoTactic.compare_b"; + "SL.AutoTactic.compare_v"; + `%FStar.Order.int_of_order; + `%FStar.Reflection.V2.compare_term; + `%FStar.List.Tot.sortWith; + `%FStar.List.Tot.partition; + `%FStar.List.Tot.bool_of_compare; + `%FStar.List.Tot.compare_of_bool; + ]; zeta; iota; primops] // TODO: restrict primops to "less than" only + // - would need this even if unfold_def did it's job? + // ; dump "done" + | _ -> fail "Unexpected" + else fail "Goal should be an equality at the right monoid type" + | _ -> fail "Goal should be an equality" + +let canon_monoid_with + (b:Type) (f:term->Tac b) (def:b) (p:permute b) (pc:permute_correct p) + (#a:Type) (m:cm a) : Tac unit = + canon_monoid_aux a b + (quote a) (unquote #a) (fun (x:a) -> quote x) + (quote m) (quote (CM?.mult m)) (quote (CM?.unit m)) (CM?.unit m) + (quote b) (fun (x:b) -> quote x) f def (quote p) (quote (pc <: permute_correct p)) + +let canon_monoid (#a:Type) (cm:cm a) : Tac unit = + canon_monoid_with unit (fun _ -> ()) () + (fun a -> sort a) sort_correct cm + +(***** Examples *) + +let lem0 (a b c d : int) = + assert (0 + 1 + a + b + c + d + 2 == (b + 0) + 2 + d + (c + a + 0) + 1) + by (canon_monoid int_plus_cm; trefl ()) + + +// (* Trying to enable computation with constants beyond unit. +// It might be enough to move all them to the end of the list by +// a careful ordering and let the normalizer do its thing: *) + +// remember if something is a constant or not +let is_const (t:term) : Tac bool = Tv_Const? 
(inspect t) + +// sort things and put the constants last +let const_compare (#a:Type) (vm:vmap a bool) (x y:var) = + match select_extra x vm, select_extra y vm with + | false, false | true, true -> compare_of_bool (<) x y + | false, true -> 1 + | true, false -> -1 + +let const_last (a:Type) (vm:vmap a bool) (xs:list var) : list var = + List.Tot.Base.sortWith #nat (const_compare vm) xs + +let canon_monoid_const #a cm = canon_monoid_with bool is_const false + (fun a -> const_last a) + (fun #a m vm xs -> sortWith_correct #bool (const_compare vm) #a m vm xs) #a cm + +let lem1 (a b c d : int) = + assert_by_tactic (0 + 1 + a + b + c + d + 2 == (b + 0) + 2 + d + (c + a + 0) + 1) + (fun _ -> canon_monoid_const int_plus_cm; trefl()) + +// (* Trying to only bring some constants to the front, +// as Nik said would be useful for separation logic *) + +// remember if something is a constant or not +let is_special (ts:list term) (t:term) : Tac bool = t `term_mem` ts + +// put the special things sorted before the non-special ones, +// but don't change anything else +let special_compare (#a:Type) (vm:vmap a bool) (x y:var) = + match select_extra x vm, select_extra y vm with + | false, false -> 0 + | true, true -> compare_of_bool (<) x y + | false, true -> -1 + | true, false -> 1 + +let special_first (a:Type) (vm:vmap a bool) (xs:list var) : list var = + List.Tot.Base.sortWith #nat (special_compare vm) xs + +let special_first_correct : permute_correct special_first = + (fun #a m vm xs -> sortWith_correct #bool (special_compare vm) #a m vm xs) + +let canon_monoid_special (ts:list term) = + canon_monoid_with bool (is_special ts) false + (fun a -> special_first a) + special_first_correct diff --git a/stage0/ulib/FStar.Tactics.CanonCommMonoidSimple.Equiv.fst b/stage0/ulib/FStar.Tactics.CanonCommMonoidSimple.Equiv.fst new file mode 100644 index 00000000000..954d38c4dca --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CanonCommMonoidSimple.Equiv.fst @@ -0,0 +1,392 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.CanonCommMonoidSimple.Equiv + +open FStar.Algebra.CommMonoid.Equiv +open FStar.List +open FStar.Classical +open FStar.Tactics.CanonCommSwaps +open FStar.Tactics.V2.Bare + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +(* A simple expression canonizer for commutative monoids (working up to + some given equivalence relation as opposed to just propositional equality). + For a canonizer with more features see FStar.Tactics.CanonCommMonoid.fst. + + Based on FStar.Tactics.CanonCommMonoidSimple.fst +*) + +(* Only dump when debugging is on *) +//let dump m = if debugging () then dump m + +(***** Expression syntax *) + +// GM: ugh, we had `nat`, but then we get bitten by lack +// of subtyping over datatypes when we typecheck the amap term +// we generate (see convert_am). 
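+
+(* As a concrete (illustrative) example of the pipeline defined below: a
+   term [x `op` (y `op` x)] is reified as
+   [Mult (Atom 0) (Mult (Atom 1) (Atom 0))] together with an [amap]
+   sending [0 |-> x] and [1 |-> y]; [flatten] turns it into the atom list
+   [[0; 1; 0]], and [sort] reorders it to [[0; 0; 1]], whose denotation is
+   [x `op` (x `op` y)]. The correctness lemmas below show that each of
+   these steps preserves the denotation up to the monoid's equivalence
+   relation. *)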
+let atom : eqtype = int + +type exp : Type = + | Unit : exp + | Mult : exp -> exp -> exp + | Atom : atom -> exp + +let rec exp_to_string (e:exp) : string = + match e with + | Unit -> "Unit" + | Atom x -> "Atom " ^ string_of_int (x <: atom) + | Mult e1 e2 -> "Mult (" ^ exp_to_string e1 + ^ ") (" ^ exp_to_string e2 ^ ")" + +(***** Expression denotation *) + +// Use a map that stores for each atom +// (1) its denotation that should be treated abstractly (type a) and +// (2) user-specified extra information depending on its term (type b) + +let amap (a:Type) = list (atom & a) & a +let const (#a:Type) (xa:a) : amap a = ([], xa) +let select (#a:Type) (x:atom) (am:amap a) : Tot a = + match assoc #atom #a x (fst am) with + | Some a -> a + | _ -> snd am +let update (#a:Type) (x:atom) (xa:a) (am:amap a) : amap a = + (x, xa)::fst am, snd am + +let rec mdenote (#a:Type u#aa) (eq:equiv a) (m:cm a eq) (am:amap a) (e:exp) : a = + match e with + | Unit -> CM?.unit m + | Atom x -> select x am + | Mult e1 e2 -> CM?.mult m (mdenote eq m am e1) (mdenote eq m am e2) + +let rec xsdenote (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) (xs:list atom) : a = + match xs with + | [] -> CM?.unit m + | [x] -> select x am + | x::xs' -> CM?.mult m (select x am) (xsdenote eq m am xs') + +(***** Flattening expressions to lists of atoms *) + +let rec flatten (e:exp) : list atom = + match e with + | Unit -> [] + | Atom x -> [x] + | Mult e1 e2 -> flatten e1 @ flatten e2 + +let rec flatten_correct_aux (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) (xs1 xs2:list atom) + : Lemma (xsdenote eq m am (xs1 @ xs2) `EQ?.eq eq` CM?.mult m (xsdenote eq m am xs1) + (xsdenote eq m am xs2)) = + match xs1 with + | [] -> + CM?.identity m (xsdenote eq m am xs2); + EQ?.symmetry eq (CM?.mult m (CM?.unit m) (xsdenote eq m am xs2)) (xsdenote eq m am xs2) + | [x] -> ( + if (Nil? 
xs2) + then (right_identity eq m (select x am); + EQ?.symmetry eq (CM?.mult m (select x am) (CM?.unit m)) (select x am)) + else EQ?.reflexivity eq (CM?.mult m (xsdenote eq m am [x]) (xsdenote eq m am xs2))) + | x::xs1' -> + flatten_correct_aux eq m am xs1' xs2; + EQ?.reflexivity eq (select x am); + CM?.congruence m (select x am) (xsdenote eq m am (xs1' @ xs2)) + (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2)); + CM?.associativity m (select x am) (xsdenote eq m am xs1') (xsdenote eq m am xs2); + EQ?.symmetry eq (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2)) + (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2))); + EQ?.transitivity eq (CM?.mult m (select x am) (xsdenote eq m am (xs1' @ xs2))) + (CM?.mult m (select x am) (CM?.mult m (xsdenote eq m am xs1') (xsdenote eq m am xs2))) + (CM?.mult m (CM?.mult m (select x am) (xsdenote eq m am xs1')) (xsdenote eq m am xs2)) + +let rec flatten_correct (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) (e:exp) + : Lemma (mdenote eq m am e `EQ?.eq eq` xsdenote eq m am (flatten e)) = + match e with + | Unit -> EQ?.reflexivity eq (CM?.unit m) + | Atom x -> EQ?.reflexivity eq (select x am) + | Mult e1 e2 -> + flatten_correct_aux eq m am (flatten e1) (flatten e2); + EQ?.symmetry eq (xsdenote eq m am (flatten e1 @ flatten e2)) + (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2))); + flatten_correct eq m am e1; + flatten_correct eq m am e2; + CM?.congruence m (mdenote eq m am e1) (mdenote eq m am e2) + (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2)); + EQ?.transitivity eq (CM?.mult m (mdenote eq m am e1) (mdenote eq m am e2)) + (CM?.mult m (xsdenote eq m am (flatten e1)) (xsdenote eq m am (flatten e2))) + (xsdenote eq m am (flatten e1 @ flatten e2)) + +(***** Permuting the lists of atoms + by swapping adjacent elements *) + +let permute = list atom -> list atom + +// high-level correctness criterion for permutations +let permute_correct (p:permute) = + #a:Type -> eq:equiv a -> m:cm a eq -> am:amap a -> xs:list atom -> + Lemma (xsdenote eq m am xs `EQ?.eq eq` xsdenote eq m am (p xs)) + +// sufficient condition: +// permutation has to be expressible as swaps of adjacent list elements + +// In the general case, an arbitrary permutation can be done via swaps. 
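+
+// As a small worked example (indices are 0-based): a [swap] value [i]
+// exchanges the elements at positions [i] and [i+1], so
+//   apply_swap [a; b; c; d] 1 == [a; c; b; d]
+// and an arbitrary permutation is a sequence of such swaps, e.g.
+//   apply_swaps [2; 0; 1] [0; 1] == [0; 1; 2]
+// (first swap positions 0/1, then positions 1/2).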
+// (see FStar.Tactics.CanonCommSwaps for a proof) + +let rec apply_swap_aux_correct (#a:Type) (n:nat) (eq:equiv a) (m:cm a eq) (am:amap a) + (xs:list atom) (s:swap (length xs + n)) + : Lemma (requires True) + (ensures (xsdenote eq m am xs `EQ?.eq eq` xsdenote eq m am (apply_swap_aux n xs s))) + (decreases xs) = + match xs with + | [] -> EQ?.reflexivity eq (CM?.unit m) + | [x] -> EQ?.reflexivity eq (select x am) + | [x1;x2] -> + if n = (s <: nat) + then CM?.commutativity m (select x1 am) (select x2 am) + else EQ?.reflexivity eq (xsdenote eq m am [x1;x2]) + | x1 :: x2 :: xs' -> + if n = (s <: nat) + then ( + CM?.associativity m (select x1 am) (select x2 am) (xsdenote eq m am xs'); + EQ?.symmetry eq (CM?.mult m (CM?.mult m (select x1 am) (select x2 am)) (xsdenote eq m am xs')) + (CM?.mult m (select x1 am) (CM?.mult m (select x2 am) (xsdenote eq m am xs'))); + CM?.commutativity m (select x1 am) (select x2 am); + EQ?.reflexivity eq (xsdenote eq m am xs'); + CM?.congruence m (CM?.mult m (select x1 am) (select x2 am)) (xsdenote eq m am xs') + (CM?.mult m (select x2 am) (select x1 am)) (xsdenote eq m am xs'); + CM?.associativity m (select x2 am) (select x1 am) (xsdenote eq m am xs'); + EQ?.transitivity eq (CM?.mult m (select x1 am) (CM?.mult m (select x2 am) (xsdenote eq m am xs'))) + (CM?.mult m (CM?.mult m (select x1 am) (select x2 am)) (xsdenote eq m am xs')) + (CM?.mult m (CM?.mult m (select x2 am) (select x1 am)) (xsdenote eq m am xs')); + EQ?.transitivity eq (CM?.mult m (select x1 am) (CM?.mult m (select x2 am) (xsdenote eq m am xs'))) + (CM?.mult m (CM?.mult m (select x2 am) (select x1 am)) (xsdenote eq m am xs')) + (CM?.mult m (select x2 am) (CM?.mult m (select x1 am) (xsdenote eq m am xs')))) + else ( + apply_swap_aux_correct (n+1) eq m am (x2 :: xs') s; + EQ?.reflexivity eq (select x1 am); + CM?.congruence m (select x1 am) (xsdenote eq m am (x2 :: xs')) + (select x1 am) (xsdenote eq m am (apply_swap_aux (n+1) (x2 :: xs') s))) + +let apply_swap_correct (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) + (xs:list atom) (s:swap (length xs)) + : Lemma (ensures (xsdenote eq m am xs `EQ?.eq eq` xsdenote eq m am (apply_swap xs s))) + (decreases xs) = + apply_swap_aux_correct 0 eq m am xs s + +let rec apply_swaps_correct (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) + (xs:list atom) (ss:list (swap (length xs))) + : Lemma (requires True) + (ensures (xsdenote eq m am xs `EQ?.eq eq` xsdenote eq m am (apply_swaps xs ss))) + (decreases ss) = + match ss with + | [] -> EQ?.reflexivity eq (xsdenote eq m am xs) + | s::ss' -> + apply_swap_correct eq m am xs s; + apply_swaps_correct eq m am (apply_swap xs s) ss'; + EQ?.transitivity eq (xsdenote eq m am xs) + (xsdenote eq m am (apply_swap xs s)) + (xsdenote eq m am (apply_swaps (apply_swap xs s) ss')) + +let permute_via_swaps (p:permute) = + (#a:Type) -> (am:amap a) -> xs:list atom -> + Lemma (exists (ss:swaps_for xs). p xs == apply_swaps xs ss) + +let permute_via_swaps_correct_aux (p:permute) (pvs:permute_via_swaps p) + (#a:Type) (eq:equiv a)(m:cm a eq) (am:amap a) (xs:list atom) + : Lemma (xsdenote eq m am xs `EQ?.eq eq` xsdenote eq m am (p xs)) = + pvs am xs; + assert(exists (ss:swaps_for xs). p xs == apply_swaps xs ss); + exists_elim (xsdenote eq m am xs `EQ?.eq eq` xsdenote eq m am (p xs)) + (() <: squash (exists (ss:swaps_for xs). 
p xs == apply_swaps xs ss)) + (fun ss -> apply_swaps_correct eq m am xs ss) + +let permute_via_swaps_correct + (p:permute) (pvs:permute_via_swaps p) : permute_correct p = + fun #a -> permute_via_swaps_correct_aux p pvs #a + +(***** Sorting atoms is a correct permutation + (since it can be done by swaps) *) + +// Here we sort the variable numbers + +let sort : permute = List.Tot.Base.sortWith #int (compare_of_bool (<)) + +let sort_via_swaps (#a:Type) (am:amap a) (xs:list atom) + : Lemma (exists (ss:swaps_for xs). sort xs == apply_swaps xs ss) + = List.Tot.Properties.sortWith_permutation #int (compare_of_bool (<)) xs; + let ss = equal_counts_implies_swaps xs (sort xs) in + () + +let sort_correct_aux (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) (xs:list atom) + : Lemma (xsdenote eq m am xs `EQ?.eq eq` xsdenote eq m am (sort xs)) = + permute_via_swaps_correct sort (fun #a am -> sort_via_swaps am) eq m am xs + +let sort_correct : permute_correct sort = (fun #a -> sort_correct_aux #a) + +(***** Canonicalization tactics *) + +let canon (e:exp) = sort (flatten e) + +let canon_correct (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) (e:exp) + : Lemma (mdenote eq m am e `EQ?.eq eq` xsdenote eq m am (canon e)) = + flatten_correct eq m am e; + sort_correct eq m am (flatten e); + EQ?.transitivity eq (mdenote eq m am e) + (xsdenote eq m am (flatten e)) + (xsdenote eq m am (sort (flatten e))) + +let monoid_reflect_orig (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) (e1 e2:exp) + : Lemma (requires (xsdenote eq m am (canon e1) `EQ?.eq eq` xsdenote eq m am (canon e2))) + (ensures (mdenote eq m am e1 `EQ?.eq eq` mdenote eq m am e2)) = + canon_correct eq m am e1; + canon_correct eq m am e2; + EQ?.symmetry eq (mdenote eq m am e2) (xsdenote eq m am (canon e2)); + EQ?.transitivity eq (mdenote eq m am e1) + (xsdenote eq m am (canon e1)) + (xsdenote eq m am (canon e2)); + EQ?.transitivity eq (mdenote eq m am e1) + (xsdenote eq m am (canon e2)) + (mdenote eq m am e2) + +let monoid_reflect (#a:Type) (eq:equiv a) (m:cm a eq) (am:amap a) (e1 e2:exp) + (_ : squash (xsdenote eq m am (canon e1) `EQ?.eq eq` xsdenote eq m am (canon e2))) + : squash (mdenote eq m am e1 `EQ?.eq eq` mdenote eq m am e2) = + monoid_reflect_orig #a eq m am e1 e2 + +(* Finds the position of first occurrence of x in xs. + This is now specialized to terms and their funny term_eq. 
*) +let rec where_aux (n:nat) (x:term) (xs:list term) : + Tac (option nat) = + match xs with + | [] -> None + | x'::xs' -> if term_eq x x' then Some n else where_aux (n+1) x xs' +let where = where_aux 0 + +let fatom (t:term) (ts:list term) (am:amap term) : Tac (exp & list term & amap term) = + match where t ts with + | Some v -> (Atom v, ts, am) + | None -> + let vfresh = length ts in + let t = norm_term [iota; zeta] t in + (Atom vfresh, ts @ [t], update vfresh t am) + +// This expects that mult, unit, and t have already been normalized +let rec reification_aux (ts:list term) (am:amap term) + (mult unit t : term) : Tac (exp & list term & amap term) = + let hd, tl = collect_app t in + match inspect hd, tl with + | Tv_FVar fv, [(t1, Q_Explicit) ; (t2, Q_Explicit)] -> + if term_eq (pack (Tv_FVar fv)) mult + then (let (e1, ts, am) = reification_aux ts am mult unit t1 in + let (e2, ts, am) = reification_aux ts am mult unit t2 in + (Mult e1 e2, ts, am)) + else fatom t ts am + | _, _ -> + if term_eq t unit + then (Unit, ts, am) + else fatom t ts am + +let reification (eq: term) (m: term) (ts:list term) (am:amap term) (t:term) : + Tac (exp & list term & amap term) = + let mult = norm_term [iota; zeta; delta] (`CM?.mult (`#m)) in + let unit = norm_term [iota; zeta; delta] (`CM?.unit (`#m)) in + let t = norm_term [iota; zeta] t in + reification_aux ts am mult unit t + +let rec repeat_cong_right_identity (eq: term) (m: term) : Tac unit = + or_else (fun _ -> apply_lemma (`right_identity)) + (fun _ -> apply_lemma (`CM?.congruence (`#m)); + split (); + apply_lemma (`EQ?.reflexivity (`#eq)); + repeat_cong_right_identity eq m + ) + +let rec convert_map (m : list (atom & term)) : term = + match m with + | [] -> `[] + | (a, t)::ps -> + let a = pack (Tv_Const (C_Int a)) in + (* let t = norm_term [delta] t in *) + `((`#a, (`#t)) :: (`#(convert_map ps))) + +(* `am` is an amap (basically a list) of terms, each representing a value +of type `a` (whichever we are canonicalizing). 
This functions converts +`am` into a single `term` of type `amap a`, suitable to call `mdenote` with *) +let convert_am (am : amap term) : term = + let (map, def) = am in + (* let def = norm_term [delta] def in *) + `( (`#(convert_map map), `#def) ) + +let rec quote_exp (e:exp) : term = + match e with + | Unit -> (`Unit) + | Mult e1 e2 -> (`Mult (`#(quote_exp e1)) (`#(quote_exp e2))) + | Atom n -> let nt = pack (Tv_Const (C_Int n)) in + (`Atom (`#nt)) + +let canon_lhs_rhs (eq: term) (m: term) (lhs rhs:term) : Tac unit = + let m_unit = norm_term [iota; zeta; delta](`CM?.unit (`#m)) in + let am = const m_unit in (* empty map *) + let (r1, ts, am) = reification eq m [] am lhs in + let (r2, _, am) = reification eq m ts am rhs in + //dump ("am = " ^ term_to_string (quote am)); + //dump ("r1 = " ^ term_to_string (norm_term [delta;primops] (quote (mdenote eq m am r1)))); + //dump ("r2 = " ^ term_to_string (norm_term [delta;primops] (quote (mdenote eq m am r2)))); + //dump ("before = " ^ term_to_string (norm_term [hnf;delta;primops] + // (quote (mdenote eq m am r1 `EQ?.eq eq` mdenote eq m am r2)))); + //dump ("current goal -- " ^ term_to_string (cur_goal ())); + let am = convert_am am in + let r1 = quote_exp r1 in + let r2 = quote_exp r2 in + change_sq (`(mdenote (`#eq) (`#m) (`#am) (`#r1) + `EQ?.eq (`#eq)` + mdenote (`#eq) (`#m) (`#am) (`#r2))); + (* dump ("expected after = " ^ term_to_string (norm_term [delta;primops] *) + (* (quote (xsdenote eq m am (canon r1) `EQ?.eq eq` *) + (* xsdenote eq m am (canon r2))))); *) + apply (`monoid_reflect); + //dump ("after apply monoid_reflect"); + norm [iota; zeta; delta_only [`%canon; `%xsdenote; `%flatten; `%sort; + `%select; `%assoc; `%fst; `%__proj__Mktuple2__item___1; + `%(@); `%append; `%List.Tot.sortWith; + `%List.Tot.partition; `%bool_of_compare; + `%compare_of_bool; + ]; primops]; + //dump "before refl"; + or_else (fun _ -> apply_lemma (`(EQ?.reflexivity (`#eq)))) + (fun _ -> repeat_cong_right_identity eq m) + +[@@plugin] +let canon_monoid (eq: term) (m: term) : Tac unit = + norm [iota; zeta]; + let t = cur_goal () in + // removing top-level squash application + let sq, rel_xy = collect_app t in + // unpacking the application of the equivalence relation (lhs `EQ?.eq eq` rhs) + (match rel_xy with + | [(rel_xy,_)] -> ( + let rel, xy = collect_app rel_xy in + if (length xy >= 2) + then ( + match FStar.List.Tot.Base.index xy (length xy - 2) , FStar.List.Tot.index xy (length xy - 1) with + | (lhs, Q_Explicit) , (rhs, Q_Explicit) -> canon_lhs_rhs eq m lhs rhs + | _ -> fail "Goal should have been an application of a binary relation to 2 explicit arguments" + ) + else ( + fail "Goal should have been an application of a binary relation to n implicit and 2 explicit arguments" + ) + ) + | _ -> fail "Goal should be squash applied to a binary relation") diff --git a/stage0/ulib/FStar.Tactics.CanonCommMonoidSimple.fst b/stage0/ulib/FStar.Tactics.CanonCommMonoidSimple.fst new file mode 100644 index 00000000000..e534d902447 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CanonCommMonoidSimple.fst @@ -0,0 +1,274 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.CanonCommMonoidSimple + +open FStar.Algebra.CommMonoid +open FStar.List +open FStar.Reflection.V2 +open FStar.Tactics.V2.Bare +open FStar.Classical +open FStar.Tactics.CanonCommSwaps + +let term_eq = FStar.Stubs.Tactics.V2.Builtins.term_eq_old + +(* A simple expression canonizer for commutative monoids. + For a canonizer with more features see FStar.Tactics.CanonCommMonoid.fst. + + Inspired by: + - http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html + - http://poleiro.info/posts/2015-04-13-writing-reflective-tactics.html +*) + +(* Only dump when debugging is on *) +let dump m = if debugging () then dump m + +(***** Expression syntax *) + +let atom : eqtype = nat + +type exp : Type = + | Unit : exp + | Mult : exp -> exp -> exp + | Atom : atom -> exp + +let rec exp_to_string (e:exp) : string = + match e with + | Unit -> "Unit" + | Atom x -> "Atom " ^ string_of_int (x <: atom) + | Mult e1 e2 -> "Mult (" ^ exp_to_string e1 + ^ ") (" ^ exp_to_string e2 ^ ")" + +(***** Expression denotation *) + +// Use a map that stores for each atom +// (1) its denotation that should be treated abstractly (type a) and +// (2) user-specified extra information depending on its term (type b) + +let amap (a:Type) = list (atom & a) & a +let const (#a:Type) (xa:a) : amap a = ([], xa) +let select (#a:Type) (x:atom) (am:amap a) : Tot a = + match assoc #atom #a x (fst am) with + | Some a -> a + | _ -> snd am +let update (#a:Type) (x:atom) (xa:a) (am:amap a) : amap a = + (x, xa)::fst am, snd am + +let rec mdenote (#a:Type) (m:cm a) (am:amap a) (e:exp) : a = + match e with + | Unit -> CM?.unit m + | Atom x -> select x am + | Mult e1 e2 -> CM?.mult m (mdenote m am e1) (mdenote m am e2) + +let rec xsdenote (#a:Type) (m:cm a) (am:amap a) (xs:list atom) : a = + match xs with + | [] -> CM?.unit m + | [x] -> select x am + | x::xs' -> CM?.mult m (select x am) (xsdenote m am xs') + +(***** Flattening expressions to lists of atoms *) + +let rec flatten (e:exp) : list atom = + match e with + | Unit -> [] + | Atom x -> [x] + | Mult e1 e2 -> flatten e1 @ flatten e2 + +let rec flatten_correct_aux (#a:Type) (m:cm a) (am:amap a) (xs1 xs2:list atom) : + Lemma (xsdenote m am (xs1 @ xs2) == CM?.mult m (xsdenote m am xs1) + (xsdenote m am xs2)) = + match xs1 with + | [] -> CM?.identity m (xsdenote m am xs2) + | [x] -> if (Nil? 
xs2) then right_identity m (select x am) + | x::xs1' -> (CM?.associativity m (select x am) + (xsdenote m am xs1') (xsdenote m am xs2); + flatten_correct_aux m am xs1' xs2) + +let rec flatten_correct (#a:Type) (m:cm a) (am:amap a) (e:exp) : + Lemma (mdenote m am e == xsdenote m am (flatten e)) = + match e with + | Unit | Atom _ -> () + | Mult e1 e2 -> flatten_correct_aux m am (flatten e1) (flatten e2); + flatten_correct m am e1; flatten_correct m am e2 + +(***** Permuting the lists of atoms + by swapping adjacent elements *) + +let permute = list atom -> list atom + +// high-level correctness criterion for permutations +let permute_correct (p:permute) = + #a:Type -> m:cm a -> am:amap a -> xs:list atom -> + Lemma (xsdenote m am xs == xsdenote m am (p xs)) + +// sufficient condition: +// permutation has to be expressible as swaps of adjacent list elements + +// In the general case, an arbitrary permutation can be done via swaps. +// (see FStar.Tactics.CanonCommSwaps for a proof) + + +let rec apply_swap_aux_correct (#a:Type) (n:nat) (m:cm a) (am:amap a) + (xs:list atom) (s:swap (length xs + n)) : + Lemma (requires True) + (ensures (xsdenote m am xs == xsdenote m am (apply_swap_aux n xs s))) + (decreases xs) = + match xs with + | [] | [_] -> () + | x1 :: x2 :: xs' -> + if n = (s <: nat) + then (// x1 + (x2 + xs') =a (x1 + x2) + xs' + // =c (x2 + x1) + xs' = a x2 + (x1 + xs') + let a = CM?.associativity m in + a (select x1 am) (select x2 am) (xsdenote m am xs'); + a (select x2 am) (select x1 am) (xsdenote m am xs'); + CM?.commutativity m (select x1 am) (select x2 am)) + else apply_swap_aux_correct (n+1) m am (x2 :: xs') s + +let apply_swap_correct (#a:Type) (m:cm a) (am:amap a) + (xs:list atom) (s:swap (length xs)): + Lemma (ensures (xsdenote m am xs == xsdenote m am (apply_swap xs s))) + (decreases xs) = apply_swap_aux_correct 0 m am xs s + +let rec apply_swaps_correct (#a:Type) (m:cm a) (am:amap a) + (xs:list atom) (ss:list (swap (length xs))): + Lemma (requires True) + (ensures (xsdenote m am xs == xsdenote m am (apply_swaps xs ss))) + (decreases ss) = + match ss with + | [] -> () + | s::ss' -> apply_swap_correct m am xs s; + apply_swaps_correct m am (apply_swap xs s) ss' + +let permute_via_swaps (p:permute) = + (#a:Type) -> (am:amap a) -> xs:list atom -> + Lemma (exists ss. p xs == apply_swaps xs ss) + +let permute_via_swaps_correct_aux (p:permute) (pvs:permute_via_swaps p) + (#a:Type) (m:cm a) (am:amap a) (xs:list atom) : + Lemma (xsdenote m am xs == xsdenote m am (p xs)) = + pvs am xs; + assert(exists ss. p xs == apply_swaps xs ss); + exists_elim (xsdenote m am xs == xsdenote m am (p xs)) + (() <: squash (exists ss. p xs == apply_swaps xs ss)) + (fun ss -> apply_swaps_correct m am xs ss) + +let permute_via_swaps_correct + (p:permute) (pvs:permute_via_swaps p) : permute_correct p = + fun #a -> permute_via_swaps_correct_aux p pvs #a + +(***** Sorting atoms is a correct permutation + (since it can be done by swaps) *) + +// Here we sort the variable numbers + +let sort : permute = List.Tot.Base.sortWith #nat (compare_of_bool (<)) + +let sort_via_swaps (#a:Type) (am : amap a) (xs:list atom) + : Lemma (exists ss. 
sort xs == apply_swaps xs ss) + = + List.Tot.Properties.sortWith_permutation #nat (compare_of_bool (<)) xs; + let ss = equal_counts_implies_swaps #nat xs (sort xs) in + () + +let sort_correct_aux (#a:Type) (m:cm a) (am:amap a) (xs:list atom) : + Lemma (xsdenote m am xs == xsdenote m am (sort xs)) = + permute_via_swaps_correct sort (fun #a am -> sort_via_swaps am) m am xs + +let sort_correct : permute_correct sort = (fun #a -> sort_correct_aux #a) + +(***** Canonicalization tactics *) + +(* [@@plugin] *) +let canon (e:exp) = sort (flatten e) + +let canon_correct (#a:Type) (m:cm a) (am:amap a) (e:exp) : + Lemma (mdenote m am e == xsdenote m am (canon e)) = + flatten_correct m am e; sort_correct m am (flatten e) + +let monoid_reflect_orig (#a:Type) (m:cm a) (am:amap a) (e1 e2:exp) : + Lemma (requires (xsdenote m am (canon e1) == xsdenote m am (canon e2))) + (ensures (mdenote m am e1 == mdenote m am e2)) = + canon_correct m am e1; canon_correct m am e2 + +let monoid_reflect (#a:Type) (m:cm a) (am:amap a) (e1 e2:exp) + (_ : squash (xsdenote m am (canon e1) == xsdenote m am (canon e2))) + : squash (mdenote m am e1 == mdenote m am e2) = + canon_correct m am e1; canon_correct m am e2 + +(* Finds the position of first occurrence of x in xs. + This is now specialized to terms and their funny term_eq. *) +let rec where_aux (n:nat) (x:term) (xs:list term) : + Tac (option nat) = + match xs with + | [] -> None + | x'::xs' -> if term_eq x x' then Some n else where_aux (n+1) x xs' +let where = where_aux 0 + +// This expects that mult, unit, and t have already been normalized +let rec reification_aux (#a:Type) (ts:list term) (am:amap a) + (mult unit t : term) : Tac (exp & list term & amap a) = + let hd, tl = collect_app_ref t in + let fatom (t:term) (ts:list term) (am:amap a) : Tac (exp & list term & amap a) = + match where t ts with + | Some v -> (Atom v, ts, am) + | None -> let vfresh = length ts in let z = unquote t in + (Atom vfresh, ts @ [t], update vfresh z am) + in + match inspect hd, list_unref tl with + | Tv_FVar fv, [(t1, Q_Explicit) ; (t2, Q_Explicit)] -> + if term_eq (pack (Tv_FVar fv)) mult + then (let (e1,ts,am) = reification_aux ts am mult unit t1 in + let (e2,ts,am) = reification_aux ts am mult unit t2 in + (Mult e1 e2, ts, am)) + else fatom t ts am + | _, _ -> + if term_eq t unit + then (Unit, ts, am) + else fatom t ts am + +let reification (#a:Type) (m:cm a) (ts:list term) (am:amap a) (t:term) : + Tac (exp & list term & amap a) = + let mult = norm_term [delta;zeta;iota] (quote (CM?.mult m)) in + let unit = norm_term [delta;zeta;iota] (quote (CM?.unit m)) in + let t = norm_term [delta;zeta;iota] t in + reification_aux ts am mult unit t + +let canon_monoid (#a:Type) (m:cm a) : Tac unit = + norm []; + match term_as_formula (cur_goal ()) with + | Comp (Eq (Some t)) t1 t2 -> + // dump ("t1 =" ^ term_to_string t1 ^ + // "; t2 =" ^ term_to_string t2); + if term_eq t (quote a) then + let (r1, ts, am) = reification m [] (const (CM?.unit m)) t1 in + let (r2, _, am) = reification m ts am t2 in + dump ("am =" ^ term_to_string (quote am)); + change_sq (quote (mdenote m am r1 == mdenote m am r2)); + // dump ("before =" ^ term_to_string (norm_term [delta;primops] + // (quote (mdenote m am r1 == mdenote m am r2)))); + // dump ("expected after =" ^ term_to_string (norm_term [delta;primops] + // (quote (xsdenote m am (canon r1) == + // xsdenote m am (canon r2))))); + apply (`monoid_reflect); + // dump ("after apply"); + norm [delta_only [`%canon; `%xsdenote; `%flatten; `%sort; + `%select; `%assoc; `%fst; 
`%__proj__Mktuple2__item___1; + `%(@); `%append; `%List.Tot.sortWith; + `%List.Tot.partition; `%bool_of_compare; `%compare_of_bool; + ]; primops] + // ;dump "done" + else fail "Goal should be an equality at the right monoid type" + | _ -> fail "Goal should be an equality" diff --git a/stage0/ulib/FStar.Tactics.CanonCommSemiring.fst b/stage0/ulib/FStar.Tactics.CanonCommSemiring.fst new file mode 100644 index 00000000000..5864cce4493 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CanonCommSemiring.fst @@ -0,0 +1,1708 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.CanonCommSemiring + +/// A tactic to solve equalities on a commutative semiring (a, +, *, 0, 1) +/// +/// The tactic [canon_semiring] is parameterized by the base type [a] and +/// a semiring theory [cr a]. This requires: +/// +/// - A commutative monoid (a, +, 0) for addition +/// That is, + is associative, commutative and has identity element 0 +/// - An additive inverse operator for (a, +, 0), making it an Abelian group +/// That is, a + -a = 0 +/// - A commutative monoid (a, *, 1) for multiplication +/// That is, * is associative, commutative and has identity element 1 +/// - Multiplication left-distributes over addition +/// That is, a * (b + c) == a * b + a * c +/// - 0 is an absorbing element of multiplication +/// That is, 0 * a = 0 +/// +/// In contrast to the previous version of FStar.Tactics.CanonCommSemiring, +/// the tactic defined here canonizes products, additions and additive inverses, +/// collects coefficients in monomials, and eliminates trivial expressions. +/// +/// This is based on the legacy (second) version of Coq's ring tactic: +/// - https://github.com/coq-contribs/legacy-ring/ +/// +/// See also the newest ring tactic in Coq, which is even more general +/// and efficient: +/// - https://coq.inria.fr/refman/addendum/ring.html +/// - http://www.cs.ru.nl/~freek/courses/tt-2014/read/10.1.1.61.3041.pdf + +open FStar.List +open FStar.Algebra.CommMonoid + +(* Trying to not just open FStar.Tactics.V2 to reduce deps. +TODO: Add an interface to this module. It's non trivial due to the quoting. 
*) +open FStar.Stubs.Reflection.Types +open FStar.Reflection.V2 +open FStar.Reflection.V2.Formula +open FStar.Stubs.Tactics.Types +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.V2.Derived +open FStar.Tactics.Util +open FStar.Tactics.NamedView +open FStar.Tactics.MApply + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +(** An attribute for marking definitions to unfold by the tactic *) +irreducible let canon_attr = () + +/// +/// Commutative semiring theory +/// + +let distribute_left_lemma (a:Type) (cm_add:cm a) (cm_mult:cm a) = + let ( + ) = cm_add.mult in + let ( * ) = cm_mult.mult in + x:a -> y:a -> z:a -> Lemma (x * (y + z) == x * y + x * z) + +let distribute_right_lemma (a:Type) (cm_add:cm a) (cm_mult:cm a) = + let ( + ) = cm_add.mult in + let ( * ) = cm_mult.mult in + x:a -> y:a -> z:a -> Lemma ((x + y) * z == x * z + y * z) + +let mult_zero_l_lemma (a:Type) (cm_add:cm a) (cm_mult:cm a) = + x:a -> Lemma (cm_mult.mult cm_add.unit x == cm_add.unit) + +let add_opp_r_lemma (a:Type) (cm_add:cm a) (opp:(a -> a)) = + let ( + ) = cm_add.mult in + x:a -> Lemma (x + opp x == cm_add.unit) + +[@@canon_attr] +unopteq +type cr (a:Type) = + | CR : + cm_add: cm a -> + cm_mult: cm a -> + opp: (a -> a) -> + add_opp: add_opp_r_lemma a cm_add opp -> + distribute: distribute_left_lemma a cm_add cm_mult -> + mult_zero_l: mult_zero_l_lemma a cm_add cm_mult -> + cr a + +let distribute_right (#a:Type) (r:cr a) : distribute_right_lemma a r.cm_add r.cm_mult = + fun x y z -> + r.cm_mult.commutativity (r.cm_add.mult x y) z; + r.distribute z x y; + r.cm_mult.commutativity x z; + r.cm_mult.commutativity y z + +/// +/// Syntax of canonical ring expressions +/// + +(** + * Marking expressions we would like to normalize fully. + * This does not do anything at the moment, but it would be nice + * to have a cheap mechanism to make this work without traversing the + * whole goal. +**) +[@@canon_attr] +unfold let norm_fully (#a:Type) (x:a) = x + +let index: eqtype = nat + +(* + * A list of variables represents a sorted product of one or more variables. + * We do not need to prove sortedness to prove correctness, so we never + * make it explicit. + *) +type varlist = + | Nil_var : varlist + | Cons_var : index -> varlist -> varlist + +(* + * A canonical expression represents an ordered sum of monomials. + * Each monomial is either: + * - a varlist (a product of variables): x1 * ... * xk + * - a product of a scalar and a varlist: c * x1 * ... * xk + * + * The order on monomials is the lexicographic order on varlist, with the + * additional convention that monomials with a scalar are less than monomials + * without a scalar. 
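+ *
+ * A small illustrative example, assuming a variable map that sends
+ * indices 0, 1 and 2 to some terms x0, x1 and x2 (hypothetical names):
+ * the sum 3*x0*x1 + x2 would be represented as
+ *   Cons_monom 3 (Cons_var 0 (Cons_var 1 Nil_var))
+ *     (Cons_varlist (Cons_var 2 Nil_var) Nil_monom)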
+ *) +type canonical_sum a = + | Nil_monom : canonical_sum a + | Cons_monom : a -> varlist -> canonical_sum a -> canonical_sum a + | Cons_varlist : varlist -> canonical_sum a -> canonical_sum a + +[@@canon_attr] +let rec varlist_lt (x y:varlist) : bool = + match x, y with + | Nil_var, Cons_var _ _ -> true + | Cons_var i xs, Cons_var j ys -> + if i < j then true else i = j && varlist_lt xs ys + | _, _ -> false + +[@@canon_attr] +val varlist_merge: l1:varlist -> l2:varlist -> Tot varlist (decreases %[l1; l2; 0]) + +[@@canon_attr] +val vm_aux: index -> t1:varlist -> l2:varlist -> Tot varlist (decreases %[t1; l2; 1]) + +(* Merges two lists of variables, preserving sortedness *) +[@@canon_attr] +let rec varlist_merge l1 l2 = + match l1, l2 with + | _, Nil_var -> l1 + | Nil_var, _ -> l2 + | Cons_var v1 t1, Cons_var v2 t2 -> vm_aux v1 t1 l2 +and vm_aux v1 t1 l2 = + match l2 with + | Cons_var v2 t2 -> + if v1 < v2 + then Cons_var v1 (varlist_merge t1 l2) + else Cons_var v2 (vm_aux v1 t1 t2) + | _ -> Cons_var v1 t1 + +(* + * Merges two canonical expressions + * + * We require that [a] is eqtype for better reasons later. + * Here it is convenient to fix the universe of [a] in + * mutually recursive functions. + *) +[@@canon_attr] +val canonical_sum_merge : #a:eqtype -> cr a + -> s1:canonical_sum a -> s2:canonical_sum a + -> Tot (canonical_sum a) (decreases %[s1; s2; 0]) + +[@@canon_attr] +val csm_aux: #a:eqtype -> r:cr a -> c1:a -> l1:varlist -> t1:canonical_sum a + -> s2:canonical_sum a -> Tot (canonical_sum a) (decreases %[t1; s2; 1]) + +[@@canon_attr] +let rec canonical_sum_merge #a r s1 s2 = + let aplus = r.cm_add.mult in + let aone = r.cm_mult.unit in + match s1 with + | Cons_monom c1 l1 t1 -> csm_aux r c1 l1 t1 s2 + | Cons_varlist l1 t1 -> csm_aux r aone l1 t1 s2 + | Nil_monom -> s2 + +and csm_aux #a r c1 l1 t1 s2 = + let aplus = r.cm_add.mult in + let aone = r.cm_mult.unit in + match s2 with + | Cons_monom c2 l2 t2 -> + if l1 = l2 + then Cons_monom (norm_fully (aplus c1 c2)) l1 (canonical_sum_merge r t1 t2) + else + if varlist_lt l1 l2 + then Cons_monom c1 l1 (canonical_sum_merge r t1 s2) + else Cons_monom c2 l2 (csm_aux #a r c1 l1 t1 t2) + | Cons_varlist l2 t2 -> + if l1 = l2 + then Cons_monom (norm_fully (aplus c1 aone)) l1 (canonical_sum_merge r t1 t2) + else + if varlist_lt l1 l2 + then Cons_monom c1 l1 (canonical_sum_merge r t1 s2) + else Cons_varlist l2 (csm_aux r c1 l1 t1 t2) + | Nil_monom -> + //if c1 = aone then Cons_varlist l1 t1 else + Cons_monom c1 l1 t1 + +(* Inserts a monomial into the appropriate position in a canonical sum *) +val monom_insert: #a:eqtype -> r:cr a + -> c1:a -> l1:varlist -> s2:canonical_sum a -> canonical_sum a + +[@@canon_attr] +let rec monom_insert #a r c1 l1 s2 = + let aplus = r.cm_add.mult in + let aone = r.cm_mult.unit in + match s2 with + | Cons_monom c2 l2 t2 -> + if l1 = l2 + then Cons_monom (norm_fully (aplus c1 c2)) l1 t2 + else + if varlist_lt l1 l2 + then Cons_monom c1 l1 s2 + else Cons_monom c2 l2 (monom_insert r c1 l1 t2) + | Cons_varlist l2 t2 -> + if l1 = l2 + then Cons_monom (norm_fully (aplus c1 aone)) l1 t2 + else + if varlist_lt l1 l2 + then Cons_monom c1 l1 s2 + else Cons_varlist l2 (monom_insert r c1 l1 t2) + | Nil_monom -> + if c1 = aone + then Cons_varlist l1 Nil_monom + else Cons_monom c1 l1 Nil_monom + +(* Inserts a monomial without scalar into a canonical sum *) +val varlist_insert: #a:eqtype -> cr a -> varlist -> canonical_sum a -> canonical_sum a + +[@@canon_attr] +let varlist_insert #a r l1 s2 = + let aone = r.cm_mult.unit in + 
monom_insert r aone l1 s2 + +(* Multiplies a sum by a scalar c0 *) +val canonical_sum_scalar: #a:Type -> cr a -> a -> canonical_sum a -> canonical_sum a + +[@@canon_attr] +let rec canonical_sum_scalar #a r c0 s = + let amult = r.cm_mult.mult in + match s with + | Cons_monom c l t -> Cons_monom (norm_fully (amult c0 c)) l (canonical_sum_scalar r c0 t) + | Cons_varlist l t -> Cons_monom c0 l (canonical_sum_scalar r c0 t) + | Nil_monom -> Nil_monom + +(* Multiplies a sum by a monomial without scalar *) +val canonical_sum_scalar2: #a:eqtype -> cr a -> varlist + -> canonical_sum a -> canonical_sum a + +[@@canon_attr] +let rec canonical_sum_scalar2 #a r l0 s = + match s with + | Cons_monom c l t -> + monom_insert r c (varlist_merge l0 l) (canonical_sum_scalar2 r l0 t) + | Cons_varlist l t -> + varlist_insert r (varlist_merge l0 l) (canonical_sum_scalar2 r l0 t) + | Nil_monom -> Nil_monom + + +(* Multiplies a sum by a monomial with scalar *) +val canonical_sum_scalar3: #a:eqtype -> cr a -> a -> varlist + -> canonical_sum a -> canonical_sum a + +[@@canon_attr] +let rec canonical_sum_scalar3 #a r c0 l0 s = + let amult = r.cm_mult.mult in + match s with + | Cons_monom c l t -> + monom_insert r (norm_fully (amult c0 c)) (varlist_merge l0 l) + (canonical_sum_scalar3 r c0 l0 t) + | Cons_varlist l t -> + monom_insert r c0 (varlist_merge l0 l) + (canonical_sum_scalar3 r c0 l0 t) + | Nil_monom -> s + +(* Multiplies two canonical sums *) +val canonical_sum_prod: #a:eqtype -> cr a + -> canonical_sum a -> canonical_sum a -> canonical_sum a + +[@@canon_attr] +let rec canonical_sum_prod #a r s1 s2 = + match s1 with + | Cons_monom c1 l1 t1 -> + canonical_sum_merge r (canonical_sum_scalar3 r c1 l1 s2) + (canonical_sum_prod r t1 s2) + | Cons_varlist l1 t1 -> + canonical_sum_merge r (canonical_sum_scalar2 r l1 s2) + (canonical_sum_prod r t1 s2) + | Nil_monom -> s1 + +/// +/// Syntax of concrete semiring polynomials +/// + +(* This is the type where we reflect expressions before normalization *) +type spolynomial a = + | SPvar : index -> spolynomial a + | SPconst : a -> spolynomial a + | SPplus : spolynomial a -> spolynomial a -> spolynomial a + | SPmult : spolynomial a -> spolynomial a -> spolynomial a + +(** Canonize a reflected expression *) +val spolynomial_normalize: #a:eqtype -> cr a -> spolynomial a -> canonical_sum a + +[@@canon_attr] +let rec spolynomial_normalize #a r p = + match p with + | SPvar i -> Cons_varlist (Cons_var i Nil_var) Nil_monom + | SPconst c -> Cons_monom c Nil_var Nil_monom + | SPplus l q -> + canonical_sum_merge r (spolynomial_normalize r l) (spolynomial_normalize r q) + | SPmult l q -> + canonical_sum_prod r (spolynomial_normalize r l) (spolynomial_normalize r q) + +(** + * Simplify a canonical sum. + * Removes 0 * x1 * ... * xk and turns 1 * x1 * ... * xk into x1 * ... * xk +**) +val canonical_sum_simplify: #a:eqtype -> cr a -> canonical_sum a -> canonical_sum a + +[@@canon_attr] +let rec canonical_sum_simplify #a r s = + let azero = r.cm_add.unit in + let aone = r.cm_mult.unit in + let aplus = r.cm_add.mult in + match s with + | Cons_monom c l t -> + if norm_fully (c = azero) then canonical_sum_simplify r t + else + if norm_fully (c = aone) + then Cons_varlist l (canonical_sum_simplify r t) + else Cons_monom c l (canonical_sum_simplify r t) + | Cons_varlist l t -> Cons_varlist l (canonical_sum_simplify r t) + | Nil_monom -> s + +(** + * The main canonization algorithm: turn an expression into a sum and + * simplify it. 
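+ *
+ * A small worked example (assuming the integer ring int_cr defined at
+ * the end of this file): the reflected expression
+ *   SPplus (SPmult (SPvar 0) (SPconst 1)) (SPvar 0)
+ * i.e. x*1 + x, simplifies to
+ *   Cons_monom 2 (Cons_var 0 Nil_var) Nil_monom
+ * i.e. the single monomial 2*x.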
+**) +val spolynomial_simplify: #a:eqtype -> cr a -> spolynomial a -> canonical_sum a + +[@@canon_attr] +let spolynomial_simplify #a r p = + canonical_sum_simplify r + (spolynomial_normalize r p) + +/// +/// Interpretation of varlists, monomials and canonical sums +/// + +type var = nat + +(** + * The variable map: + * This maps polynomial variables to ring expressions. That is, any term + * that is not an addition or a multiplication is turned into a variable + * + * The representation is inefficient. For large terms it might be worthwhile + * using a better data structure. +**) +let vmap a = list (var & a) & a + +(** Add a new entry in a variable map *) +let update (#a:Type) (x:var) (xa:a) (vm:vmap a) : vmap a = + let l, y = vm in (x, xa) :: l, y + +(** Quotes a list *) +let rec quote_list (#a:Type) (ta:term) (quotea:a -> Tac term) (xs:list a) : + Tac term = + match xs with + | [] -> mk_app (`Nil) [(ta, Q_Implicit)] + | x::xs' -> mk_app (`Cons) [(ta, Q_Implicit); + (quotea x, Q_Explicit); + (quote_list ta quotea xs', Q_Explicit)] + +(** Quotes a variable map *) +let quote_vm (#a:Type) (ta: term) (quotea:a -> Tac term) (vm:vmap a) : Tac term = + let quote_map_entry (p:(nat & a)) : Tac term = + mk_app (`Mktuple2) [(`nat, Q_Implicit); (ta, Q_Implicit); + (pack (Tv_Const (C_Int (fst p))), Q_Explicit); + (quotea (snd p), Q_Explicit)] in + let tyentry = mk_e_app (`tuple2) [(`nat); ta] in + let tlist = quote_list tyentry quote_map_entry (fst vm) in + let tylist = mk_e_app (`list) [tyentry] in + mk_app (`Mktuple2) [(tylist, Q_Implicit); (ta, Q_Implicit); + (tlist, Q_Explicit); (quotea (snd vm), Q_Explicit)] + +(** + * A varlist is interpreted as the product of the entries in the variable map + * + * Unbound variables are mapped to the default value according to the map. + * This would normally never occur, but it makes it easy to prove correctness. 
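+ *
+ * A small illustrative example, with hypothetical terms x0, x1 and a
+ * default value d: for vm = ([(0, x0); (1, x1)], d), the varlist
+ * Cons_var 0 (Cons_var 1 Nil_var) is interpreted as x0 * x1, Nil_var as
+ * the multiplicative unit, and any unbound index as the default d.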
+ *) +[@@canon_attr] +let interp_var (#a:Type) (vm:vmap a) (i:index) = + match List.Tot.Base.assoc i (fst vm) with + | Some x -> x + | _ -> snd vm + +[@@canon_attr] +private +let rec ivl_aux (#a:Type) (r:cr a) (vm:vmap a) (x:index) (t:varlist) + : Tot a (decreases t) = + let amult = r.cm_mult.mult in + match t with + | Nil_var -> interp_var vm x + | Cons_var x' t' -> amult (interp_var vm x) (ivl_aux r vm x' t') + +[@@canon_attr] +let interp_vl (#a:Type) (r:cr a) (vm:vmap a) (l:varlist) = + let aone = r.cm_mult.unit in + match l with + | Nil_var -> aone + | Cons_var x t -> ivl_aux r vm x t + +[@@canon_attr] +let interp_m (#a:Type) (r:cr a) (vm:vmap a) (c:a) (l:varlist) = + let amult = r.cm_mult.mult in + match l with + | Nil_var -> c + | Cons_var x t -> amult c (ivl_aux r vm x t) + +[@@canon_attr] +let rec ics_aux (#a:Type) (r:cr a) (vm:vmap a) (x:a) (s:canonical_sum a) + : Tot a (decreases s) = + let aplus = r.cm_add.mult in + match s with + | Nil_monom -> x + | Cons_varlist l t -> aplus x (ics_aux r vm (interp_vl r vm l) t) + | Cons_monom c l t -> aplus x (ics_aux r vm (interp_m r vm c l) t) + +(** Interpretation of a canonical sum *) +[@@canon_attr] +let interp_cs (#a:Type) (r:cr a) (vm:vmap a) (s:canonical_sum a) : a = + let azero = r.cm_add.unit in + match s with + | Nil_monom -> azero + | Cons_varlist l t -> ics_aux r vm (interp_vl r vm l) t + | Cons_monom c l t -> ics_aux r vm (interp_m r vm c l) t + +(** Interpretation of a polynomial *) +[@@canon_attr] +let rec interp_sp (#a:Type) (r:cr a) (vm:vmap a) (p:spolynomial a) : a = + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + match p with + | SPconst c -> c + | SPvar i -> interp_var vm i + | SPplus p1 p2 -> aplus (interp_sp r vm p1) (interp_sp r vm p2) + | SPmult p1 p2 -> amult (interp_sp r vm p1) (interp_sp r vm p2) + +/// +/// Proof of correctness +/// + +val mult_one_l (#a:Type) (r:cr a) (x:a) : + Lemma (r.cm_mult.mult r.cm_mult.unit x == x) + [SMTPat (r.cm_mult.mult r.cm_mult.unit x)] +let mult_one_l #a r x = + r.cm_mult.identity x + +val mult_one_r (#a:Type) (r:cr a) (x:a) : + Lemma (r.cm_mult.mult x r.cm_mult.unit == x) + [SMTPat (r.cm_mult.mult x r.cm_mult.unit)] +let mult_one_r #a r x = + r.cm_mult.commutativity r.cm_mult.unit x + +val mult_zero_l (#a:Type) (r:cr a) (x:a) : + Lemma (r.cm_mult.mult r.cm_add.unit x == r.cm_add.unit) + [SMTPat (r.cm_mult.mult r.cm_add.unit x)] +let mult_zero_l #a r x = + r.mult_zero_l x + +val mult_zero_r (#a:Type) (r:cr a) (x:a) : + Lemma (r.cm_mult.mult x r.cm_add.unit == r.cm_add.unit) + [SMTPat (r.cm_mult.mult x r.cm_add.unit)] +let mult_zero_r #a r x = + r.cm_mult.commutativity x r.cm_add.unit + +val add_zero_l (#a:Type) (r:cr a) (x:a) : + Lemma (r.cm_add.mult r.cm_add.unit x == x) + [SMTPat (r.cm_add.mult r.cm_add.unit x)] +let add_zero_l #a r x = + r.cm_add.identity x + +val add_zero_r (#a:Type) (r:cr a) (x:a) : + Lemma (r.cm_add.mult x r.cm_add.unit == x) + [SMTPat (r.cm_add.mult x r.cm_add.unit)] +let add_zero_r #a r x = + r.cm_add.commutativity r.cm_add.unit x + +val opp_unique (#a:Type) (r:cr a) (x y:a) : Lemma + (requires r.cm_add.mult x y == r.cm_add.unit) + (ensures y == r.opp x) +let opp_unique #a r x y = + let ( + ) = r.cm_add.mult in + let zero = r.cm_add.unit in + calc (==) { + y; + == { r.add_opp x } + y + (x + r.opp x); + == { r.cm_add.associativity y x (r.opp x) } + (y + x) + r.opp x; + == { r.cm_add.commutativity x y } + zero + r.opp x; + == { } + r.opp x; + } + +val add_mult_opp (#a:Type) (r:cr a) (x:a) : Lemma + (r.cm_add.mult x (r.cm_mult.mult (r.opp 
r.cm_mult.unit) x) == r.cm_add.unit) +let add_mult_opp #a r x = + let ( + ) = r.cm_add.mult in + let ( * ) = r.cm_mult.mult in + let zero = r.cm_add.unit in + let one = r.cm_mult.unit in + calc (==) { + x + r.opp one * x; + == { } + one * x + r.opp one * x; + == { distribute_right r one (r.opp one) x } + (one + r.opp one) * x; + == { r.add_opp one } + zero * x; + == { } + zero; + } + +val ivl_aux_ok (#a:Type) (r:cr a) (vm:vmap a) (v:varlist) (i:index) : Lemma + (ivl_aux r vm i v == r.cm_mult.mult (interp_var vm i) (interp_vl r vm v)) +let ivl_aux_ok #a r vm v i = () + +val vm_aux_ok (#a:eqtype) (r:cr a) (vm:vmap a) (v:index) (t l:varlist) : + Lemma + (ensures + interp_vl r vm (vm_aux v t l) == + r.cm_mult.mult (interp_vl r vm (Cons_var v t)) (interp_vl r vm l)) + (decreases %[t; l; 1]) + +val varlist_merge_ok (#a:eqtype) (r:cr a) (vm:vmap a) (x y:varlist) : + Lemma + (ensures + interp_vl r vm (varlist_merge x y) == + r.cm_mult.mult (interp_vl r vm x) (interp_vl r vm y)) + (decreases %[x; y; 0]) + +let rec varlist_merge_ok #a r vm x y = + let amult = r.cm_mult.mult in + match x, y with + | Cons_var v1 t1, Nil_var -> () + | Cons_var v1 t1, Cons_var v2 t2 -> + if v1 < v2 + then + begin + varlist_merge_ok r vm t1 y; + assert ( + interp_vl r vm (varlist_merge x y) == + amult (interp_var vm v1) (amult (interp_vl r vm t1) (interp_vl r vm y))); + r.cm_mult.associativity + (interp_var vm v1) (interp_vl r vm t1) (interp_vl r vm y) + end + else + vm_aux_ok r vm v1 t1 y + | Nil_var, _ -> () +and vm_aux_ok #a r vm v1 t1 l2 = + match l2 with + | Cons_var v2 t2 -> + if v1 < v2 + then + begin + varlist_merge_ok r vm t1 l2; + r.cm_mult.associativity + (interp_var vm v1) (interp_vl r vm t1) (interp_vl r vm l2) + end + else + begin + vm_aux_ok r vm v1 t1 t2; + calc (==) { + interp_vl r vm (Cons_var v2 (vm_aux v1 t1 t2)); + == { } + ivl_aux r vm v2 (vm_aux v1 t1 t2); + == { } + r.cm_mult.mult (interp_var vm v2) (interp_vl r vm (vm_aux v1 t1 t2)); + == { } + r.cm_mult.mult (interp_var vm v2) (r.cm_mult.mult (interp_vl r vm (Cons_var v1 t1)) (interp_vl r vm t2)); + == { r.cm_mult.commutativity + (interp_vl r vm (Cons_var v1 t1)) (interp_vl r vm t2) } + r.cm_mult.mult (interp_var vm v2) + (r.cm_mult.mult (interp_vl r vm t2) (interp_vl r vm (Cons_var v1 t1)) ); + == { r.cm_mult.associativity + (interp_var vm v2) + (interp_vl r vm t2) (interp_vl r vm (Cons_var v1 t1)) } + r.cm_mult.mult + (r.cm_mult.mult (interp_var vm v2) (interp_vl r vm t2)) + (interp_vl r vm (Cons_var v1 t1)); + == { r.cm_mult.commutativity + (interp_vl r vm (Cons_var v1 t1)) (interp_vl r vm (Cons_var v2 t2)) } + r.cm_mult.mult (interp_vl r vm (Cons_var v1 t1)) (interp_vl r vm (Cons_var v2 t2)); + } + end + | _ -> () + +val ics_aux_ok: #a:eqtype -> r:cr a -> vm:vmap a -> x:a -> s:canonical_sum a -> + Lemma (ensures ics_aux r vm x s == r.cm_add.mult x (interp_cs r vm s)) + (decreases s) +let rec ics_aux_ok #a r vm x s = + match s with + | Nil_monom -> () + | Cons_varlist l t -> + ics_aux_ok r vm (interp_vl r vm l) t + | Cons_monom c l t -> + ics_aux_ok r vm (interp_m r vm c l) t + +val interp_m_ok: #a:eqtype -> r:cr a -> vm:vmap a -> x:a -> l:varlist -> + Lemma (interp_m r vm x l == r.cm_mult.mult x (interp_vl r vm l)) +let interp_m_ok #a r vm x l = () + +val aplus_assoc_4: #a:Type -> r:cr a -> w:a -> x:a -> y:a -> z:a -> Lemma + (let aplus = r.cm_add.mult in + aplus (aplus w x) (aplus y z) == aplus (aplus w y) (aplus x z)) +let aplus_assoc_4 #a r w x y z = + let aplus = r.cm_add.mult in + let assoc = r.cm_add.associativity in + let comm = 
r.cm_add.commutativity in + calc (==) { + aplus (aplus w x) (aplus y z); + == { assoc w x (aplus y z) } + aplus w (aplus x (aplus y z)); + == { comm x (aplus y z) } + aplus w (aplus (aplus y z) x); + == { assoc w (aplus y z) x } + aplus (aplus w (aplus y z)) x; + == { assoc w y z } + aplus (aplus (aplus w y) z) x; + == { assoc (aplus w y) z x } + aplus (aplus w y) (aplus z x); + == { comm z x } + aplus (aplus w y) (aplus x z); + } + +val canonical_sum_merge_ok: #a:eqtype -> r:cr a -> vm:vmap a + -> s1:canonical_sum a -> s2:canonical_sum a -> + Lemma + (ensures + interp_cs r vm (canonical_sum_merge r s1 s2) == + r.cm_add.mult (interp_cs r vm s1) (interp_cs r vm s2)) + (decreases %[s1; s2; 0]) + +val csm_aux_ok: #a:eqtype -> r:cr a -> vm:vmap a + -> c1:a -> l1:varlist -> t1:canonical_sum a -> s2:canonical_sum a -> + Lemma + (ensures + interp_cs r vm (csm_aux r c1 l1 t1 s2) == + r.cm_add.mult (interp_cs r vm (Cons_monom c1 l1 t1)) (interp_cs r vm s2)) + (decreases %[t1; s2; 1]) + +let rec canonical_sum_merge_ok #a r vm s1 s2 = + let aone = r.cm_mult.unit in + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + match s1 with + | Cons_monom c1 l1 t1 -> csm_aux_ok #a r vm c1 l1 t1 s2 + | Cons_varlist l1 t1 -> + calc (==) { + interp_cs r vm (canonical_sum_merge r s1 s2); + == { } + interp_cs r vm (csm_aux r aone l1 t1 s2); + == { csm_aux_ok #a r vm aone l1 t1 s2 } + aplus (interp_cs r vm (Cons_monom aone l1 t1)) + (interp_cs r vm s2); + == { ics_aux_ok r vm (interp_vl r vm l1) t1 } + aplus (interp_cs r vm (Cons_varlist l1 t1)) + (interp_cs r vm s2); + } + | Nil_monom -> () +and csm_aux_ok #a r vm c1 l1 t1 s2 = + let aplus = r.cm_add.mult in + let aone = r.cm_mult.unit in + let amult = r.cm_mult.mult in + match s2 with + | Nil_monom -> () + | Cons_monom c2 l2 t2 -> + let s1 = Cons_monom c1 l1 t1 in + if l1 = l2 then + begin + calc (==) { + interp_cs r vm (csm_aux r c1 l1 t1 s2); + == { } + ics_aux r vm (interp_m r vm (aplus c1 c2) l1) + (canonical_sum_merge r t1 t2); + == { ics_aux_ok r vm (interp_m r vm (aplus c1 c2) l1) + (canonical_sum_merge r t1 t2) } + aplus (interp_m r vm (aplus c1 c2) l1) + (interp_cs r vm (canonical_sum_merge r t1 t2)); + == { interp_m_ok r vm (aplus c1 c2) l1 } + aplus (amult (aplus c1 c2) (interp_vl r vm l1)) + (interp_cs r vm (canonical_sum_merge r t1 t2)); + == { canonical_sum_merge_ok r vm t1 t2 } + aplus (amult (aplus c1 c2) (interp_vl r vm l1)) + (aplus (interp_cs r vm t1) (interp_cs r vm t2)); + == { distribute_right r c1 c2 (interp_vl r vm l1) } + aplus (aplus (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2))) + (aplus (interp_cs r vm t1) + (interp_cs r vm t2)); + == { aplus_assoc_4 r + (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t1) + (interp_cs r vm t2) } + aplus (aplus (amult c1 (interp_vl r vm l1)) (interp_cs r vm t1)) + (aplus (amult c2 (interp_vl r vm l2)) (interp_cs r vm t2)); + == { ics_aux_ok r vm (amult c1 (interp_vl r vm l1)) t1; + interp_m_ok r vm c1 l1 } + aplus (interp_cs r vm s1) + (aplus (amult c2 (interp_vl r vm l2)) (interp_cs r vm t2)); + == { ics_aux_ok r vm (amult c2 (interp_vl r vm l2)) t2; + interp_m_ok r vm c2 l2 } + aplus (interp_cs r vm s1) (interp_cs r vm s2); + } + end + else if varlist_lt l1 l2 then + begin + calc (==) { + interp_cs r vm (canonical_sum_merge r s1 s2); + == { } + ics_aux r vm (interp_m r vm c1 l1) + (canonical_sum_merge r t1 s2); + == { ics_aux_ok r vm (interp_m r vm c1 l1) + (canonical_sum_merge r t1 s2) } + aplus (interp_m r vm c1 l1) + (interp_cs r vm 
(canonical_sum_merge r t1 s2)); + == { interp_m_ok r vm c1 l1 } + aplus (amult c1 (interp_vl r vm l1)) + (interp_cs r vm (canonical_sum_merge r t1 s2)); + == { canonical_sum_merge_ok r vm t1 s2 } + aplus (amult c1 (interp_vl r vm l1)) + (aplus (interp_cs r vm t1) (interp_cs r vm s2)); + == { r.cm_add.associativity + (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t1) + (interp_cs r vm s2) + } + aplus (aplus (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t1)) + (interp_cs r vm s2); + == { ics_aux_ok r vm (amult c1 (interp_vl r vm l1)) t1; + interp_m_ok r vm c1 l1 } + aplus (interp_cs r vm s1) (interp_cs r vm s2); + } + end + else + begin + calc (==) { + interp_cs r vm (csm_aux r c1 l1 t1 s2); + == { } + ics_aux r vm (interp_m r vm c2 l2) + (csm_aux r c1 l1 t1 t2); + == { ics_aux_ok r vm (interp_m r vm c2 l2) + (csm_aux r c1 l1 t1 t2) } + aplus (interp_m r vm c2 l2) + (interp_cs r vm (csm_aux r c1 l1 t1 t2)); + == { interp_m_ok r vm c2 l2 } + aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm (csm_aux r c1 l1 t1 t2)); + == { csm_aux_ok r vm c1 l1 t1 t2 } + aplus (amult c2 (interp_vl r vm l2)) + (aplus (interp_cs r vm s1) (interp_cs r vm t2)); + == { r.cm_add.commutativity (interp_cs r vm s1) (interp_cs r vm t2) } + aplus (amult c2 (interp_vl r vm l2)) + (aplus (interp_cs r vm t2) (interp_cs r vm s1)); + == { r.cm_add.associativity + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2) + (interp_cs r vm s1) + } + aplus (aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2)) + (interp_cs r vm s1); + == { ics_aux_ok r vm (amult c1 (interp_vl r vm l1)) t1; + interp_m_ok r vm c1 l1 } + aplus (interp_cs r vm s2) (interp_cs r vm s1); + == { r.cm_add.commutativity (interp_cs r vm s1) (interp_cs r vm s2) } + aplus (interp_cs r vm s1) (interp_cs r vm s2); + } + end + | Cons_varlist l2 t2 -> // Same as Cons_monom with c2 = aone + let c2 = aone in + let s1 = Cons_monom c1 l1 t1 in + if l1 = l2 then + begin + calc (==) { + interp_cs r vm (csm_aux r c1 l1 t1 s2); + == { } + ics_aux r vm (interp_m r vm (aplus c1 c2) l1) + (canonical_sum_merge r t1 t2); + == { ics_aux_ok r vm (interp_m r vm (aplus c1 c2) l1) + (canonical_sum_merge r t1 t2) } + aplus (interp_m r vm (aplus c1 c2) l1) + (interp_cs r vm (canonical_sum_merge r t1 t2)); + == { interp_m_ok r vm (aplus c1 c2) l1 } + aplus (amult (aplus c1 c2) (interp_vl r vm l1)) + (interp_cs r vm (canonical_sum_merge r t1 t2)); + == { canonical_sum_merge_ok r vm t1 t2 } + aplus (amult (aplus c1 c2) (interp_vl r vm l1)) + (aplus (interp_cs r vm t1) (interp_cs r vm t2)); + == { distribute_right r c1 c2 (interp_vl r vm l1) } + aplus (aplus (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2))) + (aplus (interp_cs r vm t1) + (interp_cs r vm t2)); + == { aplus_assoc_4 r + (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t1) + (interp_cs r vm t2) } + aplus (aplus (amult c1 (interp_vl r vm l1)) (interp_cs r vm t1)) + (aplus (amult c2 (interp_vl r vm l2)) (interp_cs r vm t2)); + == { ics_aux_ok r vm (amult c1 (interp_vl r vm l1)) t1; + interp_m_ok r vm c1 l1 } + aplus (interp_cs r vm s1) + (aplus (amult c2 (interp_vl r vm l2)) (interp_cs r vm t2)); + == { ics_aux_ok r vm (amult c2 (interp_vl r vm l2)) t2; + interp_m_ok r vm c2 l2 } + aplus (interp_cs r vm s1) (interp_cs r vm s2); + } + end + else if varlist_lt l1 l2 then + begin + calc (==) { + interp_cs r vm (canonical_sum_merge r s1 s2); + == { } + ics_aux r vm (interp_m r vm c1 l1) + (canonical_sum_merge r t1 s2); + == { ics_aux_ok r vm (interp_m r vm c1 l1) + 
(canonical_sum_merge r t1 s2) } + aplus (interp_m r vm c1 l1) + (interp_cs r vm (canonical_sum_merge r t1 s2)); + == { interp_m_ok r vm c1 l1 } + aplus (amult c1 (interp_vl r vm l1)) + (interp_cs r vm (canonical_sum_merge r t1 s2)); + == { canonical_sum_merge_ok r vm t1 s2 } + aplus (amult c1 (interp_vl r vm l1)) + (aplus (interp_cs r vm t1) (interp_cs r vm s2)); + == { r.cm_add.associativity + (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t1) + (interp_cs r vm s2) + } + aplus (aplus (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t1)) + (interp_cs r vm s2); + == { ics_aux_ok r vm (amult c1 (interp_vl r vm l1)) t1; + interp_m_ok r vm c1 l1 } + aplus (interp_cs r vm s1) (interp_cs r vm s2); + } + end + else + begin + calc (==) { + interp_cs r vm (csm_aux r c1 l1 t1 s2); + == { } + ics_aux r vm (interp_m r vm c2 l2) + (csm_aux r c1 l1 t1 t2); + == { ics_aux_ok r vm (interp_m r vm c2 l2) + (csm_aux r c1 l1 t1 t2) } + aplus (interp_m r vm c2 l2) + (interp_cs r vm (csm_aux r c1 l1 t1 t2)); + == { interp_m_ok r vm c2 l2 } + aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm (csm_aux r c1 l1 t1 t2)); + == { csm_aux_ok r vm c1 l1 t1 t2 } + aplus (amult c2 (interp_vl r vm l2)) + (aplus (interp_cs r vm s1) (interp_cs r vm t2)); + == { r.cm_add.commutativity (interp_cs r vm s1) (interp_cs r vm t2) } + aplus (amult c2 (interp_vl r vm l2)) + (aplus (interp_cs r vm t2) (interp_cs r vm s1)); + == { r.cm_add.associativity + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2) + (interp_cs r vm s1) + } + aplus (aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2)) + (interp_cs r vm s1); + == { ics_aux_ok r vm (amult c1 (interp_vl r vm l1)) t1; + interp_m_ok r vm c1 l1 } + aplus (interp_cs r vm s2) (interp_cs r vm s1); + == { r.cm_add.commutativity (interp_cs r vm s1) (interp_cs r vm s2) } + aplus (interp_cs r vm s1) (interp_cs r vm s2); + } + end + +val monom_insert_ok: #a:eqtype -> r:cr a -> vm:vmap a + -> c1:a -> l1:varlist -> s2:canonical_sum a -> + Lemma + (interp_cs r vm (monom_insert r c1 l1 s2) == + r.cm_add.mult (r.cm_mult.mult c1 (interp_vl r vm l1)) (interp_cs r vm s2)) +let rec monom_insert_ok #a r vm c1 l1 s2 = + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + let aone = r.cm_mult.unit in + match s2 with + | Cons_monom c2 l2 t2 -> + if l1 = l2 + then + calc (==) { + interp_cs r vm (monom_insert r c1 l1 s2); + == { } + interp_cs r vm (Cons_monom (aplus c1 c2) l1 t2); + == { } + ics_aux r vm (interp_m r vm (aplus c1 c2) l1) t2; + == { ics_aux_ok r vm (interp_m r vm (aplus c1 c2) l1) t2 } + aplus (interp_m r vm (aplus c1 c2) l1) (interp_cs r vm t2); + == { interp_m_ok r vm (aplus c1 c2) l1 } + aplus (amult (aplus c1 c2) (interp_vl r vm l2)) (interp_cs r vm t2); + == { distribute_right r c1 c2 (interp_vl r vm l2) } + aplus (aplus (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2))) + (interp_cs r vm t2); + == { r.cm_add.associativity + (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2) } + aplus (amult c1 (interp_vl r vm l1)) + (aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2)); + == { ics_aux_ok r vm (interp_m r vm c2 l2) t2 } + aplus (amult c1 (interp_vl r vm l1)) (interp_cs r vm s2); + } + else + if varlist_lt l1 l2 then () + else + calc (==) { + interp_cs r vm (monom_insert r c1 l1 s2); + == { } + interp_cs r vm (Cons_monom c2 l2 (monom_insert r c1 l1 t2)); + == { } + aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm (monom_insert r c1 l1 t2)); + == { monom_insert_ok r vm c1 l1 t2 } + aplus (amult c2 
(interp_vl r vm l2)) + (aplus (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t2)); + == { r.cm_add.commutativity + (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t2) } + aplus (amult c2 (interp_vl r vm l2)) + (aplus (interp_cs r vm t2) + (amult c1 (interp_vl r vm l1))); + == { r.cm_add.associativity + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2) + (amult c1 (interp_vl r vm l1)) } + aplus (aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2)) + (amult c1 (interp_vl r vm l1)); + == { ics_aux_ok r vm (interp_m r vm c2 l2) t2 } + aplus (interp_cs r vm s2) (amult c1 (interp_vl r vm l1)); + == { r.cm_add.commutativity + (interp_cs r vm s2) + (amult c1 (interp_vl r vm l1)) } + aplus (amult c1 (interp_vl r vm l1)) (interp_cs r vm s2); + } + | Cons_varlist l2 t2 -> // Same as Cons_monom with c2 = aone + let c2 = aone in + if l1 = l2 + then + calc (==) { + interp_cs r vm (monom_insert r c1 l1 s2); + == { } + interp_cs r vm (Cons_monom (aplus c1 c2) l1 t2); + == { } + ics_aux r vm (interp_m r vm (aplus c1 c2) l1) t2; + == { ics_aux_ok r vm (interp_m r vm (aplus c1 c2) l1) t2 } + aplus (interp_m r vm (aplus c1 c2) l1) (interp_cs r vm t2); + == { interp_m_ok r vm (aplus c1 c2) l1 } + aplus (amult (aplus c1 c2) (interp_vl r vm l2)) (interp_cs r vm t2); + == { distribute_right r c1 c2 (interp_vl r vm l2) } + aplus (aplus (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2))) + (interp_cs r vm t2); + == { r.cm_add.associativity + (amult c1 (interp_vl r vm l1)) + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2) } + aplus (amult c1 (interp_vl r vm l1)) + (aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2)); + == { ics_aux_ok r vm (interp_m r vm c2 l2) t2 } + aplus (amult c1 (interp_vl r vm l1)) (interp_cs r vm s2); + } + else + if varlist_lt l1 l2 then () + else + calc (==) { + interp_cs r vm (monom_insert r c1 l1 s2); + == { } + interp_cs r vm (Cons_monom c2 l2 (monom_insert r c1 l1 t2)); + == { } + aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm (monom_insert r c1 l1 t2)); + == { monom_insert_ok r vm c1 l1 t2 } + aplus (amult c2 (interp_vl r vm l2)) + (aplus (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t2)); + == { r.cm_add.commutativity + (amult c1 (interp_vl r vm l1)) + (interp_cs r vm t2) } + aplus (amult c2 (interp_vl r vm l2)) + (aplus (interp_cs r vm t2) + (amult c1 (interp_vl r vm l1))); + == { r.cm_add.associativity + (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2) + (amult c1 (interp_vl r vm l1)) } + aplus (aplus (amult c2 (interp_vl r vm l2)) + (interp_cs r vm t2)) + (amult c1 (interp_vl r vm l1)); + == { ics_aux_ok r vm (interp_m r vm c2 l2) t2 } + aplus (interp_cs r vm s2) (amult c1 (interp_vl r vm l1)); + == { r.cm_add.commutativity + (interp_cs r vm s2) + (amult c1 (interp_vl r vm l1)) } + aplus (amult c1 (interp_vl r vm l1)) (interp_cs r vm s2); + } + | Nil_monom -> () + +val varlist_insert_ok: #a:eqtype -> r:cr a -> vm:vmap a + -> l1:varlist -> s2:canonical_sum a -> + Lemma (interp_cs r vm (varlist_insert r l1 s2) == + r.cm_add.mult (interp_vl r vm l1) (interp_cs r vm s2)) +let varlist_insert_ok #a r vm l1 s2 = + let aone = r.cm_mult.unit in + monom_insert_ok r vm aone l1 s2 + +val canonical_sum_scalar_ok: #a:eqtype -> r:cr a -> vm:vmap a + -> c0:a -> s:canonical_sum a -> + Lemma ( + interp_cs r vm (canonical_sum_scalar r c0 s) == + r.cm_mult.mult c0 (interp_cs r vm s)) +let rec canonical_sum_scalar_ok #a r vm c0 s = + let aone = r.cm_mult.unit in + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + match s with + | 
Cons_monom c l t -> + calc (==) { + interp_cs r vm (canonical_sum_scalar r c0 s); + == { } + interp_cs r vm (Cons_monom (amult c0 c) l (canonical_sum_scalar r c0 t)); + == { } + aplus (amult (amult c0 c) (interp_vl r vm l)) + (interp_cs r vm (canonical_sum_scalar r c0 t)); + == { r.cm_mult.associativity c0 c (interp_vl r vm l) } + aplus (amult c0 (amult c (interp_vl r vm l))) + (interp_cs r vm (canonical_sum_scalar r c0 t)); + == { canonical_sum_scalar_ok r vm c0 t } + aplus (amult c0 (amult c (interp_vl r vm l))) + (amult c0 (interp_cs r vm t)); + == { r.distribute c0 (amult c (interp_vl r vm l)) + (interp_cs r vm t) } + amult c0 (aplus (amult c (interp_vl r vm l)) (interp_cs r vm t)); + == { } + amult c0 (interp_cs r vm s); + } + | Cons_varlist l t -> // Same as Cons_monom c l t with c = r.cm_mult.unit + let c = aone in + calc (==) { + interp_cs r vm (canonical_sum_scalar r c0 s); + == { } + interp_cs r vm (Cons_monom (amult c0 c) l (canonical_sum_scalar r c0 t)); + == { } + aplus (amult (amult c0 c) (interp_vl r vm l)) + (interp_cs r vm (canonical_sum_scalar r c0 t)); + == { r.cm_mult.associativity c0 c (interp_vl r vm l) } + aplus (amult c0 (amult c (interp_vl r vm l))) + (interp_cs r vm (canonical_sum_scalar r c0 t)); + == { canonical_sum_scalar_ok r vm c0 t } + aplus (amult c0 (amult c (interp_vl r vm l))) + (amult c0 (interp_cs r vm t)); + == { r.distribute c0 (amult c (interp_vl r vm l)) + (interp_cs r vm t) } + amult c0 (aplus (amult c (interp_vl r vm l)) (interp_cs r vm t)); + == { } + amult c0 (interp_cs r vm s); + } + | Nil_monom -> () + +val canonical_sum_scalar2_ok: #a:eqtype -> r:cr a -> vm:vmap a + -> l0:varlist -> s:canonical_sum a -> + Lemma ( + interp_cs r vm (canonical_sum_scalar2 r l0 s) == + r.cm_mult.mult (interp_vl r vm l0) (interp_cs r vm s)) +let rec canonical_sum_scalar2_ok #a r vm l0 s = + let aone = r.cm_mult.unit in + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + match s with + | Cons_monom c l t -> + calc (==) { + interp_cs r vm (canonical_sum_scalar2 r l0 s); + == { } + interp_cs r vm + (monom_insert r c (varlist_merge l0 l) (canonical_sum_scalar2 r l0 t)); + == { monom_insert_ok r vm c (varlist_merge l0 l) (canonical_sum_scalar2 r l0 t) } + aplus (amult c (interp_vl r vm (varlist_merge l0 l))) + (interp_cs r vm (canonical_sum_scalar2 r l0 t)); + == { varlist_merge_ok r vm l0 l } + aplus (amult c (amult (interp_vl r vm l0) (interp_vl r vm l))) + (interp_cs r vm (canonical_sum_scalar2 r l0 t)); + == { canonical_sum_scalar2_ok r vm l0 t } + aplus (amult c (amult (interp_vl r vm l0) (interp_vl r vm l))) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.cm_mult.associativity c (interp_vl r vm l0) + (interp_vl r vm l) } + aplus (amult (amult c (interp_vl r vm l0)) (interp_vl r vm l)) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.cm_mult.commutativity (interp_vl r vm l0) c } + aplus (amult (amult (interp_vl r vm l0) c) (interp_vl r vm l)) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.cm_mult.associativity (interp_vl r vm l0) c (interp_vl r vm l) } + aplus (amult (interp_vl r vm l0) (amult c (interp_vl r vm l))) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.distribute (interp_vl r vm l0) + (amult c (interp_vl r vm l)) (interp_cs r vm t) } + amult (interp_vl r vm l0) + (aplus (amult c (interp_vl r vm l)) (interp_cs r vm t)); + == { } + amult (interp_vl r vm l0) (interp_cs r vm s); + } + | Cons_varlist l t -> // Same as Cons_monom c l t with c = aone + let c = aone in + calc (==) { + interp_cs r vm 
(canonical_sum_scalar2 r l0 s); + == { } + interp_cs r vm + (monom_insert r c (varlist_merge l0 l) (canonical_sum_scalar2 r l0 t)); + == { monom_insert_ok r vm c (varlist_merge l0 l) (canonical_sum_scalar2 r l0 t) } + aplus (amult c (interp_vl r vm (varlist_merge l0 l))) + (interp_cs r vm (canonical_sum_scalar2 r l0 t)); + == { varlist_merge_ok r vm l0 l } + aplus (amult c (amult (interp_vl r vm l0) (interp_vl r vm l))) + (interp_cs r vm (canonical_sum_scalar2 r l0 t)); + == { canonical_sum_scalar2_ok r vm l0 t } + aplus (amult c (amult (interp_vl r vm l0) (interp_vl r vm l))) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.cm_mult.associativity c (interp_vl r vm l0) + (interp_vl r vm l) } + aplus (amult (amult c (interp_vl r vm l0)) (interp_vl r vm l)) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.cm_mult.commutativity (interp_vl r vm l0) c } + aplus (amult (amult (interp_vl r vm l0) c) (interp_vl r vm l)) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.cm_mult.associativity (interp_vl r vm l0) c (interp_vl r vm l) } + aplus (amult (interp_vl r vm l0) (amult c (interp_vl r vm l))) + (amult (interp_vl r vm l0) (interp_cs r vm t)); + == { r.distribute (interp_vl r vm l0) + (amult c (interp_vl r vm l)) (interp_cs r vm t) } + amult (interp_vl r vm l0) + (aplus (amult c (interp_vl r vm l)) (interp_cs r vm t)); + == { } + amult (interp_vl r vm l0) (interp_cs r vm s); + } + | Nil_monom -> () + +val canonical_sum_scalar3_ok: #a:eqtype -> r:cr a -> vm:vmap a + -> c0:a -> l0:varlist -> s:canonical_sum a -> + Lemma ( + interp_cs r vm (canonical_sum_scalar3 r c0 l0 s) == + r.cm_mult.mult (r.cm_mult.mult c0 (interp_vl r vm l0)) (interp_cs r vm s)) +let rec canonical_sum_scalar3_ok #a r vm c0 l0 s = + let aone = r.cm_mult.unit in + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + match s with + | Cons_monom c l t -> + calc (==) { + interp_cs r vm (canonical_sum_scalar3 r c0 l0 s); + == { } + interp_cs r vm + (monom_insert r (amult c0 c) (varlist_merge l0 l) + (canonical_sum_scalar3 r c0 l0 t)); + == { monom_insert_ok r vm (amult c0 c) (varlist_merge l0 l) (canonical_sum_scalar3 r c0 l0 t) } + aplus (amult (amult c0 c) (interp_vl r vm (varlist_merge l0 l))) + (interp_cs r vm (canonical_sum_scalar3 r c0 l0 t)); + == { varlist_merge_ok r vm l0 l } + aplus (amult (amult c0 c) (amult (interp_vl r vm l0) (interp_vl r vm l))) + (interp_cs r vm (canonical_sum_scalar3 r c0 l0 t)); + == { canonical_sum_scalar3_ok r vm c0 l0 t } + aplus (amult (amult c0 c) (amult (interp_vl r vm l0) (interp_vl r vm l))) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.associativity (amult c0 c) + (interp_vl r vm l0) (interp_vl r vm l) } + aplus (amult (amult (amult c0 c) (interp_vl r vm l0)) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.commutativity c0 c } + aplus (amult (amult (amult c c0) (interp_vl r vm l0)) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.associativity c c0 (interp_vl r vm l0) } + aplus (amult (amult c (amult c0 (interp_vl r vm l0))) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.commutativity c (amult c0 (interp_vl r vm l0)) } + aplus (amult (amult (amult c0 (interp_vl r vm l0)) c) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.associativity (amult c0 (interp_vl r vm l0)) c (interp_vl r vm l) } + aplus (amult (amult c0 (interp_vl r 
vm l0)) (amult c (interp_vl r vm l))) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.distribute (amult c0 (interp_vl r vm l0)) + (amult c (interp_vl r vm l)) (interp_cs r vm t) } + amult (amult c0 (interp_vl r vm l0)) + (aplus (amult c (interp_vl r vm l)) (interp_cs r vm t)); + == { } + amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm s); + } + | Cons_varlist l t -> // Same as Cons_monom c l t with c = aone + let c = aone in + calc (==) { + interp_cs r vm (canonical_sum_scalar3 r c0 l0 s); + == { } + interp_cs r vm + (monom_insert r (amult c0 c) (varlist_merge l0 l) + (canonical_sum_scalar3 r c0 l0 t)); + == { monom_insert_ok r vm (amult c0 c) (varlist_merge l0 l) (canonical_sum_scalar3 r c0 l0 t) } + aplus (amult (amult c0 c) (interp_vl r vm (varlist_merge l0 l))) + (interp_cs r vm (canonical_sum_scalar3 r c0 l0 t)); + == { varlist_merge_ok r vm l0 l } + aplus (amult (amult c0 c) (amult (interp_vl r vm l0) (interp_vl r vm l))) + (interp_cs r vm (canonical_sum_scalar3 r c0 l0 t)); + == { canonical_sum_scalar3_ok r vm c0 l0 t } + aplus (amult (amult c0 c) (amult (interp_vl r vm l0) (interp_vl r vm l))) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.associativity (amult c0 c) + (interp_vl r vm l0) (interp_vl r vm l) } + aplus (amult (amult (amult c0 c) (interp_vl r vm l0)) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.commutativity c0 c } + aplus (amult (amult (amult c c0) (interp_vl r vm l0)) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.associativity c c0 (interp_vl r vm l0) } + aplus (amult (amult c (amult c0 (interp_vl r vm l0))) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.commutativity c (amult c0 (interp_vl r vm l0)) } + aplus (amult (amult (amult c0 (interp_vl r vm l0)) c) (interp_vl r vm l)) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.cm_mult.associativity (amult c0 (interp_vl r vm l0)) c (interp_vl r vm l) } + aplus (amult (amult c0 (interp_vl r vm l0)) (amult c (interp_vl r vm l))) + (amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm t)); + == { r.distribute (amult c0 (interp_vl r vm l0)) + (amult c (interp_vl r vm l)) (interp_cs r vm t) } + amult (amult c0 (interp_vl r vm l0)) + (aplus (amult c (interp_vl r vm l)) (interp_cs r vm t)); + == { } + amult (amult c0 (interp_vl r vm l0)) (interp_cs r vm s); + } + | Nil_monom -> () + +val canonical_sum_prod_ok: #a:eqtype -> r:cr a -> vm:vmap a -> + s1:canonical_sum a -> s2:canonical_sum a -> + Lemma (interp_cs r vm (canonical_sum_prod r s1 s2) == + r.cm_mult.mult (interp_cs r vm s1) (interp_cs r vm s2)) +let rec canonical_sum_prod_ok #a r vm s1 s2 = + let aone = r.cm_mult.unit in + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + match s1 with + | Cons_monom c1 l1 t1 -> + calc (==) { + interp_cs r vm (canonical_sum_prod r s1 s2); + == { } + interp_cs r vm + (canonical_sum_merge r (canonical_sum_scalar3 r c1 l1 s2) + (canonical_sum_prod r t1 s2)); + == { canonical_sum_merge_ok r vm + (canonical_sum_scalar3 r c1 l1 s2) + (canonical_sum_prod r t1 s2) } + aplus (interp_cs r vm (canonical_sum_scalar3 r c1 l1 s2)) + (interp_cs r vm (canonical_sum_prod r t1 s2)); + == { canonical_sum_scalar3_ok r vm c1 l1 s2; + canonical_sum_prod_ok r vm t1 s2 } + aplus (amult (amult c1 (interp_vl r vm l1)) (interp_cs r vm s2)) + (amult (interp_cs r vm t1) (interp_cs r vm s2)); + == { distribute_right r 
(amult c1 (interp_vl r vm l1)) + (interp_cs r vm t1) (interp_cs r vm s2) } + amult (aplus (amult c1 (interp_vl r vm l1)) (interp_cs r vm t1)) + (interp_cs r vm s2); + == { } + amult (interp_cs r vm s1) (interp_cs r vm s2); + } + | Cons_varlist l1 t1 -> + calc (==) { + interp_cs r vm (canonical_sum_prod r s1 s2); + == { } + interp_cs r vm + (canonical_sum_merge r (canonical_sum_scalar2 r l1 s2) + (canonical_sum_prod r t1 s2)); + == { canonical_sum_merge_ok r vm + (canonical_sum_scalar2 r l1 s2) + (canonical_sum_prod r t1 s2) } + aplus (interp_cs r vm (canonical_sum_scalar2 r l1 s2)) + (interp_cs r vm (canonical_sum_prod r t1 s2)); + == { canonical_sum_scalar2_ok r vm l1 s2; + canonical_sum_prod_ok r vm t1 s2 } + aplus (amult (interp_vl r vm l1) (interp_cs r vm s2)) + (amult (interp_cs r vm t1) (interp_cs r vm s2)); + == { distribute_right r (interp_vl r vm l1) + (interp_cs r vm t1) (interp_cs r vm s2) } + amult (aplus (interp_vl r vm l1) (interp_cs r vm t1)) + (interp_cs r vm s2); + == { } + amult (interp_cs r vm s1) (interp_cs r vm s2); + } + | Nil_monom -> () + +val spolynomial_normalize_ok: #a:eqtype -> r:cr a -> vm:vmap a -> p:spolynomial a -> + Lemma (interp_cs r vm (spolynomial_normalize r p) == interp_sp r vm p) +let rec spolynomial_normalize_ok #a r vm p = + match p with + | SPvar _ -> () + | SPconst _ -> () + | SPplus l q -> + canonical_sum_merge_ok r vm + (spolynomial_normalize r l) (spolynomial_normalize r q); + spolynomial_normalize_ok r vm l; + spolynomial_normalize_ok r vm q + | SPmult l q -> + canonical_sum_prod_ok r vm + (spolynomial_normalize r l) (spolynomial_normalize r q); + spolynomial_normalize_ok r vm l; + spolynomial_normalize_ok r vm q + +val canonical_sum_simplify_ok: #a:eqtype -> r:cr a -> vm:vmap a -> s:canonical_sum a -> + Lemma (interp_cs r vm (canonical_sum_simplify r s) == interp_cs r vm s) +let rec canonical_sum_simplify_ok #a r vm s = + let azero = r.cm_add.unit in + let aone = r.cm_mult.unit in + match s with + | Cons_monom c _ t -> canonical_sum_simplify_ok r vm t + | Cons_varlist _ t -> canonical_sum_simplify_ok r vm t + | Nil_monom -> () + +val spolynomial_simplify_ok: #a:eqtype -> r:cr a -> vm:vmap a -> p:spolynomial a -> + Lemma (interp_cs r vm (spolynomial_simplify r p) == interp_sp r vm p) +let spolynomial_simplify_ok #a r vm p = + canonical_sum_simplify_ok r vm (spolynomial_normalize r p); + spolynomial_normalize_ok r vm p + + +(** + * This is the type where we first reflect expressions, + * before eliminating additive inverses +**) +type polynomial a = + | Pvar : index -> polynomial a + | Pconst : a -> polynomial a + | Pplus : polynomial a -> polynomial a -> polynomial a + | Pmult : polynomial a -> polynomial a -> polynomial a + | Popp : polynomial a -> polynomial a + +(** Canonize a reflected expression *) +val polynomial_normalize: #a:eqtype -> cr a -> polynomial a -> canonical_sum a + +[@@canon_attr] +let rec polynomial_normalize #a r p = + match p with + | Pvar i -> Cons_varlist (Cons_var i Nil_var) Nil_monom + | Pconst c -> Cons_monom c Nil_var Nil_monom + | Pplus l q -> + canonical_sum_merge r (polynomial_normalize r l) (polynomial_normalize r q) + | Pmult l q -> + canonical_sum_prod r (polynomial_normalize r l) (polynomial_normalize r q) + | Popp p -> + canonical_sum_scalar3 r (r.opp r.cm_mult.unit) Nil_var (polynomial_normalize r p) + +val polynomial_simplify: #a:eqtype -> cr a -> polynomial a -> canonical_sum a + +[@@canon_attr] +let polynomial_simplify #a r p = + canonical_sum_simplify r + (polynomial_normalize r p) + +(** Translate to a 
representation without additive inverses *) +val spolynomial_of: #a:eqtype -> cr a -> polynomial a -> spolynomial a + +[@@canon_attr] +let rec spolynomial_of #a r p = + match p with + | Pvar i -> SPvar i + | Pconst c -> SPconst c + | Pplus l q -> SPplus (spolynomial_of r l) (spolynomial_of r q) + | Pmult l q -> SPmult (spolynomial_of r l) (spolynomial_of r q) + | Popp p -> SPmult (SPconst (r.opp r.cm_mult.unit)) (spolynomial_of r p) + +(** Interpretation of a polynomial *) +[@@canon_attr] +let rec interp_p (#a:Type) (r:cr a) (vm:vmap a) (p:polynomial a) : a = + let aplus = r.cm_add.mult in + let amult = r.cm_mult.mult in + match p with + | Pconst c -> c + | Pvar i -> interp_var vm i + | Pplus p1 p2 -> aplus (interp_p r vm p1) (interp_p r vm p2) + | Pmult p1 p2 -> amult (interp_p r vm p1) (interp_p r vm p2) + | Popp p -> r.opp (interp_p r vm p) + + +val spolynomial_of_ok: #a:eqtype -> r:cr a -> vm:vmap a -> p:polynomial a -> + Lemma (interp_p r vm p == interp_sp r vm (spolynomial_of r p)) +let rec spolynomial_of_ok #a r vm p = + match p with + | Pconst c -> () + | Pvar i -> () + | Pplus p1 p2 -> + spolynomial_of_ok r vm p1; + spolynomial_of_ok r vm p2 + | Pmult p1 p2 -> + spolynomial_of_ok r vm p1; + spolynomial_of_ok r vm p2 + | Popp p -> + spolynomial_of_ok r vm p; + let x = interp_sp r vm (spolynomial_of r p) in + let y = r.cm_mult.mult (r.opp r.cm_mult.unit) x in + add_mult_opp r x; + opp_unique r x y + + +val polynomial_normalize_ok: #a:eqtype -> r:cr a -> vm:vmap a -> p:polynomial a -> + Lemma (interp_cs r vm (polynomial_normalize r p) == + interp_cs r vm (spolynomial_normalize r (spolynomial_of r p))) +let rec polynomial_normalize_ok #a r vm p = + match p with + | Pvar _ -> () + | Pconst _ -> () + | Pplus l q -> + canonical_sum_merge_ok r vm + (polynomial_normalize r l) + (polynomial_normalize r q); + canonical_sum_merge_ok r vm + (spolynomial_normalize r (spolynomial_of r l)) + (spolynomial_normalize r (spolynomial_of r q)); + polynomial_normalize_ok r vm l; + polynomial_normalize_ok r vm q + + | Pmult l q -> + canonical_sum_prod_ok r vm + (polynomial_normalize r l) + (polynomial_normalize r q); + canonical_sum_prod_ok r vm + (spolynomial_normalize r (spolynomial_of r l)) + (spolynomial_normalize r (spolynomial_of r q)); + polynomial_normalize_ok r vm l; + polynomial_normalize_ok r vm q + + | Popp p1 -> + let l = SPconst (r.opp r.cm_mult.unit) in + polynomial_normalize_ok r vm p1; + canonical_sum_prod_ok r vm + (spolynomial_normalize r l) + (polynomial_normalize r p1); + canonical_sum_prod_ok r vm + (spolynomial_normalize r l) + (spolynomial_normalize r (spolynomial_of r p1)) + + +val polynomial_simplify_ok: #a:eqtype -> r:cr a -> vm:vmap a -> p:polynomial a -> + Lemma (interp_cs r vm (polynomial_simplify r p) == interp_p r vm p) +let polynomial_simplify_ok #a r vm p = + calc (==) { + interp_cs r vm (polynomial_simplify r p); + == { } + interp_cs r vm (canonical_sum_simplify r (polynomial_normalize r p)); + == { canonical_sum_simplify_ok r vm (polynomial_normalize r p) } + interp_cs r vm (polynomial_normalize r p); + == { polynomial_normalize_ok r vm p } + interp_cs r vm (spolynomial_normalize r (spolynomial_of r p)); + == { spolynomial_normalize_ok r vm (spolynomial_of r p) } + interp_sp r vm (spolynomial_of r p); + == { spolynomial_of_ok r vm p } + interp_p r vm p; + } + + +/// +/// Tactic definition +/// + +(* Only dump when debugging is on *) +let ddump m = if debugging () then dump m + +(** + * Finds the position of first occurrence of x in xs. 
+ * This is specialized to terms and their funny term_eq. +**) +let rec find_aux (n:nat) (x:term) (xs:list term) : Tac (option nat) = + match xs with + | [] -> None + | x'::xs' -> if term_eq x x' then Some n else find_aux (n+1) x xs' + +let find = find_aux 0 + +let make_fvar (#a:Type) (t:term) (unquotea:term -> Tac a) (ts:list term) + (vm:vmap a) : Tac (polynomial a & list term & vmap a) = + match find t ts with + | Some v -> (Pvar v, ts, vm) + | None -> + let vfresh = length ts in + let z = unquotea t in + (Pvar vfresh, ts @ [t], update vfresh z vm) + +(** This expects that add, opp, mone mult, and t have already been normalized *) +let rec reification_aux (#a:Type) (unquotea:term -> Tac a) (ts:list term) (vm:vmap a) (add opp mone mult t: term) : Tac (polynomial a & list term & vmap a) = + // ddump ("term = " ^ term_to_string t ^ "\n"); + let hd, tl = collect_app_ref t in + match inspect hd, list_unref tl with + | Tv_FVar fv, [(t1, _) ; (t2, _)] -> + //ddump ("add = " ^ term_to_string add ^ " + // \nmul = " ^ term_to_string mult); + //ddump ("fv = " ^ term_to_string (pack (Tv_FVar fv))); + let binop (op:polynomial a -> polynomial a -> polynomial a) : Tac (polynomial a & list term & vmap a) = + let (e1, ts, vm) = reification_aux unquotea ts vm add opp mone mult t1 in + let (e2, ts, vm) = reification_aux unquotea ts vm add opp mone mult t2 in + (op e1 e2, ts, vm) + in + if term_eq (pack (Tv_FVar fv)) add then binop Pplus else + if term_eq (pack (Tv_FVar fv)) mult then binop Pmult else + make_fvar t unquotea ts vm + | Tv_FVar fv, [(t1, _)] -> + let monop (op:polynomial a -> polynomial a) : Tac (polynomial a & list term & vmap a) = + let (e, ts, vm) = reification_aux unquotea ts vm add opp mone mult t1 in + (op e, ts, vm) + in + if term_eq (pack (Tv_FVar fv)) opp then monop Popp else + make_fvar t unquotea ts vm + | Tv_Const _, [] -> Pconst (unquotea t), ts, vm + | _, _ -> make_fvar t unquotea ts vm + +(** + * How to normalize terms in the tactic. + * This is carefully tuned to unfold all and no more than required +**) +let steps = + [ + primops; + iota; + zeta; + delta_attr [`%canon_attr]; + delta_only [ + `%FStar.Mul.op_Star; // For integer ring + `%FStar.Algebra.CommMonoid.int_plus_cm; // For integer ring + `%FStar.Algebra.CommMonoid.int_multiply_cm; // For integer ring + `%FStar.Algebra.CommMonoid.__proj__CM__item__mult; + `%FStar.Algebra.CommMonoid.__proj__CM__item__unit; + `%__proj__CR__item__cm_add; + `%__proj__CR__item__opp; + `%__proj__CR__item__cm_mult; + `%FStar.List.Tot.assoc; + `%FStar.Pervasives.Native.fst; + `%FStar.Pervasives.Native.snd; + `%FStar.Pervasives.Native.__proj__Mktuple2__item___1; + `%FStar.Pervasives.Native.__proj__Mktuple2__item___2; + `%FStar.List.Tot.op_At; + `%FStar.List.Tot.append; + ] + ] + +let canon_norm () : Tac unit = norm steps + +let reification (#a:Type) + (unquotea:term -> Tac a) (quotea:a -> Tac term) (tadd topp tmone tmult:term) (munit:a) (ts:list term) : Tac (list (polynomial a) & vmap a) = + // Be careful not to normalize operations too much + // E.g. 
we don't want to turn ( +% ) into (a + b) % prime + // or we won't be able to spot ring operations + let add = tadd in + let opp = topp in + let mone = tmone in + let mult = tmult in + let ts = Tactics.Util.map (norm_term steps) ts in + //ddump ("add = " ^ term_to_string add ^ "\nmult = " ^ term_to_string mult); + let (es, _, vm) = + Tactics.Util.fold_left + (fun (es, vs, vm) t -> + let (e, vs, vm) = reification_aux unquotea vs vm add opp mone mult t + in (e::es, vs, vm)) + ([],[], ([], munit)) ts + in (List.Tot.Base.rev es, vm) + +(* The implicit argument in the application of `Pconst` is crucial *) +let rec quote_polynomial (#a:Type) (ta:term) (quotea:a -> Tac term) (e:polynomial a) : Tac term = + match e with + | Pconst c -> mk_app (`Pconst) [(ta, Q_Implicit); (quotea c, Q_Explicit)] + | Pvar x -> mk_e_app (`Pvar) [pack (Tv_Const (C_Int x))] + | Pplus e1 e2 -> + mk_e_app (`Pplus) [quote_polynomial ta quotea e1; quote_polynomial ta quotea e2] + | Pmult e1 e2 -> + mk_e_app (`Pmult) [quote_polynomial ta quotea e1; quote_polynomial ta quotea e2] + | Popp e -> mk_e_app (`Popp) [quote_polynomial ta quotea e] + +(* Constructs the 3 main goals of the tactic *) +let semiring_reflect (#a:eqtype) (r:cr a) (vm:vmap a) (e1 e2:polynomial a) (a1 a2:a) + (_ : squash ( + interp_cs r vm (polynomial_simplify r e1) == + interp_cs r vm (polynomial_simplify r e2))) + (_ : squash (a1 == interp_p r vm e1)) + (_ : squash (a2 == interp_p r vm e2)) : + squash (a1 == a2) + = + polynomial_simplify_ok r vm e1; + polynomial_simplify_ok r vm e2 + +(* [@@plugin] *) +let canon_semiring_aux + (a: Type) (ta: term) (unquotea: term -> Tac a) (quotea: a -> Tac term) + (tr tadd topp tmone tmult: term) + (munit: a) + : Tac unit += + focus (fun () -> + norm []; // Do not normalize anything implicitly + let g = cur_goal () in + match term_as_formula g with + | Comp (Eq (Some t)) t1 t2 -> + (* First, make sure we have an equality at type ta, since otherwise + we will fail to apply the reflection Lemma. We can just cut by the equality + we want, since they should be equiprovable (though not equal). *) + let b = tcut (`(squash (eq2 #(`#ta) (`#t1) (`#t2)))) in + (* Try solving it trivially if type was exactly the same, or give to smt. + It should really be trivial. 
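Concretely, `tcut` leaves a hypothesis `b : squash (eq2 #ta t1 t2)` in the context of the original goal, so `exact b` closes it whenever the stated type matches definitionally and the SMT fallback handles the rest; the remaining cut goal, the equality at type `ta`, is what the reflection lemma is then applied to.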
*) + begin + try exact b with | _ -> smt () + end; + begin + match reification unquotea quotea tadd topp tmone tmult munit [t1; t2] with + | ([e1; e2], vm) -> +(* + ddump (term_to_string t1); + ddump (term_to_string t2); + let r : cr a = unquote tr in + ddump ("vm = " ^ term_to_string (quote vm) ^ "\n" ^ + "before = " ^ term_to_string (norm_term steps + (quote (interp_p r vm e1 == interp_p r vm e2)))); + dump ("expected after = " ^ term_to_string (norm_term steps + (quote ( + interp_cs r vm (polynomial_simplify r e1) == + interp_cs r vm (polynomial_simplify r e2))))); +*) + let tvm = quote_vm ta quotea vm in + let te1 = quote_polynomial ta quotea e1 in + //ddump ("te1 = " ^ term_to_string te1); + let te2 = quote_polynomial ta quotea e2 in + //ddump ("te2 = " ^ term_to_string te2); + mapply (`(semiring_reflect + #(`#ta) (`#tr) (`#tvm) (`#te1) (`#te2) (`#t1) (`#t2))); + //ddump "Before canonization"; + canon_norm (); + //ddump "After canonization"; + later (); + //ddump "Before normalizing left-hand side"; + canon_norm (); + //ddump "After normalizing left-hand side"; + trefl (); + //ddump "Before normalizing right-hand side"; + canon_norm (); + //ddump "After normalizing right-hand side"; + trefl () + | _ -> fail "Unexpected" + end + | _ -> fail "Goal should be an equality") + +let canon_semiring (#a:eqtype) (r:cr a) : Tac unit = + canon_semiring_aux a + (quote a) (unquote #a) (fun (x:a) -> quote x) (quote r) + (norm_term steps (quote r.cm_add.mult)) + (norm_term steps (quote r.opp)) + (norm_term steps (quote (r.opp r.cm_mult.unit))) + (norm_term steps (quote r.cm_mult.mult)) + r.cm_add.unit + +/// Ring of integers + +[@@canon_attr] +let int_cr : cr int = + CR int_plus_cm int_multiply_cm op_Minus (fun x -> ()) (fun x y z -> ()) (fun x -> ()) + +let int_semiring () : Tac unit = canon_semiring int_cr diff --git a/stage0/ulib/FStar.Tactics.CanonCommSwaps.fst b/stage0/ulib/FStar.Tactics.CanonCommSwaps.fst new file mode 100644 index 00000000000..34a209eb2e7 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CanonCommSwaps.fst @@ -0,0 +1,144 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Tactics.CanonCommSwaps + +open FStar.List.Tot.Base + +let swap (n:nat) :Type = x:nat{x < n-1} + +let rec apply_swap_aux (#a:Type) (n:nat) (xs:list a) (s:swap (length xs + n)) : + Pure (list a) (requires True) + (ensures (fun zs -> length zs == length xs)) (decreases xs) = + match xs with + | [] | [_] -> xs + | x1 :: x2 :: xs' -> if n = (s <: nat) + then x2 :: x1 :: xs' + else x1 :: apply_swap_aux (n+1) (x2 :: xs') s + +let apply_swap (#a:Type) = apply_swap_aux #a 0 + +let rec apply_swaps (#a:Type) (xs:list a) (ss:list (swap (length xs))) : + Pure (list a) (requires True) + (ensures (fun zs -> length zs == length xs)) (decreases ss) = + match ss with + | [] -> xs + | s::ss' -> apply_swaps (apply_swap xs s) ss' + +let equal_counts (#a:eqtype) (xs ys:list a) : Type0 = + (forall (e:a).{:pattern (count e xs) \/ (count e ys)} count e xs == count e ys) + +let extend_equal_counts (#a:eqtype) (h:a) (xs ys:list a) : Lemma + (requires equal_counts xs ys) + (ensures equal_counts (h::xs) (h::ys)) + = + () + +let retract_equal_counts (#a:eqtype) (h:a) (xs ys:list a) : Lemma + (requires equal_counts (h::xs) (h::ys)) + (ensures equal_counts xs ys) + = + assert (forall (e:a).{:pattern (count e xs) \/ (count e ys)} count e (h::xs) == count e (h::ys)) + +unfold let swap_for (#a:eqtype) (xs:list a) = swap (length xs) +unfold let swaps_for (#a:eqtype) (xs:list a) = list (swap_for xs) + +let rec append_swaps (#a:eqtype) (xs:list a) (ss1 ss2:swaps_for xs) : Lemma + (ensures apply_swaps xs (ss1 @ ss2) == apply_swaps (apply_swaps xs ss1) ss2) + (decreases ss1) + = + match ss1 with + | [] -> () + | h::t -> append_swaps (apply_swap xs h) t ss2 + +let rec lift_swap_cons (#a:eqtype) (n:nat) (h:a) (xs:list a) (s:swap (length xs + n)) : Lemma + (requires n <= s) + (ensures apply_swap_aux n (h::xs) (s + 1) == h::(apply_swap_aux n xs s)) + (decreases xs) + = + match xs with + | [] -> () + | x::xt -> if n < s then lift_swap_cons (n + 1) x xt s + +let rec lift_swaps_cons (#a:eqtype) (h:a) (xs:list a) (ss:swaps_for xs) : Pure (swaps_for (h::xs)) + (requires True) + (ensures (fun ss' -> + apply_swaps (h::xs) ss' == h::(apply_swaps xs ss) + )) + (decreases ss) + = + match ss with + | [] -> [] + | s::st -> + ( + lift_swap_cons 0 h xs s; + (s + 1)::(lift_swaps_cons h (apply_swap xs s) st) + ) + +let rec swap_to_front (#a:eqtype) (h:a) (xs:list a) : Pure (swaps_for xs) + (requires count h xs >= 1) + (ensures (fun ss -> + let ys = apply_swaps xs ss in + equal_counts xs ys /\ + Cons? 
ys /\ + hd ys == h + )) + = + match xs with + | [] -> [] + | x::xt -> + ( + if x = h then [] + else + ( + let ss = swap_to_front h xt in // ss turns xt into h::xt' + let ss' = lift_swaps_cons x xt ss in // ss' turns x::xt into x::h::xt' + let s:swap_for xs = 0 in + append_swaps xs ss' [s]; + ss' @ [s] + ) + ) + +let rec equal_counts_implies_swaps (#a:eqtype) (xs ys:list a) : Pure (swaps_for xs) + (requires equal_counts xs ys) + (ensures (fun ss -> ys == apply_swaps xs ss)) + (decreases ys) + = + match ys with + | [] -> + ( + match xs with + | [] -> [] + | x::xt -> + ( + assert (count x xs >= 1); + [] + ) + ) + | y::yt -> + ( + assert (count y ys >= 1); + assert (count y xs >= 1); + let ss0 = swap_to_front y xs in // find y in xs, swap it to the front + let xs' = apply_swaps xs ss0 in // hd xs' == y + let xt = tl xs' in // xs' == y::xt + retract_equal_counts y xt yt; // prove (equal_counts xt yt) + let ss1 = equal_counts_implies_swaps xt yt in // prove (yt == apply_swaps xt ss1) + let ss1' = lift_swaps_cons y xt ss1 in // y::yt == apply_swaps (y::xt) ss1' + // ys == apply_swaps (apply_swaps xs ss0) ss1' + append_swaps xs ss0 ss1'; + ss0 @ ss1' + ) + diff --git a/stage0/ulib/FStar.Tactics.CanonMonoid.fst b/stage0/ulib/FStar.Tactics.CanonMonoid.fst new file mode 100644 index 00000000000..129d785c746 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CanonMonoid.fst @@ -0,0 +1,127 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.CanonMonoid + +open FStar.Algebra.Monoid +open FStar.List +open FStar.Reflection.V2 +open FStar.Tactics.V2 + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +(* Only dump when debugging is on *) +let dump m = if debugging () then dump m + +(* "A Monoid Expression Simplifier" ported from + http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *) + +type exp (a:Type) : Type = + | Unit : exp a + | Var : a -> exp a + | Mult : exp a -> exp a -> exp a + +let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) = + match e with + | Unit -> "Unit" + | Var x -> "Var " ^ a_to_string x + | Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1 + ^ ") (" ^ exp_to_string a_to_string e2 ^ ")" + +let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a = + match e with + | Unit -> Monoid?.unit m + | Var x -> x + | Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2) + +let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a = + match xs with + | [] -> Monoid?.unit m + | [x] -> x + | x::xs' -> Monoid?.mult m x (mldenote m xs') + +let rec flatten (#a:Type) (e:exp a) : list a = + match e with + | Unit -> [] + | Var x -> [x] + | Mult e1 e2 -> flatten e1 @ flatten e2 + +(* This proof internally uses the monoid laws; the SMT solver picks up + on them because they are written as squashed formulas in the + definition of monoid; need to be careful with this since these are + quantified formulas without any patterns. Dangerous stuff! 
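As a sanity check on a tiny instance, `flatten (Mult (Var a) (Mult Unit (Var b)))` is `[a; b]`, and `mldenote m [a; b]` is `Monoid?.mult m a b`, which the identity law equates with the original `mdenote`.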
*) +let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 : + Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1) + (mldenote m ml2)) = + match ml1 with + | [] -> () + | e::es1' -> flatten_correct_aux m es1' ml2 + +let rec flatten_correct (#a:Type) (m:monoid a) (e:exp a) : + Lemma (mdenote m e == mldenote m (flatten e)) = + match e with + | Unit | Var _ -> () + | Mult e1 e2 -> flatten_correct_aux m (flatten e1) (flatten e2); + flatten_correct m e1; flatten_correct m e2 + +let monoid_reflect (#a:Type) (m:monoid a) (e1 e2:exp a) + (_ : squash (mldenote m (flatten e1) == mldenote m (flatten e2))) + : squash (mdenote m e1 == mdenote m e2) = + flatten_correct m e1; flatten_correct m e2 + +// This expects that mult, unit, and me have already been normalized +let rec reification_aux (#a:Type) (mult unit me : term) : Tac (exp a) = + let hd, tl = collect_app_ref me in + let tl = list_unref tl in + match inspect hd, tl with + | Tv_FVar fv, [(me1, Q_Explicit) ; (me2, Q_Explicit)] -> + if term_eq_old (pack (Tv_FVar fv)) mult + then Mult (reification_aux mult unit me1) (reification_aux mult unit me2) + else Var (unquote me) + | _, _ -> + if term_eq_old me unit + then Unit + else Var (unquote me) + +let reification (#a:Type) (m:monoid a) (me:term) : Tac (exp a) = + let mult = norm_term [delta;zeta;iota] (quote (Monoid?.mult m)) in + let unit = norm_term [delta;zeta;iota] (quote (Monoid?.unit m)) in + let me = norm_term [delta;zeta;iota] me in + // dump ("mult = " ^ term_to_string mult ^ + // "; unit = " ^ term_to_string unit ^ + // "; me = " ^ term_to_string me); + reification_aux mult unit me + +let canon_monoid (#a:Type) (m:monoid a) : Tac unit = + norm []; + let g = cur_goal () in + match term_as_formula g with + | Comp (Eq (Some t)) me1 me2 -> + (* First, make sure we have an equality at type ta, since otherwise + we will fail to apply the reflection Lemma. We can just cut by the equality + we want, since they should be equiprovable (though not equal). *) + let b = tcut (`(squash (eq2 #(`#(quote a)) (`#me1) (`#me2)))) in + smt (); // let the SMT prove it, it should really be trivial + + let r1 = reification m me1 in + let r2 = reification m me2 in + change_sq (quote (eq2 #a (mdenote m r1) (mdenote m r2))); + apply (`monoid_reflect); + norm [delta_only [`%mldenote; + `%flatten; + `%FStar.List.Tot.op_At; + `%FStar.List.Tot.append]] + | _ -> fail "Goal should be an equality" diff --git a/stage0/ulib/FStar.Tactics.CheckLN.fst b/stage0/ulib/FStar.Tactics.CheckLN.fst new file mode 100644 index 00000000000..0b1791a4e7b --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CheckLN.fst @@ -0,0 +1,72 @@ +module FStar.Tactics.CheckLN + +open FStar.Tactics.V2.Bare +open FStar.Tactics.Util + +let rec for_all (p : 'a -> Tac bool) (l : list 'a) : Tac bool = + match l with + | [] -> true + | x::xs -> if p x then for_all p xs else false + +let rec check (t:term) : Tac bool = + match inspect t with + (* We are using the named view, which opens terms + as needed on every node. If we reach a bvar, the original + term is not LN. 
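Under the locally nameless discipline, opening a binder replaces its bound variable by a fresh named variable, so a `Tv_BVar` surviving to this point is a de Bruijn index that escaped its binder.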
*) + | Tv_BVar bv -> false + + | Tv_Const _ -> true + | Tv_Uvar _ _ -> false (* conservative *) + + | Tv_Var _ -> true + | Tv_FVar _ -> true + | Tv_UInst _ us -> for_all check_u us + | Tv_App hd (a, q) -> if check hd then check a else false + | Tv_Abs b body -> if check b.sort then check body else false + | Tv_Arrow b c -> if check b.sort then check_comp c else false + | Tv_Type u -> check_u u + | Tv_Refine b ref -> if check b.sort then check ref else false + | Tv_Let recf attrs b def body -> + if not (for_all check attrs) then false else + if not (check def) then false else + check body + | Tv_Match sc _ brs -> + if check sc then for_all check_br brs else false + | Tv_AscribedT e t _ _ -> + if check e then check t else false + | Tv_AscribedC e c _ _ -> + if check e then check_comp c else false + + | Tv_Unknown -> true + | Tv_Unsupp -> true // hm.. +and check_u (u:universe) : Tac bool = + match inspect_universe u with + | Uv_BVar _ -> false + | Uv_Name _ -> true + | Uv_Unif _ -> false (* conservative *) + | Uv_Zero -> true + | Uv_Succ u -> check_u u + | Uv_Max us -> for_all check_u us + | Uv_Unk -> true +and check_comp (c:comp) : Tac bool = + match c with + | C_Total typ -> check typ + | C_GTotal typ -> check typ + | C_Lemma pre post pats -> + if not (check pre) then false else + if not (check post) then false else + check pats + | C_Eff us nm res args decrs -> + if not (for_all check_u us) then false else + if not (check res) then false else + if not (for_all (fun (a,q) -> check a) args) then false else + if not (for_all check decrs) then false else + true + +and check_br (b:branch) : Tac bool = + (* Could check the pattern's ascriptions too. *) + let (p, t) = b in + check t + +[@@plugin] +let check_ln (t:term) : Tac bool = check t diff --git a/stage0/ulib/FStar.Tactics.CheckLN.fsti b/stage0/ulib/FStar.Tactics.CheckLN.fsti new file mode 100644 index 00000000000..3f9e4c9d78c --- /dev/null +++ b/stage0/ulib/FStar.Tactics.CheckLN.fsti @@ -0,0 +1,7 @@ +module FStar.Tactics.CheckLN + +open FStar.Tactics.V2.Bare + +(* Checks if a term is locally nameless. *) +[@@plugin] +val check_ln (t:term) : Tac bool diff --git a/stage0/ulib/FStar.Tactics.Derived.fst b/stage0/ulib/FStar.Tactics.Derived.fst new file mode 100644 index 00000000000..97c1e367d06 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Derived.fst @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Derived + +(* This module is a temporary for Meta-F* migration *) +include FStar.Tactics.V1.Derived diff --git a/stage0/ulib/FStar.Tactics.Effect.fst b/stage0/ulib/FStar.Tactics.Effect.fst new file mode 100644 index 00000000000..f60b36cc6fd --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Effect.fst @@ -0,0 +1,52 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Effect + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Tactics.Types +open FStar.Stubs.Tactics.Result + +/// This admit is to typecheck the bind implementation when the +/// interface is interleaved + +#push-options "--admit_smt_queries true" +let tac_bind_interleave_begin = () +#pop-options +let tac_bind_interleave_end = () + +let with_tactic _ p = p + +let rewrite_with_tactic _ p = p + +let synth_by_tactic #_ _ = admit () + +#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true" +let assert_by_tactic _ _ = () +#pop-options + +let by_tactic_seman _ _ = () + +let preprocess_with _ = () + +let postprocess_with _ = () + +let postprocess_for_extraction_with _ = () + +#set-options "--no_tactics" + +let unfold_with_tactic _ _ = () + +let unfold_rewrite_with_tactic _ _ = () diff --git a/stage0/ulib/FStar.Tactics.Effect.fsti b/stage0/ulib/FStar.Tactics.Effect.fsti new file mode 100644 index 00000000000..d104d531ef3 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Effect.fsti @@ -0,0 +1,255 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Effect + +open FStar.Monotonic.Pure + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Tactics.Types +open FStar.Stubs.Tactics.Result + +(* This module is extracted, don't add any `assume val`s or extraction + * will break. (`synth_by_tactic` is fine) *) + +type tac_wp_t0 (a:Type) = + proofstate -> (__result a -> Type0) -> Type0 + +unfold +let tac_wp_monotonic (#a:Type) (wp:tac_wp_t0 a) = + forall (ps:proofstate) (p q:__result a -> Type0). + (forall x. p x ==> q x) ==> (wp ps p ==> wp ps q) + +type tac_wp_t (a:Type) = wp:tac_wp_t0 a{tac_wp_monotonic wp} + +let tac_repr (a:Type) (wp:tac_wp_t a) = + ps0:proofstate -> DIV (__result a) (as_pure_wp (wp ps0)) + +unfold +let tac_return_wp (#a:Type) (x:a) : tac_wp_t a = + fun ps post -> post (Success x ps) + +(* monadic return *) +let tac_return (a:Type) (x:a) : tac_repr a (tac_return_wp x) = + fun (s:proofstate) -> Success x s + +unfold +let tac_bind_wp (#a #b:Type) (wp_f:tac_wp_t a) (wp_g:a -> tac_wp_t b) : tac_wp_t b = + fun ps post -> + wp_f ps (fun r -> + match r with + | Success x ps -> wp_g x ps post + | Failed ex ps -> post (Failed ex ps)) + +/// An optimization to name the continuation + +unfold +let tac_wp_compact (a:Type) (wp:tac_wp_t a) : tac_wp_t a = + fun ps post -> + forall (k:__result a -> Type0). 
(forall (r:__result a).{:pattern (guard_free (k r))} post r ==> k r) ==> wp ps k + + +/// tac_bind_interleave_begin is an ugly hack to get interface interleaving +/// work with admit_smt_queries true for the bind combinator + +val tac_bind_interleave_begin : unit + + +/// We cannot verify the bind combinator, since the body of bind +/// does some operations on the proof state, with which we cannot prove +/// that the proofstate is sequenced. Two ways to fix it: +/// +/// 1. We separate the "meta" proofstate s.t. range, depth, etc. from the main +/// proofstate, and then sequencing only applies to the main proofstate +/// +/// 2. The pre and post of the TAC effect are just exception pre and post, +/// since we can't prove much about the proofstate anyway, as it is +/// mostly abstract + +(* monadic bind *) +#push-options "--admit_smt_queries true" +let tac_bind (a:Type) (b:Type) + (wp_f:tac_wp_t a) + (wp_g:a -> tac_wp_t b) + (r1 r2:range) + (t1:tac_repr a wp_f) + (t2:(x:a -> tac_repr b (wp_g x))) : tac_repr b (tac_wp_compact b (tac_bind_wp wp_f wp_g)) = + fun ps -> + let ps = set_proofstate_range ps r1 in + let ps = incr_depth ps in + let r = t1 ps in + match r with + | Success a ps' -> + let ps' = set_proofstate_range ps' r2 in + // Force evaluation of __tracepoint q even on the interpreter + begin match tracepoint ps' with + | true -> t2 a (decr_depth ps') + end + | Failed e ps' -> Failed e ps' +#pop-options + + +/// tac_bind_interleave_end is an ugly hack to get interface interleaving +/// work with admit_smt_queries true for the bind combinator + +val tac_bind_interleave_end : unit + +unfold +let tac_if_then_else_wp (#a:Type) (wp_then:tac_wp_t a) (wp_else:tac_wp_t a) (b:bool) + : tac_wp_t a + = fun ps post -> (b ==> wp_then ps post) /\ + ((~ b) ==> wp_else ps post) + +let tac_if_then_else (a:Type) + (wp_then:tac_wp_t a) + (wp_else:tac_wp_t a) + (f:tac_repr a wp_then) + (g:tac_repr a wp_else) + (b:bool) + : Type + = tac_repr a (tac_wp_compact a (tac_if_then_else_wp wp_then wp_else b)) + +let tac_subcomp (a:Type) + (wp_f:tac_wp_t a) + (wp_g:tac_wp_t a) + (f:tac_repr a wp_f) + : Pure (tac_repr a wp_g) + (requires forall ps p. wp_g ps p ==> wp_f ps p) + (ensures fun _ -> True) + = f + +let tac_close (a b:Type) + (wp_f:b -> tac_wp_t a) + (f:(x:b -> tac_repr a (wp_f x))) = + + tac_repr a (fun ps post -> forall (x:b). wp_f x ps post) + +/// default effect is Tac : meaning, unannotated TAC functions will be +/// typed as Tac a +/// +/// And the bind combinator has range arguments +/// that will be provided when the effect is reified + +[@@ default_effect "FStar.Tactics.Effect.Tac"; bind_has_range_args] +reflectable +effect { + TAC (a:Type) (wp:tac_wp_t a) + with { repr=tac_repr; + return=tac_return; + bind=tac_bind; + if_then_else=tac_if_then_else; + subcomp=tac_subcomp; + close = tac_close } +} + +(* Hoare variant *) +effect TacH (a:Type) (pre : proofstate -> Tot Type0) (post : proofstate -> __result a -> Tot Type0) = + TAC a (fun ps post' -> pre ps /\ (forall r. post ps r ==> post' r)) + +(* "Total" variant *) +effect Tac (a:Type) = TacH a (requires (fun _ -> True)) (ensures (fun _ _ -> True)) + +(* Metaprograms that succeed *) +effect TacS (a:Type) = TacH a (requires (fun _ -> True)) (ensures (fun _ps r -> Success? r)) + +(* A variant that doesn't prove totality (nor type safety!) 
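— its `requires` is `fun _ -> False`, so every verification condition about a `TacF` computation is vacuous; such metaprograms can still be run, they just carry no static guarantees.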
*) +effect TacF (a:Type) = TacH a (requires (fun _ -> False)) (ensures (fun _ _ -> True)) + +unfold +let lift_div_tac_wp (#a:Type) (wp:pure_wp a) : tac_wp_t a = + elim_pure_wp_monotonicity wp; + fun ps p -> wp (fun x -> p (Success x ps)) + +let lift_div_tac (a:Type) (wp:pure_wp a) (f:eqtype_as_type unit -> DIV a wp) + : tac_repr a (lift_div_tac_wp wp) + = elim_pure_wp_monotonicity wp; + fun ps -> Success (f ()) ps + +sub_effect DIV ~> TAC = lift_div_tac + +let get () + : TAC proofstate (fun ps post -> post (Success ps ps)) + = TAC?.reflect (fun ps -> Success ps ps) + +let raise (#a:Type) (e:exn) + : TAC a (fun ps post -> post (Failed e ps)) + = TAC?.reflect (fun ps -> Failed #a e ps) + + +/// assert p by t + +val with_tactic (t : unit -> Tac unit) (p:Type u#a) : Type u#a + +(* This syntactic marker will generate a goal of the shape x == ?u for + * a new unification variable ?u, and run tactic [t] to solve this goal. + * If after running [t], the uvar was solved and only trivial goals remain + * in the proofstate, then `rewrite_with_tactic t x` will be replaced + * by the solution of ?u *) +val rewrite_with_tactic (t:unit -> Tac unit) (#a:Type) (x:a) : a + +(* This will run the tactic in order to (try to) produce a term of type + * t. Note that the type looks dangerous from a logical perspective. It + * should not lead to any inconsistency, however, as any time this term + * appears during typechecking, it is forced to be fully applied and the + * tactic is run. A failure of the tactic is a typechecking failure. It + * can be thought as a language construct, and not a real function. *) +val synth_by_tactic : (#t:Type) -> (unit -> Tac unit) -> Tot t + +val assert_by_tactic (p:Type) (t:unit -> Tac unit) + : Pure unit + (requires (set_range_of (with_tactic t (squash p)) (range_of t))) + (ensures (fun _ -> p)) + +val by_tactic_seman (tau:unit -> Tac unit) (phi:Type) + : Lemma (with_tactic tau phi ==> phi) + +(* One can always bypass the well-formedness of metaprograms. It does + * not matter as they are only run at typechecking time, and if they get + * stuck, the compiler will simply raise an error. *) +let assume_safe (#a:Type) (tau:unit -> TacF a) : Tac a = admit (); tau () + +private let tac a b = a -> Tac b +private let tactic a = tac unit a + +(* A hook to preprocess a definition before it is typechecked and + * elaborated. This attribute should be used for top-level lets. The + * tactic [tau] will be called on a quoting of the definition of the let + * (if many, once for each) and the result of the tactic will replace + * the definition. There are no goals involved, nor any proof obligation + * to be done by the tactic. *) +val preprocess_with (tau : term -> Tac term) : Tot unit + +(* A hook to postprocess a definition, after typechecking, and rewrite + * it into a (provably equal) shape chosen by the user. This can be used + * to implement custom transformations previous to extraction, such as + * selective inlining. When ran added to a definition [let x = E], the + * [tau] metaprogram is presented with a goal of the shape [E == ?u] for + * a fresh uvar [?u]. The metaprogram should then both instantiate [?u] + * and prove the equality. *) +val postprocess_with (tau : unit -> Tac unit) : Tot unit + +(* Similar semantics to [postprocess_with], but the metaprogram only + * runs before extraction, and hence typechecking and the logical + * environment should not be affected at all. 
*) +val postprocess_for_extraction_with (tau : unit -> Tac unit) : Tot unit + +#set-options "--no_tactics" + +val unfold_with_tactic (t:unit -> Tac unit) (p:Type) + : Lemma (requires p) + (ensures (with_tactic t p)) + +val unfold_rewrite_with_tactic (t:unit -> Tac unit) (#a:Type) (p:a) + : Lemma (rewrite_with_tactic t p == p) diff --git a/stage0/ulib/FStar.Tactics.Logic.fst b/stage0/ulib/FStar.Tactics.Logic.fst new file mode 100644 index 00000000000..0f3c6122aca --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Logic.fst @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Logic + +(* This module is a temporary for Meta-F* migration *) +include FStar.Tactics.V1.Logic diff --git a/stage0/ulib/FStar.Tactics.MApply.fst b/stage0/ulib/FStar.Tactics.MApply.fst new file mode 100644 index 00000000000..5ec9c4a3a7e --- /dev/null +++ b/stage0/ulib/FStar.Tactics.MApply.fst @@ -0,0 +1,3 @@ +module FStar.Tactics.MApply + +(* This file just here to trigger extraction. *) diff --git a/stage0/ulib/FStar.Tactics.MApply.fsti b/stage0/ulib/FStar.Tactics.MApply.fsti new file mode 100644 index 00000000000..2a79d8eb5b6 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.MApply.fsti @@ -0,0 +1,24 @@ +module FStar.Tactics.MApply + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data +open FStar.Tactics.Effect +open FStar.Tactics.V2.SyntaxCoercions + +include FStar.Tactics.MApply0 + +class termable (a : Type) = { + to_term : a -> Tac term +} + +instance termable_term : termable term = { + to_term = (fun t -> t); +} + +instance termable_binding : termable binding = { + to_term = (fun b -> binding_to_term b); +} + +let mapply (#ty:Type) {| termable ty |} (x : ty) : Tac unit = + let t = to_term x in + mapply0 t diff --git a/stage0/ulib/FStar.Tactics.MApply0.fst b/stage0/ulib/FStar.Tactics.MApply0.fst new file mode 100644 index 00000000000..887e055d02b --- /dev/null +++ b/stage0/ulib/FStar.Tactics.MApply0.fst @@ -0,0 +1,84 @@ +module FStar.Tactics.MApply0 + +open FStar.Reflection.V2 +open FStar.Reflection.V2.Formula + +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.NamedView +open FStar.Tactics.V2.SyntaxHelpers +open FStar.Tactics.V2.Derived +open FStar.Tactics.V2.SyntaxCoercions + +let push1 #p #q f u = () +let push1' #p #q f u = () + +(* + * Some easier applying, which should prevent frustration + * (or cause more when it doesn't do what you wanted to) + *) +val apply_squash_or_lem : d:nat -> term -> Tac unit +let rec apply_squash_or_lem d t = + (* Before anything, try a vanilla apply and apply_lemma *) + try apply t with | _ -> + try apply (`FStar.Squash.return_squash); apply t with | _ -> + try apply_lemma t with | _ -> + + // Fuel cutoff, just in case. 
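+  // (The fuel only decreases in the implication cases below, where we retry
+  //  behind push1, so the initial value of 10 passed by mapply0 is ample for
+  //  typical lemma shapes.)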
+ if d <= 0 then fail "mapply: out of fuel" else begin + + let ty = tc (cur_env ()) t in + let tys, c = collect_arr ty in + match inspect_comp c with + | C_Lemma pre post _ -> + begin + let post = `((`#post) ()) in (* unthunk *) + let post = norm_term [] post in + (* Is the lemma an implication? We can try to intro *) + match term_as_formula' post with + | Implies p q -> + apply_lemma (`push1); + apply_squash_or_lem (d-1) t + + | _ -> + fail "mapply: can't apply (1)" + end + | C_Total rt -> + begin match unsquash_term rt with + (* If the function returns a squash, just apply it, since our goals are squashed *) + | Some rt -> + // DUPLICATED, refactor! + begin + let rt = norm_term [] rt in + (* Is the lemma an implication? We can try to intro *) + match term_as_formula' rt with + | Implies p q -> + apply_lemma (`push1); + apply_squash_or_lem (d-1) t + + | _ -> + fail "mapply: can't apply (2)" + end + + (* If not, we can try to introduce the squash ourselves first *) + | None -> + // DUPLICATED, refactor! + begin + let rt = norm_term [] rt in + (* Is the lemma an implication? We can try to intro *) + match term_as_formula' rt with + | Implies p q -> + apply_lemma (`push1); + apply_squash_or_lem (d-1) t + + | _ -> + apply (`FStar.Squash.return_squash); + apply t + end + end + | _ -> fail "mapply: can't apply (3)" + end + +(* `m` is for `magic` *) +let mapply0 (t : term) : Tac unit = + apply_squash_or_lem 10 t diff --git a/stage0/ulib/FStar.Tactics.MApply0.fsti b/stage0/ulib/FStar.Tactics.MApply0.fsti new file mode 100644 index 00000000000..7b923720b1a --- /dev/null +++ b/stage0/ulib/FStar.Tactics.MApply0.fsti @@ -0,0 +1,18 @@ +module FStar.Tactics.MApply0 + +open FStar.Stubs.Reflection.Types +open FStar.Tactics.Effect + +(* Used by mapply, must be exposed, but not to be used directly *) +private val push1 : (#p:Type) -> (#q:Type) -> + squash (p ==> q) -> + squash p -> + squash q +private val push1' : (#p:Type) -> (#q:Type) -> + (p ==> q) -> + squash p -> + squash q + +(* `m` is for `magic` *) +[@@plugin] +val mapply0 (t : term) : Tac unit diff --git a/stage0/ulib/FStar.Tactics.MkProjectors.fst b/stage0/ulib/FStar.Tactics.MkProjectors.fst new file mode 100644 index 00000000000..e512e890ad1 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.MkProjectors.fst @@ -0,0 +1,232 @@ +module FStar.Tactics.MkProjectors + +(* NB: We cannot use typeclasses here, or any module that depends on +them, since they use the tactics defined here. So we must be careful +with our includes. 
*) +open FStar.List.Tot +open FStar.Reflection.V2 +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Stubs.Syntax.Syntax +open FStar.Tactics.V2.SyntaxCoercions +open FStar.Tactics.V2.SyntaxHelpers +open FStar.Tactics.V2.Derived +open FStar.Tactics.Util +open FStar.Tactics.NamedView + +exception NotFound + +let meta_projectors = () + +(* Thunked version of debug *) +let debug (f : unit -> Tac string) : Tac unit = + if debugging () then + print (f ()) + +[@@plugin] +let mk_one_projector (unf:list string) (np:nat) (i:nat) : Tac unit = + debug (fun () -> dump "ENTRY mk_one_projector"; ""); + let _params = Stubs.Tactics.V2.Builtins.intros np in + let thing : binding = intro () in + let r = t_destruct thing in + match r with + | [(cons, arity)] -> begin + if (i >= arity) then + fail "proj: bad index in mk_one_projector"; + let _ = Stubs.Tactics.V2.Builtins.intros i in + let the_b = intro () in + let _ = Stubs.Tactics.V2.Builtins.intros (arity-i-1) in + let eq_b : binding = intro () in + rewrite eq_b; + norm [iota; delta_only unf; zeta_full]; + (* NB: ^ zeta_full above so we reduce under matches too. Since + we are not unfolding anything but the projectors, which are + not, recursive, this should not bring about any divergence. An + alternative is to use NBE. *) + exact the_b + end + | _ -> fail "proj: more than one case?" + +[@@plugin] +let mk_one_method (proj:string) (np:nat) : Tac unit = + debug (fun () -> dump "ENTRY mk_one_method"; ""); + let nm = explode_qn proj in + let params = repeatn np (fun () -> let b : binding = intro () in + (binding_to_term b, Q_Implicit)) in + let thing : binding = intro () in + let proj = pack (Tv_FVar (pack_fv nm)) in + exact (mk_app proj (params @ [(binding_to_term thing, Q_Explicit)])) + +let subst_map (ss : list (namedv & fv)) (r:term) (t : term) : Tac term = + let subst = List.Tot.map (fun (x, fv) -> NT (Reflection.V2.pack_namedv x) (mk_e_app (Tv_FVar fv) [r])) ss in + subst_term subst t + +let binder_mk_implicit (b:binder) : binder = + let q = + match b.qual with + | Q_Explicit -> Q_Implicit + | q -> q (* keep Q_Meta as it is *) + in + { b with qual = q } + +let binder_to_term (b:binder) : Tot term = + pack (Tv_Var (binder_to_namedv b)) + +let binder_argv (b:binder) : Tot argv = + let q = + match b.qual with + | Q_Meta _ -> Q_Implicit + | q -> q + in + (binder_to_term b, q) + +let rec list_last #a (xs : list a) : Tac a = + match xs with + | [] -> fail "list_last: empty" + | [x] -> x + | _::xs -> list_last xs + +let embed_int (i:int) : term = + let open FStar.Reflection.V2 in + pack_ln (Tv_Const (C_Int i)) + +let embed_string (s:string) : term = + let open FStar.Reflection.V2 in + pack_ln (Tv_Const (C_String s)) + +(* For compatibility: the typechecker sets this attribute for all +projectors. Karamel relies on it to do inlining. 
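The tactic below therefore attaches `Pervasives.Substitute` explicitly to every projector sigelt it generates.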
*) +let substitute_attr : term = + `Pervasives.Substitute + +let mk_proj_decl (is_method:bool) + (tyqn:name) ctorname + (univs : list univ_name) + (params : list binder) + (idx:nat) + (field : binder) + (unfold_names_tm : term) + (smap : list (namedv & fv)) +: Tac (list sigelt & fv) += + debug (fun () -> "Processing field " ^ unseal field.ppname); + debug (fun () -> "Field typ = " ^ term_to_string field.sort); + let np = length params in + let tyfv = pack_fv tyqn in + let nm : name = cur_module () @ ["__proj__" ^ list_last ctorname ^ "__item__" ^ unseal field.ppname] in + let fv = pack_fv nm in + let rty : term = + let hd = pack (Tv_UInst tyfv (List.Tot.map (fun un -> pack_universe (Uv_Name un)) univs)) in + mk_app hd (List.Tot.map binder_argv params) + in + let rb : binder = fresh_binder rty in + let projty = mk_tot_arr (List.Tot.map binder_mk_implicit params + @ [rb]) + (subst_map smap (binder_to_term rb) field.sort) + in + debug (fun () -> "Proj typ = " ^ term_to_string projty); + let se_proj = pack_sigelt <| + Sg_Let { + isrec = false; + lbs = [{ + lb_fv = fv; + lb_us = univs; + lb_typ = projty; + lb_def = + (* NB: the definition of the projector is again a tactic + invocation, so this whole thing has two phases. *) + (`(_ by (mk_one_projector + (`#unfold_names_tm) + (`#(embed_int np)) + (`#(embed_int idx))))) + }]} + in + let maybe_se_method : list sigelt = + if not is_method then [] else + if List.existsb (Reflection.TermEq.Simple.term_eq (`Typeclasses.no_method)) field.attrs then [] else + let meth_fv = pack_fv (cur_module () @ [unseal field.ppname]) in + let rb = { rb with qual = Q_Meta (`Typeclasses.tcresolve) } in + let projty = mk_tot_arr (List.Tot.map binder_mk_implicit params + @ [rb]) + (subst_map smap (binder_to_term rb) field.sort) + in + (* The method is just defined based on the projector. *) + let lb_def = + if true + then + (* This generates a method defined to be equal to the projector + i.e. method {| c |} = c.method *) + (`(_ by (mk_one_method + (`#(embed_string (implode_qn nm))) + (`#(embed_int np))))) + else + (* This defines the method in the same way as the projector + i.e. method {| c |} = match c with | Mk ... method ... -> method *) + (`(_ by (mk_one_projector + (`#unfold_names_tm) + (`#(embed_int np)) + (`#(embed_int idx))))) + in + (* dump ("returning se with name " ^ unseal field.ppname); *) + (* dump ("def = " ^ term_to_string lb_def); *) + let se = pack_sigelt <| Sg_Let { + isrec = false; + lbs = [{ + lb_fv = meth_fv; + lb_us = univs; + lb_typ = projty; + lb_def = lb_def; + }]} + in + [se] + in + (* Propagate binder attributes, i.e. attributes in the field + decl, to the method itself. *) + let se_proj = set_sigelt_attrs (substitute_attr :: field.attrs @ sigelt_attrs se_proj) se_proj in + + (* Do we need to set the sigelt's Projector qual? If so, + here is how to do it, but F* currently rejects tactics + trying to generate "internal" qualifiers like Projector. However, + it does not seem to make a difference. *) + (* In fact, it seems to trip the encoding as soon as a field + has more binders, since the encoding has some primitive treatment + for projectors/discriminators. *) + //let se_proj = set_sigelt_quals ( + // Projector (ctorname, pack_ident (unseal field.ppname, range_0)) :: + // sigelt_quals se_proj) se_proj + //in + (se_proj :: maybe_se_method , fv) + +[@@plugin] +let mk_projs (is_class:bool) (tyname:string) : Tac decls = + debug (fun () -> "!! 
mk_projs tactic called on: " ^ tyname); + let tyqn = explode_qn tyname in + match lookup_typ (top_env ()) tyqn with + | None -> + raise NotFound + | Some se -> + match inspect_sigelt se with + | Sg_Inductive {nm; univs; params; typ; ctors} -> + if (length ctors <> 1) then + fail "Expected an inductive with one constructor"; + let indices = fst (collect_arr_bs typ) in + if Cons? indices then + fail "Inductive indices nonempty?"; + let [(ctorname, ctor_t)] = ctors in + (* dump ("ityp = " ^ term_to_string typ); *) + (* dump ("ctor_t = " ^ term_to_string ctor_t); *) + let (fields, _) = collect_arr_bs ctor_t in + let unfold_names_tm = `(Nil u#0 #string) in + let (decls, _, _, _) = + fold_left (fun (decls, smap, unfold_names_tm, idx) (field : binder) -> + let (ds, fv) = mk_proj_decl is_class tyqn ctorname univs params idx field unfold_names_tm smap in + (decls @ ds, + (binder_to_namedv field, fv)::smap, + (`(Cons u#0 #string (`#(embed_string (implode_qn (inspect_fv fv)))) (`#unfold_names_tm))), + idx+1)) + ([], [], unfold_names_tm, 0) + fields + in + decls + | _ -> + fail "not an inductive" diff --git a/stage0/ulib/FStar.Tactics.MkProjectors.fsti b/stage0/ulib/FStar.Tactics.MkProjectors.fsti new file mode 100644 index 00000000000..a31ce5b77aa --- /dev/null +++ b/stage0/ulib/FStar.Tactics.MkProjectors.fsti @@ -0,0 +1,17 @@ +module FStar.Tactics.MkProjectors + +open FStar.Stubs.Reflection.Types +open FStar.Tactics.Effect + +(** Opt-in to the new projector generation by a metaprogram. This also + affects typeclass method generation. *) +val meta_projectors : unit + +[@@plugin] +val mk_one_projector (unf:list string) (np:nat) (i:nat) : Tac unit + +[@@plugin] +val mk_one_method (proj:string) (np:nat) : Tac unit + +[@@plugin] +val mk_projs (is_class:bool) (tyname:string) : Tac (list sigelt) diff --git a/stage0/ulib/FStar.Tactics.NamedView.fst b/stage0/ulib/FStar.Tactics.NamedView.fst new file mode 100644 index 00000000000..0c54161cc3d --- /dev/null +++ b/stage0/ulib/FStar.Tactics.NamedView.fst @@ -0,0 +1,662 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.NamedView + +(* inner let bindings not encoded, OK *) +#set-options "--warn_error -242" + +(* This file is part of the tactics core, we open only what's needed. *) +open FStar.Tactics.Effect +open FStar.Tactics.Util +open FStar.Stubs.Tactics.V2.Builtins + +exception LengthMismatch +exception NotEnoughBinders + +(* We work with reflection V2. 
*) +module R = FStar.Reflection.V2 +module RD = FStar.Stubs.Reflection.V2.Data + +let open_universe_view (v:RD.universe_view) : named_universe_view = + match v with + | R.Uv_Zero -> Uv_Zero + | R.Uv_Succ u -> Uv_Succ u + | R.Uv_Max us -> Uv_Max us + | R.Uv_BVar n -> Uv_BVar n + | R.Uv_Name i -> Uv_Name (inspect_ident i) + | R.Uv_Unif uvar -> Uv_Unif uvar + | R.Uv_Unk -> Uv_Unk + +let inspect_universe (u:universe) : named_universe_view = + let v = R.inspect_universe u in + open_universe_view v + +let close_universe_view (v:named_universe_view) : R.universe_view = + match v with + | Uv_Zero -> R.Uv_Zero + | Uv_Succ u -> R.Uv_Succ u + | Uv_Max us -> R.Uv_Max us + | Uv_BVar n -> R.Uv_BVar n + | Uv_Name i -> R.Uv_Name (pack_ident i) + | Uv_Unif uvar -> R.Uv_Unif uvar + | Uv_Unk -> R.Uv_Unk + +let pack_universe (uv:named_universe_view) : universe = + let uv = close_universe_view uv in + R.pack_universe uv + +private +let __binding_to_binder (bnd : binding) (b : R.binder) : binder = + { + ppname = bnd.ppname; + uniq = bnd.uniq; + sort = bnd.sort; + qual = (inspect_binder b).qual; + attrs = (inspect_binder b).attrs; + } + +private +let r_binder_to_namedv (b : binder) : R.namedv = + pack_namedv { + uniq = b.uniq; + sort = seal b.sort; + ppname = b.ppname; + } + +private +let open_binder (b : R.binder) : Tac binder = + let n = fresh () in + let bv = inspect_binder b in + { + uniq = n; + sort = bv.sort; + ppname = bv.ppname; + qual = bv.qual; + attrs = bv.attrs; + } + +private +let close_binder (b : binder) : R.binder = + pack_binder { + sort = b.sort; + qual = b.qual; + ppname = b.ppname; + attrs = b.attrs; + } + +private +let open_term_with (b : R.binder) (nb : binder) (t : term) : Tac term = + let nv : R.namedv = pack_namedv { + uniq = nb.uniq; + sort = seal nb.sort; + ppname = nb.ppname; + } + in + let t' = subst_term [DB 0 nv] t in + t' + +private +let open_term (b : R.binder) (t : term) : Tac (binder & term) = + let bndr : binder = open_binder b in + (bndr, open_term_with b bndr t) + +let subst_comp (s : subst_t) (c : comp) : comp = + inspect_comp (R.subst_comp s (pack_comp c)) + +private +let open_comp (b : R.binder) (t : comp) : Tac (binder & comp) = + let n = fresh () in + let bv : binder_view = inspect_binder b in + let nv : R.namedv = pack_namedv { + uniq = n; + sort = seal bv.sort; + ppname = bv.ppname; + } + in + let t' = subst_comp [DB 0 nv] t in + let bndr : binder = { + uniq = n; + sort = bv.sort; + ppname = bv.ppname; + qual = bv.qual; + attrs = bv.attrs; + } + in + (bndr, t') + +private +let open_comp_with (b : R.binder) (nb : binder) (c : comp) : Tac comp = + let nv : R.namedv = pack_namedv { + uniq = nb.uniq; + sort = seal nb.sort; + ppname = nb.ppname; + } + in + let t' = subst_comp [DB 0 nv] c in + t' + +(* FIXME: unfortunate duplication here. The effect means this proof cannot +be done extrinsically. Can we add a refinement to the binder? 
*) +private +let open_term_simple (b : R.simple_binder) (t : term) : Tac (simple_binder & term) = + let n = fresh () in + let bv : binder_view = inspect_binder b in + let nv : R.namedv = pack_namedv { + uniq = n; + sort = seal bv.sort; + ppname = bv.ppname; + } + in + let t' = subst_term [DB 0 nv] t in + let bndr : binder = { + uniq = n; + sort = bv.sort; + ppname = bv.ppname; + qual = bv.qual; + attrs = bv.attrs; + } + in + (bndr, t') + +private +let open_comp_simple (b : R.simple_binder) (t : comp) : Tac (simple_binder & comp) = + let n = fresh () in + let bv : binder_view = inspect_binder b in + let nv : R.namedv = pack_namedv { + uniq = n; + sort = seal bv.sort; + ppname = bv.ppname; + } + in + let t' = subst_comp [DB 0 nv] t in + let bndr : binder = { + uniq = n; + sort = bv.sort; + ppname = bv.ppname; + qual = bv.qual; + attrs = bv.attrs; + } + in + (bndr, t') + +(* This can be useful externally *) +let close_term (b:binder) (t:term) : R.binder & term = + let nv = r_binder_to_namedv b in + let t' = subst_term [NM nv 0] t in + let b = pack_binder { sort = b.sort; ppname = b.ppname; qual = b.qual; attrs = b.attrs } in + (b, t') +private +let close_comp (b:binder) (t:comp) : R.binder & comp = + let nv = r_binder_to_namedv b in + let t' = subst_comp [NM nv 0] t in + let b = pack_binder { sort = b.sort; ppname = b.ppname; qual = b.qual; attrs = b.attrs } in + (b, t') + +private +let close_term_simple (b:simple_binder) (t:term) : R.simple_binder & term = + let nv = r_binder_to_namedv b in + let t' = subst_term [NM nv 0] t in + let bv : binder_view = { sort = b.sort; ppname = b.ppname; qual = b.qual; attrs = b.attrs } in + let b = pack_binder bv in + inspect_pack_binder bv; + (b, t') +private +let close_comp_simple (b:simple_binder) (t:comp) : R.simple_binder & comp = + let nv = r_binder_to_namedv b in + let t' = subst_comp [NM nv 0] t in + let bv : binder_view = { sort = b.sort; ppname = b.ppname; qual = b.qual; attrs = b.attrs } in + let b = pack_binder bv in + inspect_pack_binder bv; + (b, t') + +private +let r_subst_binder_sort (s : subst_t) (b : R.binder) : R.binder = + let v = inspect_binder b in + let v = { v with sort = subst_term s v.sort } in + pack_binder v + +let subst_binder_sort (s : subst_t) (b : binder) : binder = + { b with sort = subst_term s b.sort } + +(* Can't define this inside open_term_n. 
See #2955 *) +private +let rec __open_term_n_aux (bs : list R.binder) (nbs : list binder) (s : subst_t) : Tac (list binder & subst_t) = + match bs with + | [] -> nbs, s + | b::bs -> + let b = r_subst_binder_sort s b in + let b = open_binder b in + let nv = r_binder_to_namedv b in + __open_term_n_aux bs (b::nbs) (DB 0 nv :: shift_subst 1 s) + +private +let open_term_n (bs : list R.binder) (t : term) : Tac (list binder & term) = + let nbs, s = __open_term_n_aux bs [] [] in + List.Tot.rev nbs, subst_term s t + +private +let rec open_term_n_with (bs : list R.binder) (nbs : list binder) (t : term) : Tac term = + match bs, nbs with + | [], [] -> t + | b::bs, nb::nbs -> + let t' = open_term_n_with bs nbs t in + let t'' = open_term_with b nb t' in + t'' + | _ -> raise LengthMismatch + +private +let close_term_n (bs : list binder) (t : term) : list R.binder & term = + let rec aux (bs : list binder) (cbs : list R.binder) (s : subst_t) : list R.binder & subst_t = + match bs with + | [] -> cbs, s + | b::bs -> + let b = subst_binder_sort s b in + let nv = r_binder_to_namedv b in + let b = close_binder b in + aux bs (b::cbs) (NM nv 0 :: shift_subst 1 s) + in + let cbs, s = aux bs [] [] in + List.Tot.rev cbs, subst_term s t + +private +let rec open_term_n_simple (bs : list R.simple_binder) (t : term) : Tac (list simple_binder & term) = + match bs with + | [] -> ([], t) + | b::bs -> + let bs', t' = open_term_n_simple bs t in + let b', t'' = open_term_simple b t' in + (b'::bs', t'') + +private +let rec close_term_n_simple (bs : list simple_binder) (t : term) : list R.simple_binder & term = + match bs with + | [] -> ([], t) + | b::bs -> + let bs', t' = close_term_n_simple bs t in + let b', t'' = close_term_simple b t' in + (b'::bs', t'') + +private +let rec open_pat (p : R.pattern) (s : subst_t) : Tac (pattern & subst_t) = + match p with + | R.Pat_Constant c -> + Pat_Constant {c=c}, s + + | R.Pat_Var ssort n -> + let sort = unseal ssort in + let sort = subst_term s sort in + let nvv : namedv = { + uniq = fresh(); + sort = seal sort; + ppname = n; + } + in + let nv = pack_namedv nvv in + Pat_Var {v=nvv; sort=seal sort}, (DB 0 nv) :: shift_subst 1 s + + | R.Pat_Cons head univs subpats -> + let subpats, s = fold_left (fun (pats,s) (pat,b) -> + let pat, s' = open_pat pat s in + ((pat,b)::pats, s')) + ([], s) subpats + in + let subpats = List.Tot.rev subpats in + Pat_Cons {head=head; univs=univs; subpats=subpats}, s + + | R.Pat_Dot_Term None -> + Pat_Dot_Term {t=None}, s + + | R.Pat_Dot_Term (Some t) -> + let t = subst_term s t in + Pat_Dot_Term {t=Some t}, s + +private +let open_branch (b : R.branch) : Tac branch = + let (pat, t) = b in + let pat, s = open_pat pat [] in + let t' = subst_term s t in + (pat, t') + +private +let rec close_pat (p : pattern) (s : subst_t) : Tot (R.pattern & subst_t) = + match p with + | Pat_Constant {c} -> + R.Pat_Constant c, s + + | Pat_Var {v; sort} -> + let nv = pack_namedv v in + (* NOTE: we cannot do anything on the sort wihtout going + into TAC. Need a sealed_bind. 
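Until such a combinator exists, the sealed sort is returned unchanged, without applying the substitution (see the commented-out lines below).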
*) + //let sort = unseal sort in + //let sort = subst_term s sort in + //let sort = seal sort in + let s = (NM nv 0) :: shift_subst 1 s in + R.Pat_Var sort v.ppname, s + + | Pat_Cons {head; univs; subpats} -> + let subpats, s = List.Tot.fold_left (fun (pats,s) (pat,b) -> + assume(pat << p); + let pat, s' = close_pat pat s in + ((pat,b)::pats, s')) + ([], s) subpats + in + let subpats = List.Tot.rev subpats in + R.Pat_Cons head univs subpats, s + + | Pat_Dot_Term {t=None} -> + R.Pat_Dot_Term None, s + + | Pat_Dot_Term {t=Some t} -> + let t = subst_term s t in + R.Pat_Dot_Term (Some t), s + +private +let close_branch (b : branch) : Tot R.branch = + let (pat, t) = b in + let pat, s = close_pat pat [] in + let t' = subst_term s t in + (pat, t') + +private +let open_match_returns_ascription (mra : R.match_returns_ascription) : Tac match_returns_ascription = + let (b, (ct, topt, use_eq)) = mra in + let nb = open_binder b in + let ct = match ct with + | Inl t -> Inl (open_term_with b nb t) + | Inr c -> + let c = inspect_comp c in + let c = open_comp_with b nb c in + Inr c + in + let topt = + match topt with + | None -> None + | Some t -> Some (open_term_with b nb t) + in + (nb, (ct, topt, use_eq)) + +private +let close_match_returns_ascription (mra : match_returns_ascription) : R.match_returns_ascription = + let (nb, (ct, topt, use_eq)) = mra in + let b = close_binder nb in + // FIXME: all this is repeating the close_binder work, for no good reason + let ct = match ct with + | Inl t -> Inl (snd (close_term nb t)) + | Inr c -> + let _, c = close_comp nb c in + let c = pack_comp c in + Inr c + in + let topt = + match topt with + | None -> None + | Some t -> Some (snd (close_term nb t)) + in + (b, (ct, topt, use_eq)) + +private +let open_view (tv:term_view) : Tac named_term_view = + match tv with + (* Nothing interesting *) + | RD.Tv_Var v -> Tv_Var (inspect_namedv v) + | RD.Tv_BVar v -> Tv_BVar (inspect_bv v) + | RD.Tv_FVar v -> Tv_FVar v + | RD.Tv_UInst v us -> Tv_UInst v us + | RD.Tv_App hd a -> Tv_App hd a + | RD.Tv_Type u -> Tv_Type u + | RD.Tv_Const c -> Tv_Const c + | RD.Tv_Uvar n ctx_uvar_and_subst -> Tv_Uvar n ctx_uvar_and_subst + | RD.Tv_AscribedT e t tac use_eq -> Tv_AscribedT e t tac use_eq + | RD.Tv_AscribedC e c tac use_eq -> Tv_AscribedC e (inspect_comp c) tac use_eq + | RD.Tv_Unknown -> Tv_Unknown + | RD.Tv_Unsupp -> Tv_Unsupp + + (* Below are the nodes that actually involve a binder. + Open them and convert to named binders. 
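For example, `RD.Tv_Abs` carries an `R.binder` whose occurrences in the body are de Bruijn indices; `open_term` substitutes a fresh named variable for them and returns the corresponding named `binder`.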
*) + + | RD.Tv_Abs b body -> + let nb, body = open_term b body in + Tv_Abs nb body + + | RD.Tv_Arrow b c -> + let nb, c = open_comp b (inspect_comp c) in + Tv_Arrow nb c + + | RD.Tv_Refine b ref -> + let nb, ref = open_term_simple b ref in + Tv_Refine nb ref + + | RD.Tv_Let recf attrs b def body -> + let nb, body = open_term_simple b body in + let def = + if recf + then subst_term [DB 0 (r_binder_to_namedv nb)] def + else def + in + Tv_Let recf attrs nb def body + + | RD.Tv_Match scrutinee ret brs -> + let brs = map open_branch brs in + let ret = map_opt open_match_returns_ascription ret in + Tv_Match scrutinee ret brs + +private +let close_view (tv : named_term_view) : Tot term_view = + match tv with + (* Nothing interesting *) + | Tv_Var v -> RD.Tv_Var (pack_namedv v) + | Tv_BVar v -> RD.Tv_BVar (pack_bv v) + | Tv_FVar v -> RD.Tv_FVar v + | Tv_UInst v us -> RD.Tv_UInst v us + | Tv_App hd a -> RD.Tv_App hd a + | Tv_Type u -> RD.Tv_Type u + | Tv_Const c -> RD.Tv_Const c + | Tv_Uvar n ctx_uvar_and_subst -> RD.Tv_Uvar n ctx_uvar_and_subst + | Tv_AscribedT e t tac use_eq -> RD.Tv_AscribedT e t tac use_eq + | Tv_AscribedC e c tac use_eq -> RD.Tv_AscribedC e (pack_comp c) tac use_eq + | Tv_Unknown -> RD.Tv_Unknown + | Tv_Unsupp -> RD.Tv_Unsupp + + (* Below are the nodes that actually involve a binder. + Open them and convert to named binders. *) + + | Tv_Abs nb body -> + let b, body = close_term nb body in + RD.Tv_Abs b body + + | Tv_Arrow nb c -> + let b, c = close_comp nb c in + let c = pack_comp c in + RD.Tv_Arrow b c + + | Tv_Refine nb ref -> + let b, ref = close_term_simple nb ref in + RD.Tv_Refine b ref + + | Tv_Let recf attrs nb def body -> + let def = + if recf + then subst_term [NM (r_binder_to_namedv nb) 0] def + else def + in + let b, body = close_term_simple nb body in + RD.Tv_Let recf attrs b def body + + | Tv_Match scrutinee ret brs -> + let brs = List.Tot.map close_branch brs in + (* NOTE: this used to use FStar.Option.mapTot, but that brings + in way too many dependencies. 
*) + let ret = + match ret with + | None -> None + | Some asc -> Some (close_match_returns_ascription asc) + in + RD.Tv_Match scrutinee ret brs + +[@@plugin; coercion] +let inspect (t:term) : Tac named_term_view = + let t = compress t in + let tv = inspect_ln t in + open_view tv + +[@@plugin; coercion] +let pack (tv:named_term_view) : Tot term = + let tv = close_view tv in + pack_ln tv + +private +let open_univ_s (us : list R.univ_name) : Tac (list univ_name & subst_t) = + let n = List.Tot.length us in + let s = mapi (fun i u -> UN (n-1-i) (R.pack_universe (R.Uv_Name u))) us in + Util.map (fun i -> inspect_ident i) us, s + +private +let close_univ_s (us : list univ_name) : list R.univ_name & subst_t = + let n = List.Tot.length us in + let us = List.Tot.map (fun i -> pack_ident i) us in + let s = List.Tot.mapi (fun i u -> UD u (n-i-1)) us in + us, s + +private +let open_lb (lb : R.letbinding) : Tac letbinding = + let {lb_fv; lb_us; lb_typ; lb_def} = inspect_lb lb in + let lb_us, s = open_univ_s lb_us in + let lb_typ = subst_term s lb_typ in + let lb_def = subst_term s lb_def in + { lb_fv; lb_us; lb_typ; lb_def } + +private +let close_lb (lb : letbinding) : R.letbinding = + let {lb_fv; lb_us; lb_typ; lb_def} = lb in + let lb_us, s = close_univ_s lb_us in + let lb_typ = subst_term s lb_typ in + let lb_def = subst_term s lb_def in + pack_lb { lb_fv; lb_us; lb_typ; lb_def } + +private +let subst_r_binders (s:subst_t) (bs : list R.binder) : list R.binder = + List.Tot.mapi (fun i b -> r_subst_binder_sort (shift_subst i s) b) bs + +private +let rec open_n_binders_from_arrow (bs : binders) (t : term) : Tac term = + match bs with + | [] -> t + | b::bs -> + match inspect t with + | Tv_Arrow b' (C_Total t') -> + let t' = subst_term [NT (r_binder_to_namedv b') (pack (Tv_Var (inspect_namedv (r_binder_to_namedv b))))] t' in + open_n_binders_from_arrow bs t' + | _ -> raise NotEnoughBinders + +private +let open_sigelt_view (sv : sigelt_view) : Tac named_sigelt_view = + match sv with + | RD.Sg_Let isrec lbs -> + let lbs = map open_lb lbs in + (* open universes, maybe *) + Sg_Let { isrec; lbs } + + | RD.Sg_Inductive nm univs params typ ctors -> + let nparams = List.Tot.length params in + + (* Open universes everywhere *) + let univs, s = open_univ_s univs in + let params = subst_r_binders s params in + let typ = subst_term (shift_subst nparams s) typ in + let ctors = map (fun (nm, ty) -> nm, subst_term s ty) ctors in + + (* Open parameters in themselves and in type *) + let params, typ = open_term_n params typ in + (* Remove the parameter binders from the constructors, + replace them by the opened param binders. Hence we get + Cons : a0 -> list a0 + instead of + Cons : #a:Type -> a -> list a + for the returned open parameter a0. *) + let ctors = + map (fun (nm, ty) -> + let ty'= open_n_binders_from_arrow params ty in + nm, ty') + ctors + in + + Sg_Inductive {nm; univs; params; typ; ctors} + + | RD.Sg_Val nm univs typ -> + let univs, s = open_univ_s univs in + let typ = subst_term s typ in + Sg_Val {nm; univs; typ} + + | RD.Unk -> Unk + +private +let rec mk_arr (args : list binder) (t : term) : Tac term = + match args with + | [] -> t + | a :: args' -> + let t' = C_Total (mk_arr args' t) in + pack (Tv_Arrow a t') + +private +let close_sigelt_view (sv : named_sigelt_view{~(Unk? sv)}) : Tac (sv:sigelt_view{~(RD.Unk? 
sv)}) = + match sv with + | Sg_Let { isrec; lbs } -> + let lbs = List.Tot.map close_lb lbs in + RD.Sg_Let isrec lbs + + | Sg_Inductive {nm; univs; params; typ; ctors} -> + let nparams = List.Tot.length params in + (* Abstract constructors by the parameters. This + is the inverse of the open_n_binders_from_arrow above. *) + let ctors = + map (fun (nm, ty) -> + let ty' = mk_arr params ty in + nm, ty') + ctors + in + + (* Close parameters in themselves and typ *) + let params, typ = close_term_n params typ in + + (* close univs *) + let univs, s = close_univ_s univs in + let params = subst_r_binders s params in + let typ = subst_term (shift_subst nparams s) typ in + let ctors = map (fun (nm, ty) -> nm, subst_term s ty) ctors in + + RD.Sg_Inductive nm univs params typ ctors + + | Sg_Val {nm; univs; typ} -> + let univs, s = close_univ_s univs in + let typ = subst_term s typ in + RD.Sg_Val nm univs typ + +[@@plugin] +let inspect_sigelt (s : sigelt) : Tac named_sigelt_view = + let sv = R.inspect_sigelt s in + (* dump ("sv orig = " ^ term_to_string (quote sv)); *) + open_sigelt_view sv + +[@@plugin] +let pack_sigelt (sv:named_sigelt_view{~(Unk? sv)}) : Tac sigelt = + let sv = close_sigelt_view sv in + R.pack_sigelt sv + +let tcc (e:env) (t:term) : Tac comp = + let c : R.comp = Stubs.Tactics.V2.Builtins.tcc e t in + R.inspect_comp c + +let comp_to_string (c:comp) : Tac string = Stubs.Tactics.V2.Builtins.comp_to_string (R.pack_comp c) diff --git a/stage0/ulib/FStar.Tactics.NamedView.fsti b/stage0/ulib/FStar.Tactics.NamedView.fsti new file mode 100644 index 00000000000..46fe0c4e2ae --- /dev/null +++ b/stage0/ulib/FStar.Tactics.NamedView.fsti @@ -0,0 +1,237 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.NamedView + +open FStar.Tactics.Effect +open FStar.Reflection.V2 +module R = FStar.Reflection.V2 + +(* Re export the syntax types. Expose variables as their views, users do +not need to pack/inspect these if they are using the named view. *) +type namedv = R.namedv_view +type bv = R.bv_view +type comp = R.comp_view +type binding = R.binding (* already good *) +(* Terms and universes are still *deep*, so we do not change their +representation, and the user needs to pack/inspect. *) +type term = R.term +type universe = R.universe + +[@@plugin] +noeq +type binder = { + uniq : nat; + + ppname : ppname_t; + sort : R.typ; + qual : aqualv; + attrs : list term; +} +type binders = list binder + +let is_simple_binder (b:binder) = Q_Explicit? b.qual /\ Nil? 
b.attrs +type simple_binder = b:binder{is_simple_binder b} + +type univ_name = string & Range.range + +[@@plugin] +noeq +type named_universe_view = + | Uv_Zero : named_universe_view + | Uv_Succ : universe -> named_universe_view + | Uv_Max : universes -> named_universe_view + | Uv_BVar : nat -> named_universe_view + | Uv_Name : univ_name -> named_universe_view + | Uv_Unif : R.universe_uvar -> named_universe_view + | Uv_Unk : named_universe_view + +[@@plugin] +noeq +type pattern = + // A built-in constant + | Pat_Constant { + c : vconst + } + + // A fully applied constructor, each boolean marks whether the + // argument was an explicitly-provided implicit argument + | Pat_Cons { + head : fv; + univs : option universes; + subpats : list (pattern & bool) + } + + // A pattern-bound *named* variable. + | Pat_Var { + v : namedv; + sort : sealed typ; + } + + // Dot pattern: resolved by other elements in the pattern and type + | Pat_Dot_Term { + t : option term; + } + +type branch = pattern & term +type match_returns_ascription = binder & (either term comp & option term & bool) + +[@@plugin] +noeq +type named_term_view = + | Tv_Var : v:namedv -> named_term_view + | Tv_BVar : v:bv -> named_term_view + | Tv_FVar : v:fv -> named_term_view + | Tv_UInst : v:fv -> us:universes -> named_term_view + | Tv_App : hd:term -> a:argv -> named_term_view + | Tv_Abs : b:binder -> body:term -> named_term_view + | Tv_Arrow : b:binder -> c:comp -> named_term_view + | Tv_Type : universe -> named_term_view + | Tv_Refine : b:simple_binder -> ref:term -> named_term_view + | Tv_Const : vconst -> named_term_view + | Tv_Uvar : nat -> ctx_uvar_and_subst -> named_term_view + | Tv_Let : recf:bool -> attrs:(list term) -> b:simple_binder -> def:term -> body:term -> named_term_view + | Tv_Match : scrutinee:term -> ret:option match_returns_ascription -> brs:(list branch) -> named_term_view + | Tv_AscribedT : e:term -> t:term -> tac:option term -> use_eq:bool -> named_term_view + | Tv_AscribedC : e:term -> c:comp -> tac:option term -> use_eq:bool -> named_term_view + | Tv_Unknown : named_term_view // An underscore: _ + | Tv_Unsupp : named_term_view // failed to inspect, not supported + +// Repeat from FStar.R.Data +let notAscription (tv:named_term_view) : bool = + not (Tv_AscribedT? tv) && not (Tv_AscribedC? tv) + +[@@plugin] +noeq +type letbinding = { + lb_fv : fv; + lb_us : list univ_name; (* opened *) + lb_typ : typ; + lb_def : term; +} + +[@@plugin] +noeq +type named_sigelt_view = + | Sg_Let { + isrec : bool; + lbs : list letbinding; + } + + // Sg_Inductive basically coalesces the Sig_bundle used internally, + // where the type definition and its constructors are split. + // While that might be better for typechecking, this is probably better for metaprogrammers + // (no mutually defined types for now) + | Sg_Inductive { + nm : name; // name of the inductive type being defined + univs : list univ_name; // named universe variables + params : binders; // parameters + typ : typ; // the type annotation for the inductive, i.e., indices -> Type #u + ctors : list ctor; // the constructors, opened with univs and applied to params already + } + + | Sg_Val { + nm : name; + univs : list univ_name; + typ : typ; + } + + | Unk + +(* Some helpers. The latter two are not marked coercions as they make a +choice to not add qualifiers/attrs, so we let the user call them. 
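+For instance, a hypothetical [namedv_to_binder v (`nat)] yields an explicit,
+attribute-free binder with sort [nat], which the user can then adjust by hand.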
*) +[@@coercion] +let binder_to_binding (b : binder) : binding = + { + ppname = b.ppname; + uniq = b.uniq; + sort = b.sort; + } +let binding_to_binder (bnd : binding) : binder = + { + ppname = bnd.ppname; + uniq = bnd.uniq; + sort = bnd.sort; + qual = Q_Explicit; + attrs = [] + } +let namedv_to_binder (v : namedv) (sort : term) : binder = + { + uniq = v.uniq; + sort = sort; + ppname = v.ppname; + qual = Q_Explicit; + attrs = []; + } + +[@@plugin; coercion] +val inspect_universe (u:universe) : Tot named_universe_view + +[@@plugin; coercion] +val pack_universe (uv:named_universe_view) : Tot universe + +[@@plugin] +val close_term (b:binder) (t:term) : Tot (R.binder & term) + +[@@plugin; coercion] +val inspect (t:term) : Tac named_term_view + +[@@plugin; coercion] +val pack (tv:named_term_view) : Tot term + +[@@plugin; coercion] +val inspect_sigelt (s : sigelt) : Tac named_sigelt_view + +[@@plugin; coercion] +val pack_sigelt (sv:named_sigelt_view{~(Unk? sv)}) : Tac sigelt + +(* Some primitives mention `R.comp`, wrap them to use `ThisModule.comp = R.comp_view` *) +[@@plugin] +val tcc (e:env) (t:term) : Tac comp +[@@plugin] +val comp_to_string (c:comp) : Tac string + +(* Clients of this module use the named view. *) +let universe_view = named_universe_view +let term_view = named_term_view +let sigelt_view = named_sigelt_view + +(* Temporary adapters, to avoid breaking existing code too much. *) +let inspect_namedv = id #namedv +let pack_namedv = id #namedv +let inspect_bv = id #bv +let pack_bv = id #bv +let inspect_comp = id #comp +let pack_comp = id #comp + +let tag_of (t:term) : Tac string = + match inspect t with + | Tv_Var bv -> "Tv_Var" + | Tv_BVar fv -> "Tv_BVar" + | Tv_FVar fv -> "Tv_FVar" + | Tv_UInst _ _ -> "Tv_UInst" + | Tv_App f x -> "Tv_App" + | Tv_Abs x t -> "Tv_Abs" + | Tv_Arrow x t -> "Tv_Arrow" + | Tv_Type _ -> "Tv_Type" + | Tv_Refine x t -> "Tv_Refine" + | Tv_Const cst -> "Tv_Const" + | Tv_Uvar i t -> "Tv_Uvar" + | Tv_Let r attrs b t1 t2 -> "Tv_Let" + | Tv_Match t _ branches -> "Tv_Match" + | Tv_AscribedT _ _ _ _ -> "Tv_AscribedT" + | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC" + | Tv_Unknown -> "Tv_Unknown" + | Tv_Unsupp -> "Tv_Unsupp" diff --git a/stage0/ulib/FStar.Tactics.Names.fst b/stage0/ulib/FStar.Tactics.Names.fst new file mode 100644 index 00000000000..496e0d2a0c4 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Names.fst @@ -0,0 +1,25 @@ +module FStar.Tactics.Names + +open FStar.Tactics.NamedView +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Stubs.Reflection.V2.Builtins +module V = FStar.Tactics.Visit + +exception Appears + +(** Decides whether a top-level name [nm] syntactically +appears in the term [t]. *) +let name_appears_in (nm:name) (t:term) : Tac bool = + let ff (t : term) : Tac term = + match inspect t with + | Tv_FVar fv -> + if inspect_fv fv = nm then + raise Appears; + t + | _ -> t + in + match catch (fun () -> ignore (V.visit_tm ff t); false) with + | Inr x -> x + | Inl Appears -> true + | Inl e -> raise e \ No newline at end of file diff --git a/stage0/ulib/FStar.Tactics.Names.fsti b/stage0/ulib/FStar.Tactics.Names.fsti new file mode 100644 index 00000000000..5409a6945bb --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Names.fsti @@ -0,0 +1,9 @@ +module FStar.Tactics.Names + +open FStar.Stubs.Reflection.Types +open FStar.Tactics.Effect + +(** Decides whether a top-level name [nm] syntactically +appears in the term [t]. 
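+As an illustrative sketch, a call such as
+[name_appears_in (explode_qn (`%Nil)) t] is expected to return [true] exactly
+when the constructor [Nil] occurs somewhere in [t].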
*) +[@@plugin] +val name_appears_in (nm:name) (t:term) : Tac bool diff --git a/stage0/ulib/FStar.Tactics.Parametricity.fst b/stage0/ulib/FStar.Tactics.Parametricity.fst new file mode 100644 index 00000000000..8f5b3463ac9 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Parametricity.fst @@ -0,0 +1,355 @@ +module FStar.Tactics.Parametricity + +open FStar.List +open FStar.Tactics.V2.Bare + +type bvmap = list (namedv & (binder & binder & binder)) +let fvmap = list (fv & fv) + +noeq +type param_state = { + bvmap : bvmap; + fresh : int; + recs : fvmap; +} + +let rec fold_right2 (f : 'a -> 'b -> 'c -> Tac 'c) (l1:list 'a) (l2:list 'b) (c:'c) : Tac 'c = + match l1, l2 with + | h1::t1, h2::t2 -> f h1 h2 (fold_right2 f t1 t2 c) + | [], [] -> c + | _ -> fail "fold_right2" + +let rec zip3 (l1 : list 'a) (l2 : list 'b) (l3 : list 'c) : list ('a & 'b & 'c) = + match l1, l2, l3 with + | h1::t1, h2::t2, h3::t3 -> (h1, h2, h3) :: (zip3 t1 t2 t3) + | _ -> [] + +let last (xs:list 'a) : Tac 'a = + match List.Tot.rev xs with + | h::_ -> h + | [] -> fail "last: empty list" + +(* Override it to add freshness. The code for typechecking an inductive +raises a failure if two binders of the same constructor have the same name. *) +//noeq type t = | A of x:int -> x:int -> x:int -> t +// but this doesn't fail nor warn... why?? + +let app_binders (t:term) (bs:list binder) : Tac term = + mk_e_app t (List.Tot.map binder_to_term bs) + +let push_var_to_state (v:namedv) (b0 b1 b2 : binder) (s:param_state) : param_state = + { s with bvmap = (v, (b0, b1, b2)) :: s.bvmap } + +exception NotARecFV +exception NotFoundBV of namedv + +let lookup_rec_fv (s:param_state) (f:fv) : Tac fv = + let rec aux (m:fvmap) : Tac fv = + match m with + | [] -> raise NotARecFV + | (f1, k)::fs -> if compare_fv f f1 = Order.Eq + then k + else aux fs + in + aux s.recs + +let push_fv (f1 f2 : fv) (s:param_state) : param_state = + { s with recs = (f1,f2)::s.recs } + +let lookup (s:param_state) (v:namedv) : Tac (binder & binder & binder) = + let rec aux (bvm : bvmap) : Tac (binder & binder & binder) = + match bvm with + | [] -> + raise (NotFoundBV v) + | (v', r)::tl -> + if (inspect_namedv v).uniq = (inspect_namedv v').uniq + then r + else aux tl + in + aux s.bvmap + +let replace_var (s:param_state) (b:bool) (t:term) : Tac term = + match inspect t with + | Tv_Var v -> + begin try + let (x, y, _) = lookup s v in + let bv = binder_to_namedv (if b then y else x) in + pack (Tv_Var bv) + with + (* Maybe we traversed a binder and there are variables not in the state. + * The right thing here would be to track them... but this should do for now. *) + | NotFoundBV _ -> t + | e -> raise e + end + | _ -> t + +let replace_by (s:param_state) (b:bool) (t:term) : Tac term = + let r = visit_tm (replace_var s b) t in + //print ("rep " ^ string_of_bool b ^ " " ^ term_to_string t ^ " = " ^ term_to_string r); + r + +let tapp q t1 t2 = pack (Tv_App t1 (t2, q)) +let tabs b t : Tac term = pack (Tv_Abs b t) + +let rec param' (s:param_state) (t:term) : Tac term = + let r = + match inspect t with + | Tv_Type _u -> // t = Type + (* It's this: + `(fun (s r : (`#t)) -> s -> r -> Type) + just working around extraction bug. 
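+       In other words, the translation of [Type] is the space of relations:
+       given the two copies [s] and [r] of the type, it returns [s -> r -> Type].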
*) + let s = fresh_binder_named "s" t in + let r = fresh_binder_named "r" t in + let xs = fresh_binder_named "xs" (Tv_Var s) in + let xr = fresh_binder_named "xr" (Tv_Var r) in + pack <| Tv_Abs s <| Tv_Abs r <| Tv_Arrow xs (C_Total <| Tv_Arrow xr (C_Total <| Tv_Type Uv_Unk)) + + | Tv_Var bv -> + let (_, _, b) = lookup s bv in + binder_to_term b + + | Tv_Arrow b c -> // t1 -> t2 === (x:t1) -> Tot t2 + begin match inspect_comp c with + | C_Total t2 -> + let (s', (bx0, bx1, bxR)) = push_binder b s in + let q = b.qual in + + let bf0 = fresh_binder_named "f0" (replace_by s false t) in + let bf1 = fresh_binder_named "f1" (replace_by s true t) in + let b2t = binder_to_term in + let res = `((`#(param' s' t2)) (`#(tapp q (b2t bf0) (b2t bx0))) (`#(tapp q (b2t bf1) (b2t bx1)))) in + tabs bf0 (tabs bf1 (mk_tot_arr [bx0; bx1; bxR] res)) + | _ -> raise (Unsupported "effects") + end + + | Tv_App l (r, q) -> + let lR = param' s l in + let l0 = replace_by s false r in + let l1 = replace_by s true r in + let rR = param' s r in + mk_app lR [(l0, q); (l1, q); (rR, q)] + + | Tv_Abs b t -> + let abs b t : Tac term = pack (Tv_Abs b t) in + let (s', (bx0, bx1, bxR)) = push_binder b s in + + let t = param' s' t in + abs bx0 (abs bx1 (abs bxR t)) + + | Tv_Match t None brs -> + pack (Tv_Match (param' s t) None (map (param_br s) brs)) + + | Tv_UInst fv _ + | Tv_FVar fv -> + pack (Tv_FVar (param_fv s fv)) + + | Tv_Const c -> + `() + + | Tv_AscribedT t _ _ _ + | Tv_AscribedC t _ _ _ -> param' s t + + | _ -> + raise (Unsupported (Tactics.Print.term_to_ast_string t)) + in + r + +and param_fv (s:param_state) (f : fv) : Tac fv = + (* first of all look for recursive knots *) + try lookup_rec_fv s f + with + | _ -> + + (* try to get it from the same module the FV is defined *) + let nm' = explode_qn (implode_qn (inspect_fv f) ^ "_param") in + //dump ("nm' = " ^ implode_qn nm'); + match lookup_typ (top_env ()) nm' with + | Some se' -> pack_fv nm' + | None -> + + (* or this module, where the translation is defined... 
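+       (this is how base cases such as [int_param] and [bool_param], declared
+       in this module's interface, are picked up)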
*) + let nm' = ["FStar"; "Tactics"; "Parametricity"; last (inspect_fv f) ^ "_param"] in + //dump ("nm' = " ^ implode_qn nm'); + match lookup_typ (top_env ()) nm' with + | Some se' -> pack_fv nm' + | None -> + + (* otherwise, try to get it from the *current* module, where we're running the tactic *) + let nm' = cur_module () @ [last (inspect_fv f) ^ "_param"] in + //dump ("nm' = " ^ implode_qn nm'); + match lookup_typ (top_env ()) nm' with + | Some se' -> pack_fv nm' + + (* TODO: lookup in env *) + + | None -> + raise (NotFoundFV f) + +and param_pat (s:param_state) (p : pattern) : Tac (param_state & (pattern & pattern & pattern)) = + let is_dot_pat (p:pattern) : Tac bool = + match p with + | Pat_Dot_Term _ -> true + | _ -> false + in + //dump ("param_pat of " ^ term_to_string (quote p)); + match p with + | Pat_Cons {head=fv; univs=us; subpats=pats} -> + let fv' = param_fv s fv in + let (s', (pats0, pats1, patsr)) = + fold_left (fun (s, (pats0, pats1, patsr)) (p, i) -> + if is_dot_pat p then (s, (pats0, pats1, patsr)) + else + let (s', (p0, p1, pr)) = param_pat s p in + (s', ( + (p0,i)::pats0, + (p1,i)::pats1, + (pr,false)::(p1,i)::(p0,i)::patsr))) + (s, ([], [], [])) + pats + in + let pats0 = List.Tot.rev pats0 in + let pats1 = List.Tot.rev pats1 in + let patsr = List.Tot.rev patsr in + (s', (Pat_Cons {head=fv; univs=us; subpats=pats0}, + Pat_Cons {head=fv; univs=us; subpats=pats1}, + Pat_Cons {head=fv'; univs=us; subpats=patsr})) + + | Pat_Var {v; sort} -> + let b = namedv_to_binder v (unseal sort) in + let (s', (b0, b1, bR)) = push_binder b s in + (s', (Pat_Var {v=binder_to_namedv b0; sort = Sealed.seal (binder_sort b0)}, + Pat_Var {v=binder_to_namedv b1; sort = Sealed.seal (binder_sort b1)}, + Pat_Var {v=binder_to_namedv bR; sort = Sealed.seal (binder_sort bR)})) + + | Pat_Dot_Term t -> + fail "no dot pats" + //let (s', (b0, b1, bR)) = push_binder (pack_binder bv Q_Explicit) s in + //(s', (Pat_Dot_Term (bv_of_binder b0) (replace_by s' false t), + // Pat_Dot_Term (bv_of_binder b1) (replace_by s' true t), + // Pat_Dot_Term (bv_of_binder bR) (param' s' t))) + + | Pat_Constant c -> + let b = fresh_binder_named "cR" (`_) in + (s, (Pat_Constant c, + Pat_Constant c, + Pat_Var {v=binder_to_namedv b; sort=seal (`_)})) + +and param_br (s:param_state) (br : branch) : Tac branch = + let (pat, t) = br in + let (s', (_, _, pat')) = param_pat s pat in + (pat', param' s' t) + +and push_binder (b:binder) (s:param_state) : Tac (param_state & (binder & binder & binder)) = + let q = b.qual in + let typ = b.sort in + let name = unseal b.ppname in + let decor (s : string) (t : string) : Tac string = (s ^ t) in + let bx0 = fresh_binder_named (decor name "0") (replace_by s false typ) in + let bx1 = fresh_binder_named (decor name "1") (replace_by s true typ) in + let bxr = fresh_binder_named (decor name "R") (`(`#(param' s typ)) (`#(binder_to_term bx0)) (`#(binder_to_term bx1))) in + + (* respect implicits *) + let bx0 = { bx0 with qual = q } in + let bx1 = { bx1 with qual = q } in + let bxr = { bxr with qual = q } in + + let s = push_var_to_state (binder_to_namedv b) bx0 bx1 bxr s in + (s, (bx0, bx1, bxr)) + +let init_param_state : param_state = { + bvmap = []; + fresh = 0; + recs = []; +} + +[@@plugin] +let param (t:term) : Tac term = + let t = param' init_param_state t in + //dump ("res = " ^ term_to_string t); + t + +let fv_to_tm (f:fv) : Tac term = pack (Tv_FVar f) + +let param_ctor (nm_ty:name) (s:param_state) (c:ctor) : Tac ctor = + (* dump ("ctor0: " ^ term_to_string (quote c)); *) + let nm, ty = c in + 
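(* The translated constructor is named after the original one, suffixed with "_param", in the current module. *) +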
let nm' = cur_module () @ [last nm ^ "_param"] in + let bs, c = collect_arr_bs ty in + + let orig = app_binders (fv_to_tm (pack_fv nm)) bs in + + let (s, bs) = + fold_left (fun (s, bvs) b -> let (s, (bx0, bx1, bxr)) = push_binder b s in + (s, bxr::bx1::bx0::bvs)) (s, []) bs + in + let bs = List.Tot.rev bs in + + let cod = + match inspect_comp c with + | C_Total ty -> ty + | _ -> fail "param_ctor got a non-tot comp" + in + + let cod = mk_e_app (param' s cod) [replace_by s false orig; replace_by s true orig] in + + let ty' = mk_tot_arr bs cod in + + let r = (nm', ty') in + (* dump ("ctor1: " ^ term_to_string (quote r)); *) + r + +//let absN (bs : list binder) (t : term) : Tac term = +// Tactics.Util.fold_right (fun b t -> tabs b t) bs t + +let param_inductive (se:sigelt) (fv0 fv1 : fv) : Tac decls = + match inspect_sigelt se with + | Sg_Inductive {nm; univs; params; typ; ctors} -> + (* dump ("typ = " ^ term_to_string typ); *) + let s = push_fv fv0 fv1 init_param_state in + let orig = app_binders (fv_to_tm (pack_fv nm)) params in + (* dump ("orig = " ^ term_to_string orig); *) + let (s, param_bs) = + fold_left (fun (s, bvs) b -> let (s, (bx0, bx1, bxr)) = push_binder b s in + //dump ("bx0 = " ^ term_to_string (quote bx0)); + //dump ("bx1 = " ^ term_to_string (quote bx1)); + //dump ("bxr = " ^ term_to_string (quote bxr)); + (s, bxr::bx1::bx0::bvs)) (s, []) params + in + let param_bs = List.Tot.rev param_bs in + //Tactics.Util.iter (fun bv -> dump ("param bv = " ^ binder_to_string bv)) param_bs; + let typ = mk_e_app (param' s typ) [replace_by s false orig; replace_by s true orig] in + (* dump ("new typ = " ^ term_to_string typ); *) + let ctors = Tactics.Util.map (param_ctor nm s) ctors in + let se = Sg_Inductive {nm=inspect_fv fv1; univs; params=param_bs; typ; ctors} in + (* dump ("param_ind : " ^ term_to_string (quote se)); *) + [pack_sigelt se] + | _ -> fail "" + +let param_letbinding (se:sigelt) (fv0 fv1 : fv) : Tac decls = + match inspect_sigelt se with + | Sg_Let {isrec=r; lbs=[lb]} -> + let rrr = param lb.lb_typ in + let expected_typ = norm_term [] (mk_e_app rrr [fv_to_tm fv0; fv_to_tm fv0]) in + let se' = Sg_Let {isrec=r; lbs=[{lb_fv=fv1; lb_us=lb.lb_us ; lb_typ=expected_typ; lb_def= (param lb.lb_def)}]} in + [pack_sigelt se'] + | _ -> fail "no mutual recursion" + +[@@plugin] +let paramd (nm:string) : Tac decls = + let nm' = implode_qn (cur_module () @ [last (explode_qn nm) ^ "_param"]) in + let fv0 = pack_fv (explode_qn nm) in + let fv1 = pack_fv (explode_qn nm') in + let se = lookup_typ (top_env ()) (explode_qn nm) in + match se with | None -> fail "param_letbinding: not found" | Some se -> + let decls = + match inspect_sigelt se with + | Sg_Let _ -> param_letbinding se fv0 fv1 + | Sg_Inductive _ -> param_inductive se fv0 fv1 + | _ -> fail "paramd: unsupported sigelt" + in + //dump ("returning : " ^ term_to_string (quote decls)); + decls + +[@@plugin] +let paramds (nms:list string) : Tac decls = + List.Tot.flatten (map paramd nms) diff --git a/stage0/ulib/FStar.Tactics.Parametricity.fsti b/stage0/ulib/FStar.Tactics.Parametricity.fsti new file mode 100644 index 00000000000..d502f90bbdb --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Parametricity.fsti @@ -0,0 +1,29 @@ +module FStar.Tactics.Parametricity + +open FStar.Tactics.Effect +open FStar.Stubs.Reflection.Types + +(* May be raised by the translations. 
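+[Unsupported] signals a term construct that the translation does not handle
+(for instance non-Tot arrows), and [NotFoundFV] means that no [_param]
+companion was found for a free variable.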
*) +exception Unsupported of string +exception NotFoundFV of fv + +(* Translate a term or type *) +[@@plugin] +val param (t:term) : Tac term + +(* Take a top-level declaration, of name nm, and generate +declarations for its parametricity translation. *) +[@@plugin] +val paramd (nm:string) : Tac decls + +(* As above for several declarations at once. *) +[@@plugin] +val paramds (nms:list string) : Tac decls + +(* Parametricity principle for some base types. We should use +a typeclass for this. *) +let param_of_eqtype (a:eqtype) : a -> a -> Type0 = (fun (x y : a) -> squash (x == y)) +let int_param = param_of_eqtype int +let bool_param = param_of_eqtype bool +let unit_param = param_of_eqtype unit +let string_param = param_of_eqtype string diff --git a/stage0/ulib/FStar.Tactics.PatternMatching.fst b/stage0/ulib/FStar.Tactics.PatternMatching.fst new file mode 100644 index 00000000000..15982a392a7 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.PatternMatching.fst @@ -0,0 +1,887 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +/// ========================== +/// Pattern-matching tactics +/// ========================== +/// +/// :Author: Clément Pit-Claudel +/// :Contact: clement.pitclaudel@live.com +/// :Date: 2017-10-13 + +module FStar.Tactics.PatternMatching + +open FStar.Tactics.V2 + +/// Contents +/// ======== +/// +/// 1 Contents +/// 2 Motivation +/// 3 Some utility functions +/// 4 Pattern types +/// 5 Pattern matching exceptions +/// 5.1 Types of exceptions +/// 5.2 The exception monad +/// 5.3 Liftings +/// 6 Pattern interpretation +/// 7 Pattern-matching problems +/// 7.1 Definitions +/// 7.2 Resolution +/// 8 A DSL for pattern-matching +/// 8.1 Pattern notations +/// 8.2 Problem notations +/// 8.3 Continuations +/// 9 Putting it all together +/// 10 Examples +/// 10.1 Simple examples +/// 10.2 A real-life example +/// 11 Possible extensions +/// 12 Notes +/// +/// Motivation +/// ========== +/// +/// Suppose you have a goal of the form ``squash (a == b)``. How do you capture +/// `a` and `b` for further inspection? +/// +/// Here's a basic (but cumbersome!) 
implementation: + +let fetch_eq_side () : Tac (term & term) = + let g = cur_goal () in + match inspect g with + | Tv_App squash (g, _) -> + (match inspect squash with + | Tv_UInst squash _ + | Tv_FVar squash -> + if fv_to_string squash = flatten_name squash_qn then + (match inspect g with + | Tv_App eq_type_x (y, _) -> + (match inspect eq_type_x with + | Tv_App eq_type (x, _) -> + (match inspect eq_type with + | Tv_App eq (typ, _) -> + (match inspect eq with + | Tv_UInst eq _ + | Tv_FVar eq -> + if fv_to_string eq = flatten_name eq2_qn then + (x, y) + else fail "not an equality" + | _ -> fail "not an app2 of fvar: ") + | _ -> fail "not an app3") + | _ -> fail "not an app2") + | _ -> fail "not an app under squash") + else fail "not a squash" + | _ -> fail "not an app of fvar at top level") + | _ -> fail "not an app at top level" + +/// …and here's how you could use it: + +(* let _ = *) +(* assert_by_tactic (1 + 1 == 2) *) +(* (fun () -> let l, r = fetch_eq_side () in *) +(* print (term_to_string l ^ " / " ^ term_to_string r)) *) + +/// This file defines pattern-matching primitives that let you write the same +/// thing like this… +/// +/// .. code:: fstar +/// +/// let fetch_eq_side' #a () : Tac (term * term) = +/// gpm (fun (left right: a) (g: pm_goal (squash (left == right))) -> +/// (quote left, quote right) <: Tac (term * term)) +/// +/// let _ = +/// assert_by_tactic (1 + 1 == 2) +/// (fun () -> let l, r = fetch_eq_side' #int () in +/// print (term_to_string l ^ " / " ^ term_to_string r)) +/// +/// …or, more succinctly, like this: +/// +/// .. code:: fstar +/// +/// let _ = +/// assert_by_tactic (1 + 1 == 2) +/// (gpm (fun (left right: int) (g: pm_goal (squash (left == right))) -> +/// let l, r = quote left, quote right in +/// print (term_to_string l ^ " / " ^ term_to_string r) <: Tac unit)) + + +/// Some utility functions +/// ====================== +/// +/// (Skip over this part on a quick read — these are just convenience functions) + + +(** Ensure that tactic `t` fails. **) +let mustfail #a (t: unit -> Tac a) (message: string) : Tac unit = + match trytac t with + | Some _ -> fail message + | None -> () + +/// The following two tactics are needed because of issues with the ``Tac`` +/// effect. + +let implies_intro' () : Tac unit = + let _ = implies_intro () in () + +let repeat' #a (f: unit -> Tac a) : Tac unit = + let _ = repeat f in () + +let and_elim' (h: binding) : Tac unit = + and_elim (pack (Tv_Var h)); + clear h + +(** Use a hypothesis at type a to satisfy a goal at type squash a *) +let exact_hyp (a: Type0) (h: namedv) : Tac unit = + let hd = quote (FStar.Squash.return_squash #a) in + exact (mk_app hd [((pack (Tv_Var h)), Q_Explicit)]) + +(** Use a hypothesis h (of type a) to satisfy a goal at type a *) +let exact_hyp' (h: namedv): Tac unit = + exact (pack (Tv_Var h)) + +/// Pattern types +/// ============= +/// +/// Patterns are defined using a simple inductive type, mirroring the structure +/// of ``term_view``. + +type varname = string + +type qn = string + +type pattern = +| PVar: name: varname -> pattern +| PQn: qn: qn -> pattern +| PType: pattern +| PApp: hd: pattern -> arg: pattern -> pattern + +let desc_of_pattern = function +| PVar _ -> "a variable" +| PQn qn -> "a constant (" ^ qn ^ ")" +| PType -> "Type" +| PApp _ _ -> "a function application" + +let rec string_of_pattern = function +| PVar x -> "?" 
^ x +| PQn qn -> qn +| PType -> "Type" +| PApp l r -> "(" ^ string_of_pattern l ^ " " + ^ string_of_pattern r ^ ")" + +/// Pattern matching exceptions +/// =========================== +/// +/// Pattern-matching is defined as a pure, monadic function (because of issues +/// with combining DM4F effects, but also because it helps with debugging). +/// This section defines the exception monad. +/// +/// Types of exceptions +/// ------------------- + +noeq type match_exception = +| NameMismatch of qn & qn +| SimpleMismatch of pattern & term +| NonLinearMismatch of varname & term & term +| UnsupportedTermInPattern of term +| IncorrectTypeInAbsPatBinder of typ + +let term_head t : Tac string = + match inspect t with + | Tv_Var bv -> "Tv_Var" + | Tv_BVar fv -> "Tv_BVar" + | Tv_FVar fv -> "Tv_FVar" + | Tv_UInst _ _ -> "Tv_UInst" + | Tv_App f x -> "Tv_App" + | Tv_Abs x t -> "Tv_Abs" + | Tv_Arrow x t -> "Tv_Arrow" + | Tv_Type _ -> "Tv_Type" + | Tv_Refine x t -> "Tv_Refine" + | Tv_Const cst -> "Tv_Const" + | Tv_Uvar i t -> "Tv_Uvar" + | Tv_Let r attrs b t1 t2 -> "Tv_Let" + | Tv_Match t _ branches -> "Tv_Match" + | Tv_AscribedT _ _ _ _ -> "Tv_AscribedT" + | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC" + | Tv_Unknown -> "Tv_Unknown" + | Tv_Unsupp -> "Tv_Unsupp" + +let string_of_match_exception = function + | NameMismatch (qn1, qn2) -> + "Match failure (name mismatch): expecting " ^ + qn1 ^ ", found " ^ qn2 + | SimpleMismatch (pat, tm) -> + "Match failure (sort mismatch): expecting " ^ + desc_of_pattern pat ^ ", got " ^ term_to_string tm + | NonLinearMismatch (nm, t1, t2) -> + "Match failure (nonlinear mismatch): variable " ^ nm ^ + " needs to match both " ^ (term_to_string t1) ^ + " and " ^ (term_to_string t2) + | UnsupportedTermInPattern tm -> + "Match failure (unsupported term in pattern): " ^ + term_to_string tm ^ " (" ^ term_head tm ^ ")" + | IncorrectTypeInAbsPatBinder typ -> + "Incorrect type in pattern-matching binder: " ^ + term_to_string typ ^ " (use one of ``var``, ``hyp …``, or ``goal …``)" + +/// The exception monad +/// ------------------- + +noeq type match_res a = +| Success of a +| Failure of match_exception + +let return #a (x: a) : match_res a = + Success x + +let (let?) (#a #b: Type) + (f: match_res a) + (g: a -> Tac (match_res b)) + : Tac (match_res b) = + match f with + | Success aa -> g aa + | Failure ex -> Failure ex + +let raise #a (ex: match_exception) : match_res a = + Failure ex + +/// Liftings +/// -------- +/// +/// There's a natural lifting from the exception monad into the tactic effect: + +let lift_exn_tac #a #b (f: a -> match_res b) (aa: a) : Tac b = + match f aa with + | Success bb -> bb + | Failure ex -> Tactics.fail (string_of_match_exception ex) + +let lift_exn_tactic #a #b (f: a -> match_res b) (aa: a) : Tac b = + match f aa with + | Success bb -> bb + | Failure ex -> Tactics.fail (string_of_match_exception ex) + +/// Pattern interpretation +/// ====================== +/// +/// This section implement pattern-matching. This is strictly a one term, one +/// pattern implementation — handling cases in which mutliple hypotheses match +/// the same pattern is done later. + +type bindings = list (varname & term) +let string_of_bindings (bindings: bindings) = + String.concat "\n" + (map (fun (nm, tm) -> (">> " ^ nm ^ ": " ^ term_to_string tm)) + bindings) + +(** Match a pattern against a term. +`cur_bindings` is a list of bindings collected while matching previous parts of +the pattern. Returns a result in the exception monad. 
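+For example, matching the pattern [PApp (PVar "f") (PVar "x")] against the
+term [g 1] is expected to succeed, binding ["f"] to [g] and ["x"] to [1]
+(illustrative sketch).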
**) +let rec interp_pattern_aux (pat: pattern) (cur_bindings: bindings) (tm:term) + : Tac (match_res bindings) = + let interp_var (v: varname) cur_bindings tm = + match List.Tot.Base.assoc v cur_bindings with + | Some tm' -> if term_eq tm tm' then return cur_bindings + else raise (NonLinearMismatch (v, tm, tm')) + | None -> return ((v, tm) :: cur_bindings) in + let interp_qn (qn: qn) cur_bindings tm = + match inspect tm with + | Tv_UInst fv _ + | Tv_FVar fv -> + if fv_to_string fv = qn then return cur_bindings + else raise (NameMismatch (qn, (fv_to_string fv))) + | _ -> raise (SimpleMismatch (pat, tm)) in + let interp_type cur_bindings tm = + match inspect tm with + | Tv_Type _ -> return cur_bindings + | _ -> raise (SimpleMismatch (pat, tm)) in + let interp_app (p_hd p_arg: (p:pattern{p << pat})) cur_bindings tm = + match inspect tm with + | Tv_App hd (arg, _) -> + let? with_hd = interp_pattern_aux p_hd cur_bindings hd in + let? with_arg = interp_pattern_aux p_arg with_hd arg in + return with_arg + | _ -> raise (SimpleMismatch (pat, tm)) in + match pat with + | PVar var -> interp_var var cur_bindings tm + | PQn qn -> interp_qn qn cur_bindings tm + | PType -> interp_type cur_bindings tm + | PApp p_hd p_arg -> interp_app p_hd p_arg cur_bindings tm + +(** Match a pattern `pat` against a term. +Returns a result in the exception monad. **) +let interp_pattern (pat: pattern) : term -> Tac (match_res bindings) = + fun (tm: term) -> + let? rev_bindings = interp_pattern_aux pat [] tm in + return (List.Tot.Base.rev rev_bindings) + +(** Match a term `tm` against a pattern `pat`. +Raises an exception if the match fails. This is mostly useful for debugging: +use ``mgw`` to capture matches. **) +let match_term pat (tm : term) : Tac bindings = + match interp_pattern pat (norm_term [] tm) with + | Success bb -> bb + | Failure ex -> Tactics.fail (string_of_match_exception ex) + +/// Pattern-matching problems +/// ========================= +/// +/// Generalizing past single-term single-pattern problems, we obtain the +/// following notions of pattern-matching problems and solutions: + +let debug msg : Tac unit = () // print msg + +/// Definitions +/// ----------- + +let absvar = binding +type hypothesis = binding + +/// A matching problem is composed of holes (``mp_vars``), hypothesis patterns +/// (``mp_hyps``), and a goal pattern (``mp_goal``). + +noeq type matching_problem = + { mp_vars: list varname; + mp_hyps: list (varname & pattern); + mp_goal: option pattern } + +let string_of_matching_problem mp = + let vars = + String.concat ", " mp.mp_vars in + let hyps = + String.concat "\n " + (List.Tot.Base.map (fun (nm, pat) -> + nm ^ ": " ^ (string_of_pattern pat)) mp.mp_hyps) in + let goal = match mp.mp_goal with + | None -> "_" + | Some pat -> string_of_pattern pat in + "\n{ vars: " ^ vars ^ "\n" ^ + " hyps: " ^ hyps ^ "\n" ^ + " goal: " ^ goal ^ " }" + +/// A solution is composed of terms captured to mach the holes, and binders +/// captured to match hypothesis patterns. + +noeq type matching_solution = + { ms_vars: list (varname & term); + ms_hyps: list (varname & hypothesis) } + +let string_of_matching_solution ms = + let vars = + String.concat "\n " + (map (fun (varname, tm) -> + varname ^ ": " ^ (term_to_string tm)) ms.ms_vars) in + let hyps = + String.concat "\n " + (map (fun (nm, binding) -> + nm ^ ": " ^ (binding_to_string binding)) ms.ms_hyps) in + "\n{ vars: " ^ vars ^ "\n" ^ + " hyps: " ^ hyps ^ " }" + +(** Find a varname in an association list; fail if it can't be found. 
**) +let assoc_varname_fail (#b: Type) (key: varname) (ls: list (varname & b)) + : Tac b = + match List.Tot.Base.assoc key ls with + | None -> fail ("Not found: " ^ key) + | Some x -> x + +let ms_locate_hyp (a: Type) (solution: matching_solution) + (name: varname) : Tac hypothesis = + assoc_varname_fail name solution.ms_hyps + +let ms_locate_var (a: Type) (solution: matching_solution) + (name: varname) : Tac a = + unquote #a (assoc_varname_fail name solution.ms_vars) + +let ms_locate_unit (a: Type) _solution _binder_name : Tac unit = + () + +/// Resolution +/// ---------- +/// +/// Solving a matching problem is a two-steps process: find an initial +/// assignment for holes based on the goal pattern, then find a set of +/// hypotheses matching hypothesis patterns. +/// +/// Note that the implementation takes a continuation of type +/// ``matching_solution -> Tac a``. This continuation is needed because we want +/// users to be able to provide extra criteria on matching solutions (most +/// commonly, this criterion is that a particular tactic should run +/// successfuly). +/// +/// This makes it easy to implement a simple for of search through the context, +/// where one can find a hypothesis matching a particular predicate by +/// constructing a trivial matching problem and passing the predicate as the +/// continuation. + +(** Scan ``hypotheses`` for a match for ``pat`` that lets ``body`` succeed. + +``name`` is used to refer to the hypothesis matched in the final solution. +``part_sol`` includes bindings gathered while matching previous solutions. **) +let rec solve_mp_for_single_hyp #a + (name: varname) + (pat: pattern) + (hypotheses: list hypothesis) + (body: matching_solution -> Tac a) + (part_sol: matching_solution) + : Tac a = + match hypotheses with + | [] -> + fail #a "No matching hypothesis" + | h :: hs -> + or_else // Must be in ``Tac`` here to run `body` + (fun () -> + match interp_pattern_aux pat part_sol.ms_vars (type_of_binding h) with + | Failure ex -> + fail ("Failed to match hyp: " ^ (string_of_match_exception ex)) + | Success bindings -> + let ms_hyps = (name, h) :: part_sol.ms_hyps in + body ({ part_sol with ms_vars = bindings; ms_hyps = ms_hyps })) + (fun () -> + solve_mp_for_single_hyp name pat hs body part_sol) + +(** Scan ``hypotheses`` for matches for ``mp_hyps`` that lets ``body`` +succeed. **) +let rec solve_mp_for_hyps #a + (mp_hyps: list (varname & pattern)) + (hypotheses: list hypothesis) + (body: matching_solution -> Tac a) + (partial_solution: matching_solution) + : Tac a = + match mp_hyps with + | [] -> body partial_solution + | (name, pat) :: pats -> + solve_mp_for_single_hyp name pat hypotheses + (solve_mp_for_hyps pats hypotheses body) + partial_solution + +(** Solve a matching problem. + +The solution returned is constructed to ensure that the continuation ``body`` +succeeds: this implements the usual backtracking-match semantics. 
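+Concretely, candidate hypotheses are scanned in order for each hypothesis
+pattern, and if [body] fails for a given assignment the next candidate
+hypothesis is tried (see [solve_mp_for_single_hyp]).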
**) +let solve_mp #a (problem: matching_problem) + (hypotheses: list hypothesis) (goal: term) + (body: matching_solution -> Tac a) + : Tac a = + let goal_ps = + match problem.mp_goal with + | None -> { ms_vars = []; ms_hyps = [] } + | Some pat -> + match interp_pattern pat goal with + | Failure ex -> fail ("Failed to match goal: " ^ (string_of_match_exception ex)) + | Success bindings -> { ms_vars = bindings; ms_hyps = [] } in + solve_mp_for_hyps #a problem.mp_hyps hypotheses body goal_ps + +/// A DSL for pattern-matching +/// ========================== +/// +/// Using pattern-matching problems as defined above is relatively cumbersome, +/// so we now introduce a lightweight notation, in two steps: pattern notations, +/// and matching-problem notations. +/// +/// Pattern notations +/// ----------------- +/// +/// The first part of our pattern-matching syntax is pattern notations: we +/// provide a reflective function which constructs a pattern from a term: +/// variables are holes, free variables are constants, and applications are +/// application patterns. + +(* FIXME: MOVE *) +let name_of_namedv (x:namedv) : Tac string = + unseal (inspect_namedv x).ppname + +(** Compile a term `tm` into a pattern. **) +let rec pattern_of_term_ex tm : Tac (match_res pattern) = + match inspect tm with + | Tv_Var bv -> + return (PVar (name_of_namedv bv)) + | Tv_FVar fv + | Tv_UInst fv _ -> + let qn = fv_to_string fv in + return (PQn qn) + | Tv_Type _ -> + return PType + | Tv_App f (x, _) -> + let? fpat = pattern_of_term_ex f in + let? xpat = pattern_of_term_ex x in + return (PApp fpat xpat) + | _ -> raise (UnsupportedTermInPattern tm) + +(** β-reduce a term `tm`. +This is useful to remove needles function applications introduced by F*, like +``(fun a b c -> a) 1 2 3``. **) +let beta_reduce (tm: term) : Tac term = + norm_term [] tm + +(** Compile a term `tm` into a pattern. **) +let pattern_of_term tm : Tac pattern = + match pattern_of_term_ex tm with + | Success bb -> bb + | Failure ex -> Tactics.fail (string_of_match_exception ex) + +/// Problem notations +/// ----------------- +/// +/// We then introduce a DSL for matching problems, best explained on the +/// following example:: +/// +/// (fun (a b c: ①) (h1 h2 h3: hyp ②) (g: pm_goal ③) → ④) +/// +/// This notation is intended to express a pattern-matching problems with three +/// holes ``a``, ``b``, and ``c`` of type ①, matching hypotheses ``h1``, ``h2``, +/// and ``h3`` against pattern ② and the goal against the pattern ③. The body +/// of the notation (④) is then run with appropriate terms bound to ``a``, +/// ``b``, and ``c``, appropriate binders bound to ``h1``, ``h2``, and ``h3``, +/// and ``()`` bound to ``g``. +/// +/// We call these patterns ``abspat``s (abstraction patterns), and we provide +/// facilities to parse them into matching problems, and to run their bodies +/// against a particular matching solution. + +// We used to annotate variables with an explicit 'var' marker, but then that +// var annotation leaked into the types of other hypotheses due to type +// inference, requiring non-trivial normalization. 
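+// Plain binders are therefore classified as `var` holes by default; only the
+// `hyp` and `pm_goal` markers below are treated specially (see
+// `classify_abspat_binder`).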
+ +// let var (a: Type) = a +let hyp (a: Type) = binding +let pm_goal (a: Type) = unit + +let hyp_qn = `%hyp +let goal_qn = `%pm_goal + +noeq type abspat_binder_kind = +| ABKVar of typ +| ABKHyp +| ABKGoal + +let string_of_abspat_binder_kind = function + | ABKVar _ -> "varname" + | ABKHyp -> "hyp" + | ABKGoal -> "goal" + +noeq type abspat_argspec = + { asa_name: absvar; + asa_kind: abspat_binder_kind } + +// We must store this continuation, because recomputing it yields different +// names when the binders are re-opened. +type abspat_continuation = + list abspat_argspec & term + +let type_of_named_binder (nb : binder) : term = + nb.sort + +let classify_abspat_binder (b : binder): Tac (abspat_binder_kind & term) = + let varname = "v" in + let hyp_pat = PApp (PQn hyp_qn) (PVar varname) in + let goal_pat = PApp (PQn goal_qn) (PVar varname) in + + let typ = type_of_named_binder b in + match interp_pattern hyp_pat typ with + | Success [(_, hyp_typ)] -> ABKHyp, hyp_typ + | Success _ -> fail "classifiy_abspat_binder: impossible (1)" + | Failure _ -> + match interp_pattern goal_pat typ with + | Success [(_, goal_typ)] -> ABKGoal, goal_typ + | Success _ -> fail "classifiy_abspat_binder: impossible (2)" + | Failure _ -> ABKVar typ, typ + +(** Split an abstraction `tm` into a list of binders and a body. **) +let rec binders_and_body_of_abs tm : Tac (list binder & term) = + match inspect tm with + | Tv_Abs binder tm -> + let binders, body = binders_and_body_of_abs tm in + binder :: binders, body + | _ -> [], tm + +let cleanup_abspat (t: term) : Tac term = + norm_term [] t + + +let name_of_named_binder (nb : binder) : Tac string = + unseal nb.ppname + +(** Parse a notation into a matching problem and a continuation. + +Pattern-matching notations are of the form ``(fun binders… -> continuation)``, +where ``binders`` are of one of the forms ``var …``, ``hyp …``, or ``goal …``. +``var`` binders are typed holes to be used in other binders; ``hyp`` binders +indicate a pattern to be matched against hypotheses; and ``goal`` binders match +the goal. + + +A reduction phase is run to ensure that the pattern looks reasonable; it is +needed because F* tends to infer arguments in β-expanded form. + +The continuation returned can't directly be applied to a pattern-matching +solution; see ``interp_abspat_continuation`` below for that. 
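+As an illustrative example, an abspat such as
+[fun (a: Type0) (h: hyp (squash a)) -> ...] yields a problem with one hole
+["a"], one hypothesis pattern for [h], and a continuation closing over the
+body.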
**) +let matching_problem_of_abs (tm: term) + : Tac (matching_problem & abspat_continuation) = + + let binders, body = binders_and_body_of_abs (cleanup_abspat tm) in + debug ("Got binders: " ^ (String.concat ", " + (map (fun b -> name_of_named_binder b <: Tac string) binders))); + + let classified_binders : list (binder & string & abspat_binder_kind & typ) = + map (fun binder -> + let bv_name = name_of_named_binder binder in + debug ("Got binder: " ^ bv_name ^ "; type is " ^ + term_to_string (type_of_named_binder binder)); + let binder_kind, typ = classify_abspat_binder binder in + (binder, bv_name, binder_kind, typ)) + binders in + + let problem = + fold_left + (fun problem (binder, bv_name, binder_kind, typ) -> + debug ("Compiling binder " ^ name_of_named_binder binder ^ + ", classified as " ^ string_of_abspat_binder_kind binder_kind ^ + ", with type " ^ term_to_string typ); + match binder_kind with + | ABKVar _ -> { problem with mp_vars = bv_name :: problem.mp_vars } + | ABKHyp -> { problem with mp_hyps = (bv_name, (pattern_of_term typ)) + :: problem.mp_hyps } + | ABKGoal -> { problem with mp_goal = Some (pattern_of_term typ) }) + ({ mp_vars = []; mp_hyps = []; mp_goal = None }) + classified_binders in + + let continuation = + let abspat_argspec_of_binder xx : Tac abspat_argspec = + match xx with | (binder, xx, binder_kind, yy) -> + { asa_name = binder_to_binding binder; asa_kind = binder_kind } in + (map abspat_argspec_of_binder classified_binders, tm) in + + let mp = + { mp_vars = List.Tot.Base.rev #varname problem.mp_vars; + mp_hyps = List.Tot.Base.rev #(varname & pattern) problem.mp_hyps; + mp_goal = problem.mp_goal } in + + debug ("Got matching problem: " ^ (string_of_matching_problem mp)); + mp, continuation + +/// Continuations +/// ------------- +/// +/// Parsing an abspat yields a matching problem and a continuation of type +/// ``abspat_continuation``, which is essentially just a list of binders and a +/// term (the body of the abstraction pattern). + +(** Get the (quoted) type expected by a specific kind of abspat binder. **) +let arg_type_of_binder_kind binder_kind : Tac term = + match binder_kind with + | ABKVar typ -> typ + | ABKHyp -> `binder + | ABKGoal -> `unit + +(** Retrieve the function used to locate a value for a given abspat binder. **) +let locate_fn_of_binder_kind binder_kind = + match binder_kind with + | ABKVar _ -> `ms_locate_var + | ABKHyp -> `ms_locate_hyp + | ABKGoal -> `ms_locate_unit + +(** Construct a term fetching the value of an abspat argument from a quoted +matching solution ``solution_term``. **) +let abspat_arg_of_abspat_argspec solution_term (argspec: abspat_argspec) + : Tac term = + let loc_fn = locate_fn_of_binder_kind argspec.asa_kind in + let name_tm = pack (Tv_Const (C_String (unseal argspec.asa_name.ppname))) in + let locate_args = [(arg_type_of_binder_kind argspec.asa_kind, Q_Explicit); + (solution_term, Q_Explicit); (name_tm, Q_Explicit)] in + mk_app loc_fn locate_args + +(** Specialize a continuation of type ``abspat_continuation``. +This constructs a fully applied version of `continuation`, but it requires a +quoted solution to be passed in. 
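+Each argument is looked up in the quoted solution and hoisted into a
+let-binding (see [hoist_and_apply]) before the body is applied to the
+resulting variables.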
**) + +let rec hoist_and_apply (head:term) (arg_terms:list term) (hoisted_args:list argv) + : Tac term = + match arg_terms with + | [] -> mk_app head (List.rev hoisted_args) + | arg_term::rest -> + let n = List.Tot.length hoisted_args in + //let bv = fresh_bv_named ("x" ^ (string_of_int n)) in + let nb : binder = { + ppname = seal ("x" ^ string_of_int n); + sort = pack Tv_Unknown; + uniq = fresh (); + qual = Q_Explicit; + attrs = [] ; + } + in + pack (Tv_Let false [] nb arg_term (hoist_and_apply head rest ((pack (Tv_Var (binder_to_namedv nb)), Q_Explicit)::hoisted_args))) + +let specialize_abspat_continuation' (continuation: abspat_continuation) + (solution_term:term) + : Tac term = + let mk_arg_term argspec = + abspat_arg_of_abspat_argspec solution_term argspec in + let argspecs, body = continuation in + hoist_and_apply body (map mk_arg_term argspecs) [] + +(** Specialize a continuation of type ``abspat_continuation``. This yields a +quoted function taking a matching solution and running its body with appropriate +bindings. **) +let specialize_abspat_continuation (continuation: abspat_continuation) + : Tac term = + let solution_binder = fresh_binder (`matching_solution) in + let solution_term = pack (Tv_Var (binder_to_namedv solution_binder)) in + let applied = specialize_abspat_continuation' continuation solution_term in + let thunked = pack (Tv_Abs solution_binder applied) in + debug ("Specialized into " ^ (term_to_string thunked)); + let normalized = beta_reduce thunked in + debug ("… which reduces to " ^ (term_to_string normalized)); + thunked + +(** Interpret a continuation of type ``abspat_continuation``. +This yields a function taking a matching solution and running the body of the +continuation with appropriate bindings. **) +let interp_abspat_continuation (a:Type0) (continuation: abspat_continuation) + : Tac (matching_solution -> Tac a) = + let applied = specialize_abspat_continuation continuation in + unquote #(matching_solution -> Tac a) applied + +/// Putting it all together +/// ======================= +/// +/// We now have all we need to use pattern-matching, short of a few convenience functions: + +(** Construct a matching problem from an abspat. **) +let interp_abspat #a (abspat: a) + : Tac (matching_problem & abspat_continuation) = + matching_problem_of_abs (quote abspat) + +(** Construct an solve a matching problem. +This higher-order function isn't very usable on its own — it's mostly a +convenience function to avoid duplicating the problem-parsing code. **) +let match_abspat #b #a (abspat: a) + (k: abspat_continuation -> Tac (matching_solution -> Tac b)) + : Tac b = + let goal = cur_goal () in + let hypotheses = vars_of_env (cur_env ()) in + let problem, continuation = interp_abspat abspat in + solve_mp problem hypotheses goal (k continuation) + +(** Inspect the matching problem produced by parsing an abspat. **) +let inspect_abspat_problem #a (abspat: a) : Tac matching_problem = + fst (interp_abspat #a abspat) + +(** Inspect the matching solution produced by parsing and solving an abspat. **) +let inspect_abspat_solution #a (abspat: a) : Tac matching_solution = + match_abspat abspat (fun _ -> (fun solution -> solution <: Tac _) <: Tac _) + +let tpair #a #b (x : a) : Tac (b -> Tac (a & b)) = + fun (y: b) -> (x, y) + +/// Our first convenient entry point! +/// +/// This takes an abspat, parses it, computes a solution, and runs the body of +/// the abspat with appropriate bindings. 
It implements what others call ‘lazy’ +/// pattern-matching, so called because the success of the body of the pattern +/// isn't taken into account when deciding whether a particular set of matched +/// hypothesis should be retained. In other words, it picks the first matching +/// set of hypotheses, and commits to it. +/// +/// If you think that sounds like a greedy algorithm, it does. That's why it's +/// called ‘gpm’ below: greedy pattern-matching. + +(** Solve a greedy pattern-matching problem and run its continuation. +This if for pattern-matching problems in the ``Tac`` effect. **) +let gpm #b #a (abspat: a) () : Tac b = + let continuation, solution = match_abspat abspat tpair in + interp_abspat_continuation b continuation solution + +/// And here's the non-greedy version of the same. It's informative to compare +/// the implementations! This one will only find assignments that let the body +/// run successfuly. + +(** Solve a greedy pattern-matching problem and run its continuation. +This if for pattern-matching problems in the ``Tac`` effect. **) +let pm #b #a (abspat: a) : Tac b = + match_abspat abspat (interp_abspat_continuation b) + +/// Examples +/// ======== +/// +/// We conclude with a small set of examples. + +/// Simple examples +/// --------------- +/// +/// Here's the example from the intro, which we can now run! + +let fetch_eq_side' #a : Tac (term & term) = + gpm (fun (left right: a) (g: pm_goal (squash (left == right))) -> + (quote left, quote right)) () + +(* let _ = *) +(* assert_by_tactic (1 + 1 == 2) *) +(* (fun () -> let l, r = fetch_eq_side' #int in *) +(* print (term_to_string l ^ " / " ^ term_to_string r)) *) + +(* let _ = *) +(* assert_by_tactic (1 + 1 == 2) *) +(* (gpm (fun (left right: int) (g: pm_goal (squash (left == right))) -> *) +(* let l, r = quote left, quote right in *) +(* print (term_to_string l ^ " / " ^ term_to_string r) <: Tac unit)) *) + +/// Commenting out the following example and comparing ``pm`` and ``gpm`` can be +/// instructive: + +// let test_bt (a: Type0) (b: Type0) (c: Type0) (d: Type0) = +// assert_by_tactic ((a ==> d) ==> (b ==> d) ==> (c ==> d) ==> a ==> d) +// (fun () -> repeat' implies_intro'; +// gpm (fun (a b: Type0) (h: hyp (a ==> b)) -> +// print (binder_to_string h); +// fail "fail here" <: Tac unit); +// qed ()) + +/// A real-life example +/// ------------------- +/// +/// The following tactics combines mutliple simple building blocks to solve a +/// goal. Each use of ``lpm`` recognizes a specific pattern; and each tactic is +/// tried in succession, until one succeeds. The whole process is repeated as +/// long as at least one tactic succeeds. 
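+/// As a minimal illustrative sketch (the name ``clear_squashed_hyp`` is made
+/// up here), one such building block could look like:
+///
+/// .. code:: fstar
+///
+///    let clear_squashed_hyp () : Tac unit =
+///      gpm #unit (fun (a: Type) (h: hyp (squash a)) ->
+///                   clear h <: Tac unit) ()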
+ +(* let example (#a:Type0) (#b:Type0) (#c:Type0) :unit = *) +(* assert_by_tactic (a /\ b ==> c == b ==> c) *) +(* (fun () -> repeat' (fun () -> *) +(* gpm #unit (fun (a: Type) (h: hyp (squash a)) -> *) +(* clear h <: Tac unit) `or_else` *) +(* (fun () -> gpm #unit (fun (a b: Type0) (g: pm_goal (squash (a ==> b))) -> *) +(* implies_intro' () <: Tac unit) `or_else` *) +(* (fun () -> gpm #unit (fun (a b: Type0) (h: hyp (a /\ b)) -> *) +(* and_elim' h <: Tac unit) `or_else` *) +(* (fun () -> gpm #unit (fun (a b: Type0) (h: hyp (a == b)) (g: pm_goal (squash a)) -> *) +(* rewrite h <: Tac unit) `or_else` *) +(* (fun () -> gpm #unit (fun (a: Type0) (h: hyp a) (g: pm_goal (squash a)) -> *) +(* exact_hyp a h <: Tac unit) ()))))); *) +(* qed ()) *) + +/// Possible extensions +/// =================== +/// +/// The following tasks would make for interesting extensions of this +/// experiment: +/// +/// - Handling multiple goal patterns (easy) +/// - Extending the matching language (match under binders?) +/// - Introducing specialized syntax +/// - Thinking about a sound way of supporting ‘match-anything’ patterns in +/// abspat notations +/// - Using the normalizer to partially-evaluated pattern-matching tactics +/// - Migrating to a compile-time version of ``quote`` diff --git a/stage0/ulib/FStar.Tactics.Print.fst b/stage0/ulib/FStar.Tactics.Print.fst new file mode 100644 index 00000000000..77a248e6542 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Print.fst @@ -0,0 +1,111 @@ +module FStar.Tactics.Print + +open FStar.Reflection.V2 +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.V2.Derived +open FStar.Tactics.NamedView + +let namedv_to_string (x:namedv) : Tac string= + unseal x.ppname ^ "#" ^ string_of_int x.uniq + +private +let paren (s:string) : string = "(" ^ s ^ ")" + +(* TODO: making this a local definition in print_list fails to extract. 
*) +private +let rec print_list_aux (f:'a -> Tac string) (xs:list 'a) : Tac string = + match xs with + | [] -> "" + | [x] -> f x + | x::xs -> f x ^ "; " ^ print_list_aux f xs + +private +let print_list (f:'a -> Tac string) (l:list 'a) : Tac string = + "[" ^ print_list_aux f l ^ "]" + +let rec universe_to_ast_string (u:universe) : Tac string = + match inspect_universe u with + | Uv_Zero -> "Uv_Zero" + | Uv_Succ u -> "Uv_Succ" ^ paren (universe_to_ast_string u) + | Uv_Max us -> "Uv_Max" ^ print_list universe_to_ast_string us + | Uv_BVar n -> "Uv_BVar" ^ paren (string_of_int n) + | Uv_Name i -> "Uv_Name" ^ paren (fst i) + | Uv_Unif _ -> "Uv_Unif" + | Uv_Unk -> "Uv_Unk" + +let universes_to_ast_string (us:universes) : Tac string = + print_list universe_to_ast_string us + +let rec term_to_ast_string (t:term) : Tac string = + match inspect t with + | Tv_Var bv -> "Tv_Var " ^ namedv_to_string bv + | Tv_BVar bv -> "Tv_BVar " ^ bv_to_string bv + | Tv_FVar fv -> "Tv_FVar " ^ fv_to_string fv + | Tv_UInst fv us -> + "Tv_UInst" ^ paren (fv_to_string fv ^ ", " ^ universes_to_ast_string us) + | Tv_App hd (a, _) -> "Tv_App " ^ paren (term_to_ast_string hd ^ ", " ^ term_to_ast_string a) + | Tv_Abs x e -> "Tv_Abs " ^ paren (binder_to_string x ^ ", " ^ term_to_ast_string e) + | Tv_Arrow x c -> "Tv_Arrow " ^ paren (binder_to_string x ^ ", " ^ comp_to_ast_string c) + | Tv_Type u -> "Type" ^ paren (universe_to_ast_string u) + | Tv_Refine x e -> "Tv_Refine " ^ paren (binder_to_string x ^ ", " ^ term_to_ast_string e) + | Tv_Const c -> const_to_ast_string c + | Tv_Uvar i _ -> "Tv_Uvar " ^ string_of_int i + | Tv_Let recf _ x e1 e2 -> + "Tv_Let " ^ paren (string_of_bool recf ^ ", " ^ + binder_to_string x ^ ", " ^ + term_to_ast_string e1 ^ ", " ^ + term_to_ast_string e2) + | Tv_Match e ret_opt brs -> + "Tv_Match " ^ + paren ( + term_to_ast_string e ^ + ", " ^ + match_returns_to_string ret_opt ^ + ", " ^ + branches_to_ast_string brs) + | Tv_AscribedT e t _ use_eq -> "Tv_AscribedT " ^ paren (term_to_ast_string e ^ ", " ^ term_to_ast_string t ^ ", " ^ string_of_bool use_eq) + | Tv_AscribedC e c _ use_eq -> "Tv_AscribedC " ^ paren (term_to_ast_string e ^ ", " ^ comp_to_ast_string c ^ ", " ^ string_of_bool use_eq) + | Tv_Unknown -> "_" + | Tv_Unsupp -> "" + +and match_returns_to_string (ret_opt:option match_returns_ascription) : Tac string = + let tacopt_to_string tacopt : Tac string = + match tacopt with + | None -> "" + | Some tac -> " by " ^ (term_to_ast_string tac) in + match ret_opt with + | None -> "" + | Some (b, asc) -> + (binder_to_string b ^ " ") + ^ + (match asc with + | Inl t, tacopt, _ -> (term_to_ast_string t) ^ (tacopt_to_string tacopt) + | Inr c, tacopt, _ -> (comp_to_ast_string c) ^ (tacopt_to_string tacopt)) + +and branches_to_ast_string (brs:list branch) : Tac string = + print_list branch_to_ast_string brs + +and branch_to_ast_string (b:branch) : Tac string = + let p, e = b in + paren ("_pat, " ^ term_to_ast_string e) + +and comp_to_ast_string (c:comp) : Tac string = + match inspect_comp c with + | C_Total t -> "Tot " ^ term_to_ast_string t + | C_GTotal t -> "GTot " ^ term_to_ast_string t + | C_Lemma pre post _ -> "Lemma " ^ term_to_ast_string pre ^ " " ^ term_to_ast_string post + | C_Eff us eff res _ _ -> + "Effect" ^ "<" ^ universes_to_ast_string us ^ "> " ^ paren (implode_qn eff ^ ", " ^ term_to_ast_string res) + +and const_to_ast_string (c:vconst) : Tac string = + match c with + | C_Unit -> "C_Unit" + | C_Int i -> "C_Int " ^ string_of_int i + | C_True -> "C_True" + | C_False -> "C_False" + | C_String 
s -> "C_String " ^ s + | C_Range _ -> "C_Range _" + | C_Reify -> "C_Reify" + | C_Reflect name -> "C_Reflect " ^ implode_qn name + | C_Real r -> "C_Real \"" ^ r ^ "\"" diff --git a/stage0/ulib/FStar.Tactics.Print.fsti b/stage0/ulib/FStar.Tactics.Print.fsti new file mode 100644 index 00000000000..38efbf930bb --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Print.fsti @@ -0,0 +1,32 @@ +module FStar.Tactics.Print + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data +open FStar.Tactics.Effect + +[@@plugin] +val namedv_to_string (x:namedv) : Tac string + +[@@plugin] +val universe_to_ast_string (u:universe) : Tac string + +[@@plugin] +val universes_to_ast_string (us:universes) : Tac string + +[@@plugin] +val term_to_ast_string (t:term) : Tac string + +[@@plugin] +val match_returns_to_string (ret_opt:option match_returns_ascription) : Tac string + +[@@plugin] +val branches_to_ast_string (brs:list branch) : Tac string + +[@@plugin] +val branch_to_ast_string (b:branch) : Tac string + +[@@plugin] +val comp_to_ast_string (c:comp) : Tac string + +[@@plugin] +val const_to_ast_string (c:vconst) : Tac string diff --git a/stage0/ulib/FStar.Tactics.Result.fsti b/stage0/ulib/FStar.Tactics.Result.fsti new file mode 100644 index 00000000000..85dd4bf3f3a --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Result.fsti @@ -0,0 +1,3 @@ +module FStar.Tactics.Result + +include FStar.Stubs.Tactics.Result diff --git a/stage0/ulib/FStar.Tactics.SMT.fst b/stage0/ulib/FStar.Tactics.SMT.fst new file mode 100644 index 00000000000..0d00a17c40c --- /dev/null +++ b/stage0/ulib/FStar.Tactics.SMT.fst @@ -0,0 +1,35 @@ +module FStar.Tactics.SMT + +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Stubs.VConfig + +(* Alias to just use the current vconfig *) +let smt_sync () : Tac unit = t_smt_sync (get_vconfig ()) + +(* smt_sync': as smt_sync, but using a particular fuel/ifuel *) +let smt_sync' (fuel ifuel : nat) : Tac unit = + let vcfg = get_vconfig () in + let vcfg' = { vcfg with initial_fuel = fuel; max_fuel = fuel + ; initial_ifuel = ifuel; max_ifuel = ifuel } + in + t_smt_sync vcfg' + +(* Getting/setting solver configuration *) + +let get_rlimit () : Tac int = (get_vconfig()).z3rlimit +let set_rlimit (v : int) : Tac unit = set_vconfig { get_vconfig () with z3rlimit = v } + +let get_initial_fuel () : Tac int = (get_vconfig ()).initial_fuel +let get_initial_ifuel () : Tac int = (get_vconfig ()).initial_ifuel +let get_max_fuel () : Tac int = (get_vconfig ()).max_fuel +let get_max_ifuel () : Tac int = (get_vconfig ()).max_ifuel + +let set_initial_fuel (v : int) : Tac unit = set_vconfig { get_vconfig () with initial_fuel = v } +let set_initial_ifuel (v : int) : Tac unit = set_vconfig { get_vconfig () with initial_ifuel = v } +let set_max_fuel (v : int) : Tac unit = set_vconfig { get_vconfig () with max_fuel = v } +let set_max_ifuel (v : int) : Tac unit = set_vconfig { get_vconfig () with max_ifuel = v } + +(* Set both min and max *) +let set_fuel (v : int) : Tac unit = set_vconfig { get_vconfig () with initial_fuel = v; max_fuel = v } +let set_ifuel (v : int) : Tac unit = set_vconfig { get_vconfig () with initial_ifuel = v; max_ifuel = v } diff --git a/stage0/ulib/FStar.Tactics.SMT.fsti b/stage0/ulib/FStar.Tactics.SMT.fsti new file mode 100644 index 00000000000..c9c8719cbd3 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.SMT.fsti @@ -0,0 +1,42 @@ +module FStar.Tactics.SMT + +open FStar.Tactics.Effect + +(* Alias to just use the current vconfig *) +[@@plugin] +val smt_sync () : Tac unit + +(* 
smt_sync': as smt_sync, but using a particular fuel/ifuel *) +[@@plugin] +val smt_sync' (fuel ifuel : nat) : Tac unit + +(* Getting/setting solver configuration *) + +[@@plugin] +val get_rlimit () : Tac int +[@@plugin] +val set_rlimit (v : int) : Tac unit + +[@@plugin] +val get_initial_fuel () : Tac int +[@@plugin] +val get_initial_ifuel () : Tac int +[@@plugin] +val get_max_fuel () : Tac int +[@@plugin] +val get_max_ifuel () : Tac int + +[@@plugin] +val set_initial_fuel (v : int) : Tac unit +[@@plugin] +val set_initial_ifuel (v : int) : Tac unit +[@@plugin] +val set_max_fuel (v : int) : Tac unit +[@@plugin] +val set_max_ifuel (v : int) : Tac unit + +(* Set both min and max *) +[@@plugin] +val set_fuel (v : int) : Tac unit +[@@plugin] +val set_ifuel (v : int) : Tac unit diff --git a/stage0/ulib/FStar.Tactics.Simplifier.fst b/stage0/ulib/FStar.Tactics.Simplifier.fst new file mode 100644 index 00000000000..e0fae7dbb1d --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Simplifier.fst @@ -0,0 +1,309 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Simplifier + +open FStar.Tactics.V2 +open FStar.Reflection.V2.Formula +open FStar.Reflection.Const + +(* A correct-by-construction logical simplifier + * + * No calling `norm [simpl]`, that's cheating! + *) + +val lem_iff_refl : #a:Type -> Lemma (a <==> a) +let lem_iff_refl #a = () + +val lem_iff_trans : #a:Type -> #b:Type -> #c:Type -> squash (a <==> b) -> squash (b <==> c) + -> Lemma (a <==> c) +let lem_iff_trans #a #b #c _ _ = () + +let tiff () : Tac unit = + apply_lemma (`lem_iff_refl) + +let step () : Tac unit = + apply_lemma (`lem_iff_trans) + +val lem_true_and_p : #p:Type -> Lemma ((True /\ p) <==> p) +let lem_true_and_p #p = () + +val lem_p_and_true : #p:Type -> Lemma ((p /\ True) <==> p) +let lem_p_and_true #p = () + +val lem_false_and_p : #p:Type -> Lemma ((False /\ p) <==> False) +let lem_false_and_p #p = () + +val lem_p_and_false : #p:Type -> Lemma ((p /\ False) <==> False) +let lem_p_and_false #p = () + +val lem_true_or_p : #p:Type -> Lemma ((True \/ p) <==> True) +let lem_true_or_p #p = () + +val lem_p_or_true : #p:Type -> Lemma ((p \/ True) <==> True) +let lem_p_or_true #p = () + +val lem_false_or_p : #p:Type -> Lemma ((False \/ p) <==> p) +let lem_false_or_p #p = () + +val lem_p_or_false : #p:Type -> Lemma ((p \/ False) <==> p) +let lem_p_or_false #p = () + +val lem_true_imp_p : #p:Type -> Lemma ((True ==> p) <==> p) +let lem_true_imp_p #p = () + +val lem_p_imp_true : #p:Type -> Lemma ((p ==> True) <==> True) +let lem_p_imp_true #p = () + +val lem_false_imp_p : #p:Type -> Lemma ((False ==> p) <==> True) +let lem_false_imp_p #p = () + +val lem_fa_true : #a:Type -> Lemma ((forall (x:a). True) <==> True) +let lem_fa_true #a = () + +val lem_fa_false : #a:Type -> (x:a) -> Lemma ((forall (x:a). False) <==> False) +let lem_fa_false #a x = () + +val lem_ex_false : #a:Type -> Lemma ((exists (x:a). 
False) <==> False) +let lem_ex_false #a = () + +val lem_ex_true : #a:Type -> (x:a) -> Lemma ((exists (x:a). True) <==> True) +let lem_ex_true #a x = () + +val lem_neg_false : unit -> Lemma (~False <==> True) +let lem_neg_false () = () + +val lem_neg_true : unit -> Lemma (~True <==> False) +let lem_neg_true () = () + +val lem_true_iff_p : #p:Type -> Lemma ((True <==> p) <==> p) +let lem_true_iff_p #p = () + +val lem_false_iff_p : #p:Type -> Lemma ((False <==> p) <==> ~p) +let lem_false_iff_p #p = () + +val lem_p_iff_true : #p:Type -> Lemma ((p <==> True) <==> p) +let lem_p_iff_true #p = () + +val lem_p_iff_false : #p:Type -> Lemma ((p <==> False) <==> ~p) +let lem_p_iff_false #p = () + +val and_cong (#p #q #p' #q' : Type) : squash (p <==> p') -> + squash (q <==> q') -> + Lemma ((p /\ q) <==> (p' /\ q')) +let and_cong #p #q #p' #q' _ _ = () + +val or_cong (#p #q #p' #q' : Type) : squash (p <==> p') -> + squash (q <==> q') -> + Lemma ((p \/ q) <==> (p' \/ q')) +let or_cong #p #q #p' #q' _ _ = () + +val imp_cong (#p #q #p' #q' : Type) : squash (p <==> p') -> + squash (q <==> q') -> + Lemma ((p ==> q) <==> (p' ==> q')) +let imp_cong #p #q #p' #q' _ _ = () + +val fa_cong (#a : Type) (#p #q : a -> Type) : + (x:a -> squash (p x <==> q x)) -> + Lemma ((forall (x:a). p x) <==> (forall (x:a). q x)) +let fa_cong #a #p #q f = + assert ((forall (x:a). p x) <==> (forall (x:a). q x)) by ( + split(); + let do1 () : Tac unit = + let _ = l_intros () in + let t = quote f in + let x = nth_var (-1) in + let bb = pose (mk_e_app t [binding_to_term x]) in + () + in + iseq [do1; do1] + ) + +val ex_cong (#a : Type) (#p #q : a -> Type) : + (x:a -> squash (p x <==> q x)) -> + Lemma ((exists (x:a). p x) <==> (exists (x:a). q x)) +let ex_cong #a #p #q f = + assert ((exists (x:a). p x) <==> (exists (x:a). 
q x)) by (assume_safe (fun () -> + split(); + let do1 () : Tac unit = + let [ex] = l_intros () in + let (b, pf) = elim_exists (binding_to_term ex) in + let t = quote f in + let bb = pose (mk_e_app t [binding_to_term b]) in + () + in + iseq [do1; do1] + )) + +val neg_cong (#p #q:Type) : squash (p <==> q) -> Lemma (~p <==> ~q) +let neg_cong #p #q _ = () + +val iff_cong (#p #p' #q #q' : Type) : squash (p <==> p') -> squash (q <==> q') -> Lemma ((p <==> q) <==> (p' <==> q')) +let iff_cong #p #p' #q #q' _ _ = () + +// Absolutely hideous, do something about normalization +val is_true : term -> Tac bool +let is_true t = + begin match term_as_formula' t with + | True_ -> true + | _ -> begin match inspect t with + | Tv_App l r -> + begin match inspect l with + | Tv_Abs b t -> + begin match term_as_formula' t with + | True_ -> true + | _ -> false + end + | _ -> false + end + | _ -> false + end + end + +val is_false : term -> Tac bool +let is_false t = + begin match term_as_formula' t with + | False_ -> true + | _ -> begin match inspect t with + | Tv_App l r -> + begin match inspect l with + | Tv_Abs b t -> + begin match term_as_formula' t with + | False_ -> true + | _ -> false + end + | _ -> false + end + | _ -> false + end + end + +val inhabit : unit -> Tac unit +let inhabit () = + let t = cur_goal () in + match inspect t with + | Tv_FVar fv -> + let qn = inspect_fv fv in + if qn = int_lid then exact (`42) + else if qn = bool_lid then exact (`true) + else if qn = unit_lid then exact (`()) + else fail "" + | _ -> fail "" + +val simplify_point : unit -> Tac unit +val recurse : unit -> Tac unit + +let rec simplify_point () = + recurse (); + norm []; + let g = cur_goal () in + let f = term_as_formula g in + match f with + | Iff l r -> + begin match term_as_formula' l with + | And p q -> + if is_true p then apply_lemma (`lem_true_and_p) + else if is_true q then apply_lemma (`lem_p_and_true) + else if is_false p then apply_lemma (`lem_false_and_p) + else if is_false q then apply_lemma (`lem_p_and_false) + else tiff () + + | Or p q -> + if is_true p then apply_lemma (`lem_true_or_p) + else if is_true q then apply_lemma (`lem_p_or_true) + else if is_false p then apply_lemma (`lem_false_or_p) + else if is_false q then apply_lemma (`lem_p_or_false) + else tiff () + + | Implies p q -> + if is_true p then apply_lemma (`lem_true_imp_p) + else if is_true q then apply_lemma (`lem_p_imp_true) + else if is_false p then apply_lemma (`lem_false_imp_p) + else tiff () + + | Forall _b _sort p -> + if is_true p then apply_lemma (`lem_fa_true) + else if is_false p then or_else (fun () -> apply_lemma (`lem_fa_false); inhabit ()) tiff + else tiff () + + | Exists _b _sort p -> + if is_false p then apply_lemma (`lem_ex_false) + else if is_true p then or_else (fun () -> apply_lemma (`lem_ex_true); inhabit ()) tiff + else tiff () + + | Not p -> + if is_true p then apply_lemma (`lem_neg_true) + else if is_false p then apply_lemma (`lem_neg_false) + else tiff () + + | Iff p q -> + // After applying the lemma, we might still have more simpl to do, + // so add an intermediate step. 
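+        // For instance, (False <==> p) becomes ~p, which may simplify further;
+        // the lem_iff_trans link introduced by step () and the trailing
+        // recursive simplify_point () take care of that.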
+ step (); + if is_true p then apply_lemma (`lem_true_iff_p) + else if is_true q then apply_lemma (`lem_p_iff_true) + else if is_false p then apply_lemma (`lem_false_iff_p) + else if is_false q then apply_lemma (`lem_p_iff_false) + else tiff (); + simplify_point () + + | _ -> tiff () + end + | _ -> fail "simplify_point: failed precondition: goal should be `g <==> ?u`" + +and recurse () : Tac unit = + step (); + norm []; + let g = cur_goal () in + let f = term_as_formula g in + match f with + | Iff l r -> + begin match term_as_formula' l with + | And _ _ -> + seq (fun () -> apply_lemma (`and_cong)) simplify_point + + | Or _ _ -> + seq (fun () -> apply_lemma (`or_cong)) simplify_point + + | Implies _ _ -> + seq (fun () -> apply_lemma (`imp_cong)) simplify_point + + | Forall _ _ _ -> + apply_lemma (`fa_cong); + let _ = intro () in + simplify_point () + + | Exists _ _ _ -> + apply_lemma (`ex_cong); + let _ = intro () in + simplify_point () + + | Not _ -> + apply_lemma (`neg_cong); + simplify_point () + + | Iff _ _ -> + seq (fun () -> apply_lemma (`iff_cong)) simplify_point + + | _ -> tiff () + end + | _ -> fail "recurse: failed precondition: goal should be `g <==> ?u`" + +val equiv : #p:Type -> #q:Type -> squash (p <==> q) -> squash q -> Lemma p +let equiv #p #q _ _ = () + +let simplify () : Tac unit = + apply_lemma (`equiv); + simplify_point () diff --git a/stage0/ulib/FStar.Tactics.SyntaxHelpers.fst b/stage0/ulib/FStar.Tactics.SyntaxHelpers.fst new file mode 100644 index 00000000000..73077a511d1 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.SyntaxHelpers.fst @@ -0,0 +1,19 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.SyntaxHelpers + +(* This module is a temporary for Meta-F* migration *) +include FStar.Tactics.V1.SyntaxHelpers diff --git a/stage0/ulib/FStar.Tactics.TypeRepr.fst b/stage0/ulib/FStar.Tactics.TypeRepr.fst new file mode 100644 index 00000000000..5fb2055bf7d --- /dev/null +++ b/stage0/ulib/FStar.Tactics.TypeRepr.fst @@ -0,0 +1,170 @@ +module FStar.Tactics.TypeRepr + +//#set-options "--print_implicits --print_full_names --print_universes" + +open FStar.Tactics.V2.Bare + +let add_suffix (s:string) (nm:name) : name = + explode_qn (implode_qn nm ^ s) + +let unitv_ : term = `() +let unitt_ : term = `(unit) +let empty_ : term = `(empty) +let either_ (a b : term) : term = `(either (`#a) (`#b)) +let tuple2_ (a b : term) : term = `(tuple2 (`#a) (`#b)) +let mktuple2_ (a b : term) : term = `(Mktuple2 (`#a) (`#b)) + +let get_inductive_typ (nm:string) : Tac (se:sigelt_view{Sg_Inductive? se}) = + let e = top_env () in + let se = lookup_typ e (explode_qn nm) in + match se with + | None -> fail "ctors_of_typ: type not found" + | Some se -> + let sev = inspect_sigelt se in + if Sg_Inductive? 
sev then + sev + else + fail "ctors_of_typ: not an inductive type" + +let alg_ctor (ty : typ) : Tac typ = + let tys, c = collect_arr ty in + Tactics.Util.fold_right (fun ty acc -> tuple2_ ty acc) tys unitt_ + +[@@plugin] +let generate_repr_typ (params : binders) (ctors : list ctor) : Tac typ = + let ctor_typs = Util.map (fun (_, ty) -> alg_ctor ty) ctors in + let alternative_typ = + Util.fold_right (fun ty acc -> either_ ty acc) ctor_typs empty_ in + alternative_typ + +(* Expects a goal of type [t -> t_repr] *) +[@@plugin] +let generate_down () : Tac unit = + let b = intro () in + let cases = t_destruct b in + cases |> Util.iteri #(fv & nat) (fun i (c, n) -> + let bs = repeatn n (fun _ -> intro ()) in + let _b_eq = intro () in + let sol = Util.fold_right (fun (b:binding) acc -> mktuple2_ b acc) bs unitv_ in + let _ = repeatn i (fun _ -> apply (`Inr)) in + apply (`Inl); + exact sol + ) + +let rec get_apply_tuple (b:binding) : Tac (list binding) = + let hd, args = collect_app b.sort in + match inspect hd, args with + | Tv_UInst fv _, [b1; b2] + | Tv_FVar fv, [b1; b2] -> + if inspect_fv fv = explode_qn (`%tuple2) then + let cases = t_destruct b in + guard (List.Tot.length cases = 1 && inspect_fv (fst (List.Tot.hd cases)) = explode_qn (`%Mktuple2) && snd (List.Tot.hd cases) = 2); + let b1 = intro () in + let b2 = intro () in + let _eq = intro () in + b1 :: get_apply_tuple b2 + else + fail ("unexpected term in apply_tuple: " ^ term_to_string b.sort) + | Tv_FVar fv, [] -> + if inspect_fv fv = explode_qn (`%unit) then + [] + else + fail ("unexpected term in apply_tuple: " ^ term_to_string b.sort) + | _ -> + fail ("unexpected term in apply_tuple: " ^ term_to_string b.sort) + +(* Expects a goal of type [t_repr -> t] *) + +let rec generate_up_aux (ctors : list ctor) (b:binding) : Tac unit = + match ctors with + | [] -> + (* b must have type empty, it's the finisher for the cases *) + apply (`empty_elim); + exact b + | c::cs -> + let cases = t_destruct b in + if List.Tot.length cases <> 2 then + fail "generate_up_aux: expected Inl/Inr???"; + focus (fun () -> + let b' = intro () in + let _eq = intro () in + let c_name = fst c in + let args = get_apply_tuple b' in + apply (pack (Tv_FVar (pack_fv c_name))); + Util.iter (fun (b:binding) -> exact b) args; + qed() + ); + let b = intro () in + let _eq = intro () in + generate_up_aux cs b + +(* Expects a goal of type [t_repr -> t] *) +[@@plugin] +let generate_up (nm:string) () : Tac unit = + let Sg_Inductive {ctors} = get_inductive_typ nm in + let b = intro () in + generate_up_aux ctors b + +let make_implicits (bs : binders) : binders = + bs |> List.Tot.map (fun b -> + match b.qual with + | Q_Explicit -> { b with qual = Q_Implicit } + | _ -> b + ) + +let binder_to_argv (b:binder) : argv = + (binder_to_term b, b.qual) + +let generate_all (nm:name) (params:binders) (ctors : list ctor) : Tac decls = + let params_i = make_implicits params in + let t = mk_app (pack (Tv_FVar (pack_fv nm))) (List.Tot.map binder_to_argv params) in + let t_repr = generate_repr_typ params ctors in + let se_repr = pack_sigelt <| Sg_Let { + isrec = false; + lbs = [{ + lb_fv = pack_fv (add_suffix "_repr" nm); + lb_us = []; + lb_typ = mk_arr params <| C_Total (`Type); + lb_def = mk_abs params t_repr; + }] + } + in + + let down_def = + `(_ by (generate_down ())) + in + let down_def = mk_abs params_i down_def in + let se_down = + let b = fresh_binder t in + pack_sigelt <| Sg_Let { + isrec = false; + lbs = [{ + lb_fv = pack_fv (add_suffix "_down" nm); + lb_us = []; + lb_typ = mk_tot_arr 
params_i <| Tv_Arrow b (C_Total t_repr); + lb_def = down_def; + }] + } + in + let up_def = + `(_ by (generate_up (`#(pack (Tv_Const (C_String (implode_qn nm))))) ())) + in + let up_def = mk_abs params_i up_def in + let se_up = + let b = fresh_binder t_repr in + pack_sigelt <| Sg_Let { + isrec = false; + lbs = [{ + lb_fv = pack_fv (add_suffix "_up" nm); + lb_us = []; + lb_typ = mk_tot_arr params_i <| Tv_Arrow b (C_Total t); + lb_def = up_def; + }] + } + in + [se_repr; se_down; se_up] + +[@@plugin] +let entry (nm : string) : Tac decls = + let Sg_Inductive {params; nm; ctors} = get_inductive_typ nm in + generate_all nm params ctors diff --git a/stage0/ulib/FStar.Tactics.TypeRepr.fsti b/stage0/ulib/FStar.Tactics.TypeRepr.fsti new file mode 100644 index 00000000000..f9730db5dd0 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.TypeRepr.fsti @@ -0,0 +1,21 @@ +module FStar.Tactics.TypeRepr + +open FStar.Tactics.V2.Bare + +private +let empty_elim (e:empty) (#a:Type) : a = match e with + +(* Do not use directly. *) +[@@plugin] +val generate_repr_typ (params : binders) (ctors : list ctor) : Tac typ + +(* Do not use directly. *) +[@@plugin] +val generate_down () : Tac unit + +(* Do not use directly. *) +[@@plugin] +val generate_up (nm:string) () : Tac unit + +[@@plugin] +val entry (nm : string) : Tac decls diff --git a/stage0/ulib/FStar.Tactics.Typeclasses.fst b/stage0/ulib/FStar.Tactics.Typeclasses.fst new file mode 100644 index 00000000000..29a67e6b06b --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Typeclasses.fst @@ -0,0 +1,426 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Typeclasses + +open FStar.Reflection.V2 +module R = FStar.Reflection.V2 +open FStar.Stubs.Tactics.Common +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.V2.SyntaxHelpers +open FStar.Tactics.V2.Derived +open FStar.Tactics.V2.SyntaxCoercions +open FStar.Tactics.NamedView + +(* Thunked version of debug *) +let debug (f : unit -> Tac string) : Tac unit = + if debugging () then + print (f ()) + +module L = FStar.List.Tot.Base +let (@) = L.op_At + +(* The attribute that marks classes *) +irreducible +let tcclass : unit = () + +(* The attribute that marks instances *) +irreducible +let tcinstance : unit = () + +(* Functional dependencies of a class. *) +irreducible +let fundeps (_ : list int) : unit = () + +(* The attribute that marks class fields + to signal that no method should be generated for them *) +irreducible +let no_method : unit = () + +noeq +type st_t = { + seen : list term; + glb : list (sigelt & fv); + fuel : int; +} + +noeq +type tc_goal = { + g : term; + (* ^ The goal as a term *) + head_fv : fv; + (* ^ Head fv of goal (g), i.e. the class name *) + c_se : option sigelt; + (* ^ Class sigelt *) + fundeps : option (list int); + (* ^ Functional dependendcies of class, if any. *) + args_and_uvars : list (argv & bool); + (* ^ The arguments of the goal, and whether they are + unresolved, even partially. I.e. 
the boolean is true + when the arg contains uvars. *) +} + + +val fv_eq : fv -> fv -> Tot bool +let fv_eq fv1 fv2 = + let n1 = inspect_fv fv1 in + let n2 = inspect_fv fv2 in + n1 = n2 + +let rec head_of (t:term) : Tac (option fv) = + (* NB: must use `inspect` to make use of unionfind graph. + inspect_ln won't work. *) + match inspect t with + | Tv_FVar fv + | Tv_UInst fv _ -> Some fv + | Tv_App h _ -> head_of h + | v -> None + +let rec res_typ (t:term) : Tac term = + match inspect t with + | Tv_Arrow _ c -> ( + match inspect_comp c with + | C_Total t -> res_typ t + | _ -> t + ) + | _ -> t + +(* Would be good to use different exceptions for each reason +the search stops, but it takes some work to properly account +for them and report proper errors. *) +exception NoInst + +private +let rec first (f : 'a -> Tac 'b) (l : list 'a) : Tac 'b = + match l with + | [] -> raise NoInst + | x::xs -> (fun () -> f x) `or_else` (fun () -> first f xs) + +private +let rec maybe_intros () : Tac unit = + let g = cur_goal () in + match inspect g with + | Tv_Arrow _ _ -> + ignore (intro ()); + maybe_intros () + | _ -> () + +let sigelt_name (se:sigelt) : list fv = + match FStar.Stubs.Reflection.V2.Builtins.inspect_sigelt se with + | Stubs.Reflection.V2.Data.Sg_Let _ lbs -> ( + match lbs with + | [lb] -> [(FStar.Stubs.Reflection.V2.Builtins.inspect_lb lb).lb_fv] + | _ -> [] + ) + | Stubs.Reflection.V2.Data.Sg_Val nm _ _ -> [pack_fv nm] + | _ -> [] + +(* Would be nice to define an unembedding class here.. but it's circular. *) +let unembed_int (t:term) : Tac (option int) = + match inspect_ln t with + | R.Tv_Const (C_Int i) -> Some i + | _ -> None + +let rec unembed_list (#a:Type) (u : term -> Tac (option a)) (t:term) : Tac (option (list a)) = + match hua t with + | Some (fv, _, [(ty, Q_Implicit); (hd, Q_Explicit); (tl, Q_Explicit)]) -> + if implode_qn (inspect_fv fv) = `%Prims.Cons then + match u hd, unembed_list u tl with + | Some hd, Some tl -> Some (hd::tl) + | _ -> None + else + None + | Some (fv, _, [(ty, Q_Implicit)]) -> + if implode_qn (inspect_fv fv) = `%Prims.Nil then + Some [] + else + None + | _ -> + None + +let extract_fundeps (se : sigelt) : Tac (option (list int)) = + let attrs = sigelt_attrs se in + let rec aux (attrs : list term) : Tac (option (list int)) = + match attrs with + | [] -> None + | attr::attrs' -> + match collect_app attr with + | hd, [(a0, Q_Explicit)] -> + if FStar.Reflection.TermEq.Simple.term_eq hd (`fundeps) then ( + unembed_list unembed_int a0 + ) else + aux attrs' + | _ -> + aux attrs' + in + aux attrs + +let trywith (st:st_t) (g:tc_goal) (t typ : term) (k : st_t -> Tac unit) : Tac unit = + // print ("head_fv = " ^ fv_to_string g.head_fv); + // print ("fundeps = " ^ Util.string_of_option (Util.string_of_list (fun i -> string_of_int i)) fundeps); + let unresolved_args = g.args_and_uvars |> Util.mapi (fun i (_, b) -> if b then [i <: int] else []) |> List.Tot.flatten in + // print ("unresolved_args = " ^ Util.string_of_list (fun i -> string_of_int i) unresolved_args); + + match head_of (res_typ typ) with + | None -> + debug (fun () -> "no head for typ of this? " ^ term_to_string t ^ " typ=" ^ term_to_string typ); + raise NoInst + | Some fv' -> + if not (fv_eq fv' g.head_fv) then + raise NoInst; // class mismatch, would be better to not even get here + debug (fun () -> "Trying to apply hypothesis/instance: " ^ term_to_string t); + (fun () -> + if Cons? unresolved_args && None? g.fundeps then + fail "Will not continue as there are unresolved args (and no fundeps)" + else if Cons? 
unresolved_args && Some? g.fundeps then ( + let Some fundeps = g.fundeps in + debug (fun () -> "checking fundeps"); + let all_good = List.Tot.for_all (fun i -> List.Tot.mem i fundeps) unresolved_args in + if all_good then apply t else fail "fundeps" + ) else ( + apply_noinst t + ) + ) `seq` (fun () -> + debug (fun () -> dump "next"; "apply seems to have worked"); + let st = { st with fuel = st.fuel - 1 } in + k st) + +let local (st:st_t) (g:tc_goal) (k : st_t -> Tac unit) () : Tac unit = + debug (fun () -> "local, goal = " ^ term_to_string g.g); + let bs = vars_of_env (cur_env ()) in + first (fun (b:binding) -> + trywith st g (pack (Tv_Var b)) b.sort k) + bs + +let global (st:st_t) (g:tc_goal) (k : st_t -> Tac unit) () : Tac unit = + debug (fun () -> "global, goal = " ^ term_to_string g.g); + first (fun (se, fv) -> + let typ = tc (cur_env()) (pack (Tv_FVar fv)) in // FIXME: a bit slow.. but at least it's a simple fvar + trywith st g (pack (Tv_FVar fv)) typ k) + st.glb + +exception Next +let try_trivial (st:st_t) (g:tc_goal) (k : st_t -> Tac unit) () : Tac unit = + match g.g with + | Tv_FVar fv -> + if implode_qn (inspect_fv fv) = `%unit + then exact (`()) + else raise Next + | _ -> raise Next + +let ( <|> ) (t1 t2 : unit -> Tac 'a) : unit -> Tac 'a = + fun () -> + try t1 () with _ -> t2 () + +(* + tcresolve': the main typeclass instantiation function. + + It mostly creates a tc_goal record and calls the functions above. +*) +let rec tcresolve' (st:st_t) : Tac unit = + if st.fuel <= 0 then + raise NoInst; + debug (fun () -> "fuel = " ^ string_of_int st.fuel); + + maybe_intros(); + let g = cur_goal () in + + (* Try to detect loops *) + if L.existsb (Reflection.TermEq.Simple.term_eq g) st.seen then ( + debug (fun () -> "loop"); + raise NoInst + ); + + match hua g with + | None -> + debug (fun () -> "Goal does not look like a typeclass"); + raise NoInst + + | Some (head_fv, us, args) -> + (* ^ Maybe should check is this really is a class too? *) + let c_se = lookup_typ (cur_env ()) (inspect_fv head_fv) in + let fundeps = match c_se with + | None -> None + | Some se -> extract_fundeps se + in + + let args_and_uvars = args |> Util.map (fun (a, q) -> (a, q), Cons? (free_uvars a )) in + let st = { st with seen = g :: st.seen } in + let g = { g; head_fv; c_se; fundeps; args_and_uvars } in + (try_trivial st g tcresolve' <|> + local st g tcresolve' <|> + global st g tcresolve') () + +[@@plugin] +let tcresolve () : Tac unit = + let open FStar.Pprint in + debug (fun () -> dump ""; "tcresolve entry point"); + norm []; + let w = cur_witness () in + set_dump_on_failure false; (* We report our own errors *) + + // Not using intros () directly, since that unfolds aggressively if the term is not a literal arrow + maybe_intros (); + + // Fetch a list of all instances in scope right now. + // TODO: turn this into a hash map per class, ideally one that can be + // persisted across calss. 
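+  // Each sigelt carrying the tcinstance attribute is paired with every
+  // top-level name it defines; tcresolve' tries local hypotheses first
+  // and only then falls back to these global instances.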
+ let glb = lookup_attr_ses (`tcinstance) (cur_env ()) in + let glb = glb |> Tactics.Util.concatMap (fun se -> + sigelt_name se |> Tactics.Util.concatMap (fun fv -> [(se, fv)]) + ) + in + let st0 = { + seen = []; + glb = glb; + fuel = 16; + } in + try + tcresolve' st0; + debug (fun () -> "Solved to:\n\t" ^ term_to_string w) + with + | NoInst -> + let open FStar.Pprint in + fail_doc [ + prefix 2 1 (text "Could not solve typeclass constraint") + (bquotes (term_to_doc (cur_goal ()))); + ] + | TacticFailure (msg,r) -> + fail_doc_at ([text "Typeclass resolution failed."] @ msg) r + | e -> raise e + +(**** Generating methods from a class ****) + +(* In TAC, not Tot *) +private +let rec mk_abs (bs : list binder) (body : term) : Tac term (decreases bs) = + match bs with + | [] -> body + | b::bs -> pack (Tv_Abs b (mk_abs bs body)) + +private +let rec last (l : list 'a) : Tac 'a = + match l with + | [] -> fail "last: empty list" + | [x] -> x + | _::xs -> last xs + +private +let filter_no_method_binders (bs:binders) + : binders + = let open FStar.Reflection.TermEq.Simple in + let has_no_method_attr (b:binder) : bool = + L.existsb (term_eq (`no_method)) b.attrs + in + bs |> L.filter (fun b -> not (has_no_method_attr b)) + +private +let binder_set_meta (b : binder) (t : term) : binder = + { b with qual = Q_Meta t } + +[@@plugin] +let mk_class (nm:string) : Tac decls = + let ns = explode_qn nm in + let r = lookup_typ (top_env ()) ns in + guard (Some? r); + let Some se = r in + let to_propagate = L.filter (function Inline_for_extraction | NoExtract -> true | _ -> false) (sigelt_quals se) in + let sv = inspect_sigelt se in + guard (Sg_Inductive? sv); + let Sg_Inductive {nm=name;univs=us;params;typ=ity;ctors} = sv in + debug (fun () -> "params = " ^ Tactics.Util.string_of_list binder_to_string params); + debug (fun () -> "got it, name = " ^ implode_qn name); + debug (fun () -> "got it, ity = " ^ term_to_string ity); + let ctor_name = last name in + // Must have a single constructor + guard (L.length ctors = 1); + let [(c_name, ty)] = ctors in + debug (fun () -> "got ctor " ^ implode_qn c_name ^ " of type " ^ term_to_string ty); + let bs, cod = collect_arr_bs ty in + let r = inspect_comp cod in + guard (C_Total? r); + let C_Total cod = r in (* must be total *) + + debug (fun () -> "params = " ^ Tactics.Util.string_of_list binder_to_string params); + debug (fun () -> "n_params = " ^ string_of_int (List.Tot.Base.length params)); + debug (fun () -> "n_univs = " ^ string_of_int (List.Tot.Base.length us)); + debug (fun () -> "cod = " ^ term_to_string cod); + + (* print ("n_bs = " ^ string_of_int (List.Tot.Base.length bs)); *) + + let base : string = "__proj__Mk" ^ ctor_name ^ "__item__" in + + (* Make a sigelt for each method *) + filter_no_method_binders bs + |> Tactics.Util.map (fun (b:binder) -> + let s = name_of_binder b in + debug (fun () -> "processing method " ^ s); + let ns = cur_module () in + let sfv = pack_fv (ns @ [s]) in + let dbv = fresh_namedv_named "d" in + let tcr = (`tcresolve) in + let tcdict = { + ppname = seal "dict"; + sort = cod; + uniq = fresh(); + qual = Q_Meta tcr; + attrs = []; + } in + let proj_name = cur_module () @ [base ^ s] in + let proj = pack (Tv_FVar (pack_fv proj_name)) in + + let proj_lb = + match lookup_typ (top_env ()) proj_name with + | None -> fail "mk_class: proj not found?" + | Some se -> + match inspect_sigelt se with + | Sg_Let {lbs} -> lookup_lb lbs proj_name + | _ -> fail "mk_class: proj not Sg_Let?" 
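+        (* The projector's type and definition are reused below for the method,
+           with the dictionary binder turned into an instance argument
+           (Q_Meta tcresolve). *)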
+ in + debug (fun () -> "proj_ty = " ^ term_to_string proj_lb.lb_typ); + + let ty = + let bs, cod = collect_arr_bs proj_lb.lb_typ in + let ps, bs = List.Tot.Base.splitAt (List.Tot.Base.length params) bs in + match bs with + | [] -> fail "mk_class: impossible, no binders" + | b1::bs' -> + let b1 = binder_set_meta b1 tcr in + mk_arr (ps@(b1::bs')) cod + in + let def = + let bs, body = collect_abs proj_lb.lb_def in + let ps, bs = List.Tot.Base.splitAt (List.Tot.Base.length params) bs in + match bs with + | [] -> fail "mk_class: impossible, no binders" + | b1::bs' -> + let b1 = binder_set_meta b1 tcr in + mk_abs (ps@(b1::bs')) body + in + debug (fun () -> "def = " ^ term_to_string def); + debug (fun () -> "ty = " ^ term_to_string ty); + + let ty : term = ty in + let def : term = def in + let sfv : fv = sfv in + + let lb = { lb_fv=sfv; lb_us=proj_lb.lb_us; lb_typ=ty; lb_def=def } in + let se = pack_sigelt (Sg_Let {isrec=false; lbs=[lb]}) in + let se = set_sigelt_quals to_propagate se in + let se = set_sigelt_attrs b.attrs se in + //debug (fun () -> "trying to return : " ^ term_to_string (quote se)); + se + ) diff --git a/stage0/ulib/FStar.Tactics.Typeclasses.fsti b/stage0/ulib/FStar.Tactics.Typeclasses.fsti new file mode 100644 index 00000000000..eac4b77fc7b --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Typeclasses.fsti @@ -0,0 +1,50 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Typeclasses + +open FStar.Tactics.Effect +open FStar.Stubs.Reflection.Types (* for `decls` *) + +(* The attribute that marks classes *) +val tcclass : unit + +(* The attribute that marks instances *) +val tcinstance : unit + +(* Functional dependencies of a class. It takes an int list +representing the arguments of the class (starting from 0, both explicit +and implicit alike) that are dependent on the rest. When trying to apply +an instance, if the fundeps are unresolved (i.e. contain uvars) but the +other arguments do not, we will apply the instance and instantiate the +fundeps. *) +val fundeps : list int -> unit + +(* The attribute that marks class fields + to signal that no method should be generated for them *) +val no_method : unit + +(* The typeclass resolution metaprogram. This is a plugin, clients can +run this tactics without having to know its definition in the .fst *) +val tcresolve : unit -> Tac unit + +(* The metaprogram to generate class methods. Also a plugin. This +is inserted automatically by the desugaring phase for any `class` +declaration. 
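+One method is generated for each field of the class (fields marked with
+[no_method] are skipped), and each method takes its dictionary as an
+argument solved by [tcresolve].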
*) +val mk_class (nm:string) : Tac decls + +(* Helper to solve an explicit argument by typeclass resolution *) +[@@tcnorm] +unfold let solve (#a:Type) (#[tcresolve ()] ev : a) : Tot a = ev diff --git a/stage0/ulib/FStar.Tactics.Util.fst b/stage0/ulib/FStar.Tactics.Util.fst new file mode 100644 index 00000000000..25fb09dafd6 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Util.fst @@ -0,0 +1,132 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.Util + +open FStar.Tactics.Effect +open FStar.List.Tot.Base + +(* Tac list functions, since there's no effect polymorphism *) +val map: ('a -> Tac 'b) -> list 'a -> Tac (list 'b) +let rec map f x = match x with + | [] -> [] + | a::tl -> f a::map f tl + +let rec concatMap (f : 'a -> Tac (list 'b)) (l : list 'a) : Tac (list 'b) = + match l with + | [] -> [] + | x::xs -> f x @ concatMap f xs + +val __mapi: nat -> (nat -> 'a -> Tac 'b) -> list 'a -> Tac (list 'b) +let rec __mapi i f x = match x with + | [] -> [] + | a::tl -> f i a::__mapi (i+1) f tl + +val mapi: (nat -> 'a -> Tac 'b) -> list 'a -> Tac (list 'b) +let mapi f l = __mapi 0 f l + +val iter : ('a -> Tac unit) -> list 'a -> Tac unit +let rec iter f x = match x with + | [] -> () + | a::tl -> f a; iter f tl + +val iteri_aux: int -> (int -> 'a -> Tac unit) -> list 'a -> Tac unit +let rec iteri_aux i f x = match x with + | [] -> () + | a::tl -> f i a; iteri_aux (i+1) f tl + +val iteri: (int -> 'a -> Tac unit) -> list 'a -> Tac unit +let iteri f x = iteri_aux 0 f x + +val fold_left: ('a -> 'b -> Tac 'a) -> 'a -> l:list 'b -> Tac 'a +let rec fold_left f x l = match l with + | [] -> x + | hd::tl -> fold_left f (f x hd) tl + +val fold_right: ('a -> 'b -> Tac 'b) -> list 'a -> 'b -> Tac 'b +let rec fold_right f l x = match l with + | [] -> x + | hd::tl -> f hd (fold_right f tl x) + +(* There's no unconditionally total zip like this in Tot.Base, why? Anyway use this *) +val zip : (#a:Type) -> (#b:Type) -> list a -> list b -> Tac (list (a & b)) +let rec zip #a #b l1 l2 = match l1, l2 with + | x::xs, y::ys -> (x,y) :: (zip xs ys) + | _ -> [] + +val filter: ('a -> Tac bool) -> list 'a -> Tac (list 'a) +let rec filter f = function + | [] -> [] + | hd::tl -> if f hd then hd::(filter f tl) else filter f tl + +private let rec filter_map_acc (f:'a -> Tac (option 'b)) (acc:list 'b) (l:list 'a) + : Tac (list 'b) = + match l with + | [] -> + rev acc + | hd :: tl -> + match f hd with + | Some hd -> + filter_map_acc f (hd :: acc) tl + | None -> + filter_map_acc f acc tl + +let filter_map (f:'a -> Tac (option 'b)) (l:list 'a) : Tac (list 'b) = + filter_map_acc f [] l + +val tryPick: ('a -> Tac (option 'b)) -> list 'a -> Tac (option 'b) +let rec tryPick f l = match l with + | [] -> None + | hd::tl -> + match f hd with + | Some x -> Some x + | None -> tryPick f tl + +let map_opt (f:'a -> Tac 'b) (x:option 'a) : Tac (option 'b) = + match x with + | None -> None + | Some x -> Some (f x) + +(** Apply a given tactic [t] repeatedly [n] times and return the results. 
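+    If [n] is not positive, [t] is not called and the empty list is returned.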
*) +let rec repeatn (#a:Type) (n : int) (t : unit -> Tac a) : Tac (l:list a{n < 0 \/ length l == n}) = + if n <= 0 + then [] + else t () :: repeatn (n - 1) t + +let rec tryFind (#a:Type) (f:a -> Tac bool) (l:list a) : Tac bool = + match l with + | [] -> false + | hd::tl -> + if f hd then true + else tryFind f tl + +let rec fold_left2 (#a #b #c:Type) (f:a -> b -> c -> Tac a) (x:a) (l1:list b) (l2:list c) + : TacH a + (requires fun _ -> length l1 == length l2) + (ensures fun _ _ -> True) = + match l1, l2 with + | [], [] -> x + | hd1::tl1, hd2::tl2 -> + fold_left2 f (f x hd1 hd2) tl1 tl2 + +let rec string_of_list #a (f : a -> Tac string) (l : list a) : Tac string = + match l with + | [] -> "" + | x::xs -> f x ^ ";" ^ string_of_list f xs + +let string_of_option #a (f : a -> Tac string) (o : option a) : Tac string = + match o with + | Some x -> "Some " ^ f x + | None -> "None" diff --git a/stage0/ulib/FStar.Tactics.V1.Derived.fst b/stage0/ulib/FStar.Tactics.V1.Derived.fst new file mode 100644 index 00000000000..a4478fa47b2 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V1.Derived.fst @@ -0,0 +1,943 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V1.Derived + +open FStar.Reflection.V1 +open FStar.Reflection.V1.Formula +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.Types +open FStar.Stubs.Tactics.Result +open FStar.Tactics.Util +open FStar.Stubs.Tactics.V1.Builtins +open FStar.Tactics.V1.SyntaxHelpers +open FStar.Stubs.VConfig +include FStar.Tactics.Names + +module L = FStar.List.Tot.Base +module V = FStar.Tactics.Visit +private let (@) = L.op_At + +let name_of_bv (bv : bv) : Tac string = + unseal ((inspect_bv bv).bv_ppname) + +let bv_to_string (bv : bv) : Tac string = + (* Could also print type...? *) + name_of_bv bv + +let name_of_binder (b : binder) : Tac string = + name_of_bv (bv_of_binder b) + +let binder_to_string (b : binder) : Tac string = + bv_to_string (bv_of_binder b) //TODO: print aqual, attributes + +exception Goal_not_trivial + +let goals () : Tac (list goal) = goals_of (get ()) +let smt_goals () : Tac (list goal) = smt_goals_of (get ()) + +let fail (#a:Type) (m:string) + : TAC a (fun ps post -> post (Failed (TacticFailure (mkmsg m, None)) ps)) + = raise #a (TacticFailure (mkmsg m, None)) + +let fail_silently (#a:Type) (m:string) + : TAC a (fun _ post -> forall ps. post (Failed (TacticFailure (mkmsg m, None)) ps)) + = set_urgency 0; + raise #a (TacticFailure (mkmsg m, None)) + +(** Return the current *goal*, not its type. 
(Ignores SMT goals) *) +let _cur_goal () : Tac goal = + match goals () with + | [] -> fail "no more goals" + | g::_ -> g + +(** [cur_env] returns the current goal's environment *) +let cur_env () : Tac env = goal_env (_cur_goal ()) + +(** [cur_goal] returns the current goal's type *) +let cur_goal () : Tac typ = goal_type (_cur_goal ()) + +(** [cur_witness] returns the current goal's witness *) +let cur_witness () : Tac term = goal_witness (_cur_goal ()) + +(** [cur_goal_safe] will always return the current goal, without failing. +It must be statically verified that there indeed is a goal in order to +call it. *) +let cur_goal_safe () : TacH goal (requires (fun ps -> ~(goals_of ps == []))) + (ensures (fun ps0 r -> exists g. r == Success g ps0)) + = match goals_of (get ()) with + | g :: _ -> g + +(** [cur_binders] returns the list of binders in the current goal. *) +let cur_binders () : Tac binders = + binders_of_env (cur_env ()) + +(** Set the guard policy only locally, without affecting calling code *) +let with_policy pol (f : unit -> Tac 'a) : Tac 'a = + let old_pol = get_guard_policy () in + set_guard_policy pol; + let r = f () in + set_guard_policy old_pol; + r + +(** [exact e] will solve a goal [Gamma |- w : t] if [e] has type exactly +[t] in [Gamma]. *) +let exact (t : term) : Tac unit = + with_policy SMT (fun () -> t_exact true false t) + +(** [exact_with_ref e] will solve a goal [Gamma |- w : t] if [e] has +type [t'] where [t'] is a subtype of [t] in [Gamma]. This is a more +flexible variant of [exact]. *) +let exact_with_ref (t : term) : Tac unit = + with_policy SMT (fun () -> t_exact true true t) + +let trivial () : Tac unit = + norm [iota; zeta; reify_; delta; primops; simplify; unmeta]; + let g = cur_goal () in + match term_as_formula g with + | True_ -> exact (`()) + | _ -> raise Goal_not_trivial + +(* Another hook to just run a tactic without goals, just by reusing `with_tactic` *) +let run_tactic (t:unit -> Tac unit) + : Pure unit + (requires (set_range_of (with_tactic (fun () -> trivial (); t ()) (squash True)) (range_of t))) + (ensures (fun _ -> True)) + = () + +(** Ignore the current goal. If left unproven, this will fail after +the tactic finishes. *) +let dismiss () : Tac unit = + match goals () with + | [] -> fail "dismiss: no more goals" + | _::gs -> set_goals gs + +(** Flip the order of the first two goals. *) +let flip () : Tac unit = + let gs = goals () in + match goals () with + | [] | [_] -> fail "flip: less than two goals" + | g1::g2::gs -> set_goals (g2::g1::gs) + +(** Succeed if there are no more goals left, and fail otherwise. *) +let qed () : Tac unit = + match goals () with + | [] -> () + | _ -> fail "qed: not done!" + +(** [debug str] is similar to [print str], but will only print the message +if [--debug Tac] is on. *) +let debug (m:string) : Tac unit = + if debugging () then print m + +(** [smt] will mark the current goal for being solved through the SMT. +This does not immediately run the SMT: it just dumps the goal in the +SMT bin. Note, if you dump a proof-relevant goal there, the engine will +later raise an error. *) +let smt () : Tac unit = + match goals (), smt_goals () with + | [], _ -> fail "smt: no active goals" + | g::gs, gs' -> + begin + set_goals gs; + set_smt_goals (g :: gs') + end + +let idtac () : Tac unit = () + +(** Push the current goal to the back. 
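+Fails if there are no goals.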
*) +let later () : Tac unit = + match goals () with + | g::gs -> set_goals (gs @ [g]) + | _ -> fail "later: no goals" + +(** [apply f] will attempt to produce a solution to the goal by an application +of [f] to any amount of arguments (which need to be solved as further goals). +The amount of arguments introduced is the least such that [f a_i] unifies +with the goal's type. *) +let apply (t : term) : Tac unit = + t_apply true false false t + +let apply_noinst (t : term) : Tac unit = + t_apply true true false t + +(** [apply_lemma l] will solve a goal of type [squash phi] when [l] is a +Lemma ensuring [phi]. The arguments to [l] and its requires clause are +introduced as new goals. As a small optimization, [unit] arguments are +discharged by the engine. Just a thin wrapper around [t_apply_lemma]. *) +let apply_lemma (t : term) : Tac unit = + t_apply_lemma false false t + +(** See docs for [t_trefl] *) +let trefl () : Tac unit = + t_trefl false + +(** See docs for [t_trefl] *) +let trefl_guard () : Tac unit = + t_trefl true + +(** See docs for [t_commute_applied_match] *) +let commute_applied_match () : Tac unit = + t_commute_applied_match () + +(** Similar to [apply_lemma], but will not instantiate uvars in the +goal while applying. *) +let apply_lemma_noinst (t : term) : Tac unit = + t_apply_lemma true false t + +let apply_lemma_rw (t : term) : Tac unit = + t_apply_lemma false true t + +(** [apply_raw f] is like [apply], but will ask for all arguments +regardless of whether they appear free in further goals. See the +explanation in [t_apply]. *) +let apply_raw (t : term) : Tac unit = + t_apply false false false t + +(** Like [exact], but allows for the term [e] to have a type [t] only +under some guard [g], adding the guard as a goal. *) +let exact_guard (t : term) : Tac unit = + with_policy Goal (fun () -> t_exact true false t) + +(** (TODO: explain better) When running [pointwise tau] For every +subterm [t'] of the goal's type [t], the engine will build a goal [Gamma +|= t' == ?u] and run [tau] on it. When the tactic proves the goal, +the engine will rewrite [t'] for [?u] in the original goal type. This +is done for every subterm, bottom-up. This allows to recurse over an +unknown goal type. By inspecting the goal, the [tau] can then decide +what to do (to not do anything, use [trefl]). *) +let t_pointwise (d:direction) (tau : unit -> Tac unit) : Tac unit = + let ctrl (t:term) : Tac (bool & ctrl_flag) = + true, Continue + in + let rw () : Tac unit = + tau () + in + ctrl_rewrite d ctrl rw + +(** [topdown_rewrite ctrl rw] is used to rewrite those sub-terms [t] + of the goal on which [fst (ctrl t)] returns true. + + On each such sub-term, [rw] is presented with an equality of goal + of the form [Gamma |= t == ?u]. When [rw] proves the goal, + the engine will rewrite [t] for [?u] in the original goal + type. + + The goal formula is traversed top-down and the traversal can be + controlled by [snd (ctrl t)]: + + When [snd (ctrl t) = 0], the traversal continues down through the + position in the goal term. + + When [snd (ctrl t) = 1], the traversal continues to the next + sub-tree of the goal. + + When [snd (ctrl t) = 2], no more rewrites are performed in the + goal. 
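+
+    For illustration (with [lhs] and [lhs_eq_rhs] standing for a term and a
+    lemma proving [lhs == rhs] of the user's choosing), rewriting every
+    occurrence of [lhs] could look like:
+
+      topdown_rewrite
+        (fun t -> (t `term_eq` (`lhs), 0))
+        (fun () -> apply_lemma (`lhs_eq_rhs))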
+*) +let topdown_rewrite (ctrl : term -> Tac (bool & int)) + (rw:unit -> Tac unit) : Tac unit + = let ctrl' (t:term) : Tac (bool & ctrl_flag) = + let b, i = ctrl t in + let f = + match i with + | 0 -> Continue + | 1 -> Skip + | 2 -> Abort + | _ -> fail "topdown_rewrite: bad value from ctrl" + in + b, f + in + ctrl_rewrite TopDown ctrl' rw + +let pointwise (tau : unit -> Tac unit) : Tac unit = t_pointwise BottomUp tau +let pointwise' (tau : unit -> Tac unit) : Tac unit = t_pointwise TopDown tau + +let cur_module () : Tac name = + moduleof (top_env ()) + +let open_modules () : Tac (list name) = + env_open_modules (top_env ()) + +let fresh_uvar (o : option typ) : Tac term = + let e = cur_env () in + uvar_env e o + +let unify (t1 t2 : term) : Tac bool = + let e = cur_env () in + unify_env e t1 t2 + +let unify_guard (t1 t2 : term) : Tac bool = + let e = cur_env () in + unify_guard_env e t1 t2 + +let tmatch (t1 t2 : term) : Tac bool = + let e = cur_env () in + match_env e t1 t2 + +(** [divide n t1 t2] will split the current set of goals into the [n] +first ones, and the rest. It then runs [t1] on the first set, and [t2] +on the second, returning both results (and concatenating remaining goals). *) +let divide (n:int) (l : unit -> Tac 'a) (r : unit -> Tac 'b) : Tac ('a & 'b) = + if n < 0 then + fail "divide: negative n"; + let gs, sgs = goals (), smt_goals () in + let gs1, gs2 = List.Tot.Base.splitAt n gs in + + set_goals gs1; set_smt_goals []; + let x = l () in + let gsl, sgsl = goals (), smt_goals () in + + set_goals gs2; set_smt_goals []; + let y = r () in + let gsr, sgsr = goals (), smt_goals () in + + set_goals (gsl @ gsr); set_smt_goals (sgs @ sgsl @ sgsr); + (x, y) + +let rec iseq (ts : list (unit -> Tac unit)) : Tac unit = + match ts with + | t::ts -> let _ = divide 1 t (fun () -> iseq ts) in () + | [] -> () + +(** [focus t] runs [t ()] on the current active goal, hiding all others +and restoring them at the end. *) +let focus (t : unit -> Tac 'a) : Tac 'a = + match goals () with + | [] -> fail "focus: no goals" + | g::gs -> + let sgs = smt_goals () in + set_goals [g]; set_smt_goals []; + let x = t () in + set_goals (goals () @ gs); set_smt_goals (smt_goals () @ sgs); + x + +(** Similar to [dump], but only dumping the current goal. *) +let dump1 (m : string) = focus (fun () -> dump m) + +let rec mapAll (t : unit -> Tac 'a) : Tac (list 'a) = + match goals () with + | [] -> [] + | _::_ -> let (h, t) = divide 1 t (fun () -> mapAll t) in h::t + +let rec iterAll (t : unit -> Tac unit) : Tac unit = + (* Could use mapAll, but why even build that list *) + match goals () with + | [] -> () + | _::_ -> let _ = divide 1 t (fun () -> iterAll t) in () + +let iterAllSMT (t : unit -> Tac unit) : Tac unit = + let gs, sgs = goals (), smt_goals () in + set_goals sgs; + set_smt_goals []; + iterAll t; + let gs', sgs' = goals (), smt_goals () in + set_goals gs; + set_smt_goals (gs'@sgs') + +(** Runs tactic [t1] on the current goal, and then tactic [t2] on *each* +subgoal produced by [t1]. Each invocation of [t2] runs on a proofstate +with a single goal (they're "focused"). 
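+For example, [seq (fun () -> apply_lemma (`and_cong)) simplify_point] in
+FStar.Tactics.Simplifier applies a congruence lemma and then simplifies
+each of the two resulting goals.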
*) +let seq (f : unit -> Tac unit) (g : unit -> Tac unit) : Tac unit = + focus (fun () -> f (); iterAll g) + +let exact_args (qs : list aqualv) (t : term) : Tac unit = + focus (fun () -> + let n = List.Tot.Base.length qs in + let uvs = repeatn n (fun () -> fresh_uvar None) in + let t' = mk_app t (zip uvs qs) in + exact t'; + iter (fun uv -> if is_uvar uv + then unshelve uv + else ()) (L.rev uvs) + ) + +let exact_n (n : int) (t : term) : Tac unit = + exact_args (repeatn n (fun () -> Q_Explicit)) t + +(** [ngoals ()] returns the number of goals *) +let ngoals () : Tac int = List.Tot.Base.length (goals ()) + +(** [ngoals_smt ()] returns the number of SMT goals *) +let ngoals_smt () : Tac int = List.Tot.Base.length (smt_goals ()) + +(* Create a fresh bound variable (bv), using a generic name. See also +[fresh_bv_named]. *) +let fresh_bv () : Tac bv = + (* These bvs are fresh anyway through a separate counter, + * but adding the integer allows for more readability when + * generating code *) + let i = fresh () in + fresh_bv_named ("x" ^ string_of_int i) + +let fresh_binder_named nm t : Tac binder = + mk_binder (fresh_bv_named nm) t + +let fresh_binder t : Tac binder = + (* See comment in fresh_bv *) + let i = fresh () in + fresh_binder_named ("x" ^ string_of_int i) t + +let fresh_implicit_binder_named nm t : Tac binder = + mk_implicit_binder (fresh_bv_named nm) t + +let fresh_implicit_binder t : Tac binder = + (* See comment in fresh_bv *) + let i = fresh () in + fresh_implicit_binder_named ("x" ^ string_of_int i) t + +let guard (b : bool) : TacH unit (requires (fun _ -> True)) + (ensures (fun ps r -> if b + then Success? r /\ Success?.ps r == ps + else Failed? r)) + (* ^ the proofstate on failure is not exactly equal (has the psc set) *) + = + if not b then + fail "guard failed" + else () + +let try_with (f : unit -> Tac 'a) (h : exn -> Tac 'a) : Tac 'a = + match catch f with + | Inl e -> h e + | Inr x -> x + +let trytac (t : unit -> Tac 'a) : Tac (option 'a) = + try Some (t ()) + with + | _ -> None + +let or_else (#a:Type) (t1 : unit -> Tac a) (t2 : unit -> Tac a) : Tac a = + try t1 () + with | _ -> t2 () + +val (<|>) : (unit -> Tac 'a) -> + (unit -> Tac 'a) -> + (unit -> Tac 'a) +let (<|>) t1 t2 = fun () -> or_else t1 t2 + +let first (ts : list (unit -> Tac 'a)) : Tac 'a = + L.fold_right (<|>) ts (fun () -> fail "no tactics to try") () + +let rec repeat (#a:Type) (t : unit -> Tac a) : Tac (list a) = + match catch t with + | Inl _ -> [] + | Inr x -> x :: repeat t + +let repeat1 (#a:Type) (t : unit -> Tac a) : Tac (list a) = + t () :: repeat t + +let repeat' (f : unit -> Tac 'a) : Tac unit = + let _ = repeat f in () + +let norm_term (s : list norm_step) (t : term) : Tac term = + let e = + try cur_env () + with | _ -> top_env () + in + norm_term_env e s t + +(** Join all of the SMT goals into one. This helps when all of them are +expected to be similar, and therefore easier to prove at once by the SMT +solver. TODO: would be nice to try to join them in a more meaningful +way, as the order can matter. *) +let join_all_smt_goals () = + let gs, sgs = goals (), smt_goals () in + set_smt_goals []; + set_goals sgs; + repeat' join; + let sgs' = goals () in // should be a single one + set_goals gs; + set_smt_goals sgs' + +let discard (tau : unit -> Tac 'a) : unit -> Tac unit = + fun () -> let _ = tau () in () + +// TODO: do we want some value out of this? 
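+// repeatseq t: run t, then recursively run t on every generated subgoal,
+// stopping along each branch as soon as t fails.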
+let rec repeatseq (#a:Type) (t : unit -> Tac a) : Tac unit = + let _ = trytac (fun () -> (discard t) `seq` (discard (fun () -> repeatseq t))) in () + +let tadmit () = tadmit_t (`()) + +let admit1 () : Tac unit = + tadmit () + +let admit_all () : Tac unit = + let _ = repeat tadmit in + () + +(** [is_guard] returns whether the current goal arose from a typechecking guard *) +let is_guard () : Tac bool = + Stubs.Tactics.Types.is_guard (_cur_goal ()) + +let skip_guard () : Tac unit = + if is_guard () + then smt () + else fail "" + +let guards_to_smt () : Tac unit = + let _ = repeat skip_guard in + () + +let simpl () : Tac unit = norm [simplify; primops] +let whnf () : Tac unit = norm [weak; hnf; primops; delta] +let compute () : Tac unit = norm [primops; iota; delta; zeta] + +let intros () : Tac (list binder) = repeat intro + +let intros' () : Tac unit = let _ = intros () in () +let destruct tm : Tac unit = let _ = t_destruct tm in () +let destruct_intros tm : Tac unit = seq (fun () -> let _ = t_destruct tm in ()) intros' + +private val __cut : (a:Type) -> (b:Type) -> (a -> b) -> a -> b +private let __cut a b f x = f x + +let tcut (t:term) : Tac binder = + let g = cur_goal () in + let tt = mk_e_app (`__cut) [t; g] in + apply tt; + intro () + +let pose (t:term) : Tac binder = + apply (`__cut); + flip (); + exact t; + intro () + +let intro_as (s:string) : Tac binder = + let b = intro () in + rename_to b s + +let pose_as (s:string) (t:term) : Tac binder = + let b = pose t in + rename_to b s + +let for_each_binder (f : binder -> Tac 'a) : Tac (list 'a) = + map f (cur_binders ()) + +let rec revert_all (bs:binders) : Tac unit = + match bs with + | [] -> () + | _::tl -> revert (); + revert_all tl + +(* Some syntax utility functions *) +let bv_to_term (bv : bv) : Tac term = pack (Tv_Var bv) + +[@@coercion] +let binder_to_term (b : binder) : Tac term = + let bview = inspect_binder b in + bv_to_term bview.binder_bv + +let binder_sort (b : binder) : Tac typ = + (inspect_binder b).binder_sort + +// Cannot define this inside `assumption` due to #1091 +private +let rec __assumption_aux (bs : binders) : Tac unit = + match bs with + | [] -> + fail "no assumption matches goal" + | b::bs -> + let t = binder_to_term b in + try exact t with | _ -> + try (apply (`FStar.Squash.return_squash); + exact t) with | _ -> + __assumption_aux bs + +let assumption () : Tac unit = + __assumption_aux (cur_binders ()) + +let destruct_equality_implication (t:term) : Tac (option (formula & term)) = + match term_as_formula t with + | Implies lhs rhs -> + let lhs = term_as_formula' lhs in + begin match lhs with + | Comp (Eq _) _ _ -> Some (lhs, rhs) + | _ -> None + end + | _ -> None + +private +let __eq_sym #t (a b : t) : Lemma ((a == b) == (b == a)) = + FStar.PropositionalExtensionality.apply (a==b) (b==a) + +(** Like [rewrite], but works with equalities [v == e] and [e == v] *) +let rewrite' (b:binder) : Tac unit = + ((fun () -> rewrite b) + <|> (fun () -> binder_retype b; + apply_lemma (`__eq_sym); + rewrite b) + <|> (fun () -> fail "rewrite' failed")) + () + +let rec try_rewrite_equality (x:term) (bs:binders) : Tac unit = + match bs with + | [] -> () + | x_t::bs -> + begin match term_as_formula (type_of_binder x_t) with + | Comp (Eq _) y _ -> + if term_eq x y + then rewrite x_t + else try_rewrite_equality x bs + | _ -> + try_rewrite_equality x bs + end + +let rec rewrite_all_context_equalities (bs:binders) : Tac unit = + match bs with + | [] -> () + | x_t::bs -> begin + (try rewrite x_t with | _ -> ()); + 
rewrite_all_context_equalities bs + end + +let rewrite_eqs_from_context () : Tac unit = + rewrite_all_context_equalities (cur_binders ()) + +let rewrite_equality (t:term) : Tac unit = + try_rewrite_equality t (cur_binders ()) + +let unfold_def (t:term) : Tac unit = + match inspect t with + | Tv_FVar fv -> + let n = implode_qn (inspect_fv fv) in + norm [delta_fully [n]] + | _ -> fail "unfold_def: term is not a fv" + +(** Rewrites left-to-right, and bottom-up, given a set of lemmas stating +equalities. The lemmas need to prove *propositional* equalities, that +is, using [==]. *) +let l_to_r (lems:list term) : Tac unit = + let first_or_trefl () : Tac unit = + fold_left (fun k l () -> + (fun () -> apply_lemma_rw l) + `or_else` k) + trefl lems () in + pointwise first_or_trefl + +let mk_squash (t : term) : term = + let sq : term = pack_ln (Tv_FVar (pack_fv squash_qn)) in + mk_e_app sq [t] + +let mk_sq_eq (t1 t2 : term) : term = + let eq : term = pack_ln (Tv_FVar (pack_fv eq2_qn)) in + mk_squash (mk_e_app eq [t1; t2]) + +(** Rewrites all appearances of a term [t1] in the goal into [t2]. +Creates a new goal for [t1 == t2]. *) +let grewrite (t1 t2 : term) : Tac unit = + let e = tcut (mk_sq_eq t1 t2) in + let e = pack_ln (Tv_Var (bv_of_binder e)) in + pointwise (fun () -> + (* If the LHS is a uvar, do nothing, so we do not instantiate it. *) + let is_uvar = + match term_as_formula (cur_goal()) with + | Comp (Eq _) lhs rhs -> + (match inspect_ln lhs with + | Tv_Uvar _ _ -> true + | _ -> false) + | _ -> false + in + if is_uvar + then trefl () + else try exact e with | _ -> trefl ()) + +private +let __un_sq_eq (#a:Type) (x y : a) (_ : (x == y)) : Lemma (x == y) = () + +(** A wrapper to [grewrite] which takes a binder of an equality type *) +let grewrite_eq (b:binder) : Tac unit = + match term_as_formula (type_of_binder b) with + | Comp (Eq _) l r -> + grewrite l r; + iseq [idtac; (fun () -> exact (binder_to_term b))] + | _ -> + begin match term_as_formula' (type_of_binder b) with + | Comp (Eq _) l r -> + grewrite l r; + iseq [idtac; (fun () -> apply_lemma (`__un_sq_eq); + exact (binder_to_term b))] + | _ -> + fail "grewrite_eq: binder type is not an equality" + end + +private val push1 : (#p:Type) -> (#q:Type) -> + squash (p ==> q) -> + squash p -> + squash q +private let push1 #p #q f u = () + +private val push1' : (#p:Type) -> (#q:Type) -> + (p ==> q) -> + squash p -> + squash q +private let push1' #p #q f u = () + +(* + * Some easier applying, which should prevent frustration + * (or cause more when it doesn't do what you wanted to) + *) +val apply_squash_or_lem : d:nat -> term -> Tac unit +let rec apply_squash_or_lem d t = + (* Before anything, try a vanilla apply and apply_lemma *) + try apply t with | _ -> + try apply (`FStar.Squash.return_squash); apply t with | _ -> + try apply_lemma t with | _ -> + + // Fuel cutoff, just in case. + if d <= 0 then fail "mapply: out of fuel" else begin + + let ty = tc (cur_env ()) t in + let tys, c = collect_arr ty in + match inspect_comp c with + | C_Lemma pre post _ -> + begin + let post = `((`#post) ()) in (* unthunk *) + let post = norm_term [] post in + (* Is the lemma an implication? We can try to intro *) + match term_as_formula' post with + | Implies p q -> + apply_lemma (`push1); + apply_squash_or_lem (d-1) t + + | _ -> + fail "mapply: can't apply (1)" + end + | C_Total rt -> + begin match unsquash_term rt with + (* If the function returns a squash, just apply it, since our goals are squashed *) + | Some rt -> + // DUPLICATED, refactor! 
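+ // Same shape as the C_Lemma case above: normalize the result type and,
+ // if it is an implication, push through it with `push1` and recurse.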
+ begin + let rt = norm_term [] rt in + (* Is the lemma an implication? We can try to intro *) + match term_as_formula' rt with + | Implies p q -> + apply_lemma (`push1); + apply_squash_or_lem (d-1) t + + | _ -> + fail "mapply: can't apply (1)" + end + + (* If not, we can try to introduce the squash ourselves first *) + | None -> + // DUPLICATED, refactor! + begin + let rt = norm_term [] rt in + (* Is the lemma an implication? We can try to intro *) + match term_as_formula' rt with + | Implies p q -> + apply_lemma (`push1); + apply_squash_or_lem (d-1) t + + | _ -> + apply (`FStar.Squash.return_squash); + apply t + end + end + | _ -> fail "mapply: can't apply (2)" + end + +(* `m` is for `magic` *) +let mapply (t : term) : Tac unit = + apply_squash_or_lem 10 t + + +private +let admit_dump_t () : Tac unit = + dump "Admitting"; + apply (`admit) + +val admit_dump : #a:Type -> (#[admit_dump_t ()] x : (unit -> Admit a)) -> unit -> Admit a +let admit_dump #a #x () = x () + +private +let magic_dump_t () : Tac unit = + dump "Admitting"; + apply (`magic); + exact (`()); + () + +val magic_dump : #a:Type -> (#[magic_dump_t ()] x : a) -> unit -> Tot a +let magic_dump #a #x () = x + +let change_with t1 t2 : Tac unit = + focus (fun () -> + grewrite t1 t2; + iseq [idtac; trivial] + ) + +let change_sq (t1 : term) : Tac unit = + change (mk_e_app (`squash) [t1]) + +let finish_by (t : unit -> Tac 'a) : Tac 'a = + let x = t () in + or_else qed (fun () -> fail "finish_by: not finished"); + x + +let solve_then #a #b (t1 : unit -> Tac a) (t2 : a -> Tac b) : Tac b = + dup (); + let x = focus (fun () -> finish_by t1) in + let y = t2 x in + trefl (); + y + +let add_elem (t : unit -> Tac 'a) : Tac 'a = focus (fun () -> + apply (`Cons); + focus (fun () -> + let x = t () in + qed (); + x + ) + ) + +(* + * Specialize a function by partially evaluating it + * For example: + * let rec foo (l:list int) (x:int) :St int = + match l with + | [] -> x + | hd::tl -> x + foo tl x + + let f :int -> St int = synth_by_tactic (specialize (foo [1; 2]) [%`foo]) + + * would make the definition of f as x + x + x + * + * f is the term that needs to be specialized + * l is the list of names to be delta-ed + *) +let specialize (#a:Type) (f:a) (l:list string) :unit -> Tac unit + = fun () -> solve_then (fun () -> exact (quote f)) (fun () -> norm [delta_only l; iota; zeta]) + +let tlabel (l:string) = + match goals () with + | [] -> fail "tlabel: no goals" + | h::t -> + set_goals (set_label l h :: t) + +let tlabel' (l:string) = + match goals () with + | [] -> fail "tlabel': no goals" + | h::t -> + let h = set_label (l ^ get_label h) h in + set_goals (h :: t) + +let focus_all () : Tac unit = + set_goals (goals () @ smt_goals ()); + set_smt_goals [] + +private +let rec extract_nth (n:nat) (l : list 'a) : option ('a & list 'a) = + match n, l with + | _, [] -> None + | 0, hd::tl -> Some (hd, tl) + | _, hd::tl -> begin + match extract_nth (n-1) tl with + | Some (hd', tl') -> Some (hd', hd::tl') + | None -> None + end + +let bump_nth (n:pos) : Tac unit = + // n-1 since goal numbering begins at 1 + match extract_nth (n - 1) (goals ()) with + | None -> fail "bump_nth: not that many goals" + | Some (h, t) -> set_goals (h :: t) + +let rec destruct_list (t : term) : Tac (list term) = + let head, args = collect_app t in + match inspect_ln head, args with + | Tv_FVar fv, [(a1, Q_Explicit); (a2, Q_Explicit)] + | Tv_FVar fv, [(_, Q_Implicit); (a1, Q_Explicit); (a2, Q_Explicit)] -> + if inspect_fv fv = cons_qn + then a1 :: destruct_list a2 + else raise 
NotAListLiteral + | Tv_FVar fv, _ -> + if inspect_fv fv = nil_qn + then [] + else raise NotAListLiteral + | _ -> + raise NotAListLiteral + +private let get_match_body () : Tac term = + match unsquash_term (cur_goal ()) with + | None -> fail "" + | Some t -> match inspect_unascribe t with + | Tv_Match sc _ _ -> sc + | _ -> fail "Goal is not a match" + +private let rec last (x : list 'a) : Tac 'a = + match x with + | [] -> fail "last: empty list" + | [x] -> x + | _::xs -> last xs + +(** When the goal is [match e with | p1 -> e1 ... | pn -> en], +destruct it into [n] goals for each possible case, including an +hypothesis for [e] matching the corresponding pattern. *) +let branch_on_match () : Tac unit = + focus (fun () -> + let x = get_match_body () in + let _ = t_destruct x in + iterAll (fun () -> + let bs = repeat intro in + let b = last bs in (* this one is the equality *) + grewrite_eq b; + norm [iota]) + ) + +(** When the argument [i] is non-negative, [nth_binder] grabs the nth +binder in the current goal. When it is negative, it grabs the (-i-1)th +binder counting from the end of the goal. That is, [nth_binder (-1)] +will return the last binder, [nth_binder (-2)] the second to last, and +so on. *) +let nth_binder (i:int) : Tac binder = + let bs = cur_binders () in + let k : int = if i >= 0 then i else List.Tot.Base.length bs + i in + let k : nat = if k < 0 then fail "not enough binders" else k in + match List.Tot.Base.nth bs k with + | None -> fail "not enough binders" + | Some b -> b + +(** [mk_abs [x1; ...; xn] t] returns the term [fun x1 ... xn -> t] *) +let rec mk_abs (args : list binder) (t : term) : Tac term (decreases args) = + match args with + | [] -> t + | a :: args' -> + let t' = mk_abs args' t in + pack (Tv_Abs a t') + +(** [string_to_term_with_lb [(id1, t1); ...; (idn, tn)] e s] parses +[s] as a term in environment [e] augmented with bindings +[id1, t1], ..., [idn, tn]. *) +let string_to_term_with_lb + (letbindings: list (string & term)) + (e: env) (t: string): Tac term + = let unk = pack_ln Tv_Unknown in + let e, lb_bvs = fold_left (fun (e, lb_bvs) (i, v) -> + let e, bv = push_bv_dsenv e i in + e, (v,bv)::lb_bvs + ) (e, []) letbindings in + let t = string_to_term e t in + fold_left (fun t (i, bv) -> pack (Tv_Let false [] bv unk i t)) t lb_bvs + +private +val lem_trans : (#a:Type) -> (#x:a) -> (#z:a) -> (#y:a) -> + squash (x == y) -> squash (y == z) -> Lemma (x == z) +private +let lem_trans #a #x #z #y e1 e2 = () + +(** Transivity of equality: reduce [x == z] to [x == ?u] and [?u == z]. *) +let trans () : Tac unit = apply_lemma (`lem_trans) diff --git a/stage0/ulib/FStar.Tactics.V1.Logic.Lemmas.fst b/stage0/ulib/FStar.Tactics.V1.Logic.Lemmas.fst new file mode 100644 index 00000000000..6c659076957 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V1.Logic.Lemmas.fst @@ -0,0 +1,86 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Tactics.V1.Logic.Lemmas + +let fa_intro_lem (#a:Type) (#p:a -> Type) (f:(x:a -> squash (p x))) : Lemma (forall (x:a). p x) = + FStar.Classical.lemma_forall_intro_gtot + ((fun x -> FStar.IndefiniteDescription.elim_squash (f x)) <: (x:a -> GTot (p x))) + +let split_lem #a #b sa sb = () + +let imp_intro_lem #a #b f = + FStar.Classical.give_witness (FStar.Classical.arrow_to_impl (fun (x:squash a) -> FStar.Squash.bind_squash x f)) + +let __lemma_to_squash #req #ens (_ : squash req) (h : (unit -> Lemma (requires req) (ensures ens))) : squash ens = + h () + +let vbind #p #q sq f = FStar.Classical.give_witness_from_squash (FStar.Squash.bind_squash sq f) + +let or_ind #p #q #phi o l r = () + +let bool_ind b phi l r = () + +let or_intro_1 #p #q _ = () + +let or_intro_2 #p #q _ = () + +let __and_elim #p #q #phi p_and_q f = () + +let __and_elim' #p #q #phi p_and_q f = () + +let __witness #a x #p _ = () + +let __elim_exists' #t (#pred : t -> Type0) #goal (h : (exists x. pred x)) + (k : (x:t -> pred x -> squash goal)) : squash goal = + FStar.Squash.bind_squash #(x:t & pred x) h (fun (|x, pf|) -> k x pf) + +let __forall_inst #t (#pred : t -> Type0) (h : (forall x. pred x)) (x : t) : squash (pred x) = + () + +let __forall_inst_sq #t (#pred : t -> Type0) (h : squash (forall x. pred x)) (x : t) : squash (pred x) = + () + +let sklem0 (#a:Type) (#p : a -> Type0) ($v : (exists (x:a). p x)) (phi:Type0) : + Lemma (requires (forall x. p x ==> phi)) + (ensures phi) = () + +let lemma_from_squash #a #b f x = let _ = f x in assert (b x) + +let lem1_fa #a #pre #post + ($lem : (x:a -> Lemma (requires pre x) (ensures post x))) : + Lemma (forall (x:a). pre x ==> post x) = + let l' x : Lemma (pre x ==> post x) = + Classical.move_requires lem x + in + Classical.forall_intro l' + +let lem2_fa #a #b #pre #post + ($lem : (x:a -> y:b -> Lemma (requires pre x y) (ensures post x y))) : + Lemma (forall (x:a) (y:b). pre x y ==> post x y) = + let l' x y : Lemma (pre x y ==> post x y) = + Classical.move_requires (lem x) y + in + Classical.forall_intro_2 l' + +let lem3_fa #a #b #c #pre #post + ($lem : (x:a -> y:b -> z:c -> Lemma (requires pre x y z) (ensures post x y z))) : + Lemma (forall (x:a) (y:b) (z:c). pre x y z ==> post x y z) = + let l' x y z : Lemma (pre x y z ==> post x y z) = + Classical.move_requires (lem x y) z + in + Classical.forall_intro_3 l' + +let revert_squash #a #b s x = let x : (_:unit{forall x. b x}) = s in () \ No newline at end of file diff --git a/stage0/ulib/FStar.Tactics.V1.Logic.Lemmas.fsti b/stage0/ulib/FStar.Tactics.V1.Logic.Lemmas.fsti new file mode 100644 index 00000000000..ac111b0f954 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V1.Logic.Lemmas.fsti @@ -0,0 +1,82 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V1.Logic.Lemmas + +val fa_intro_lem (#a:Type) (#p:a -> Type) (f:(x:a -> squash (p x))) : Lemma (forall (x:a). 
p x) + +val split_lem : (#a:Type) -> (#b:Type) -> + squash a -> squash b -> Lemma (a /\ b) +val imp_intro_lem : (#a:Type) -> (#b : Type) -> + (a -> squash b) -> + Lemma (a ==> b) +val __lemma_to_squash #req #ens (_ : squash req) (h : (unit -> Lemma (requires req) (ensures ens))) : squash ens + +val vbind : (#p:Type) -> (#q:Type) -> squash p -> (p -> squash q) -> Lemma q + +val or_ind : (#p:Type) -> (#q:Type) -> (#phi:Type) -> + (p \/ q) -> + (squash (p ==> phi)) -> + (squash (q ==> phi)) -> + Lemma phi + +val bool_ind : (b:bool) -> (phi:Type) -> (squash (b == true ==> phi)) -> + (squash (b == false ==> phi)) -> + Lemma phi + +val or_intro_1 : (#p:Type) -> (#q:Type) -> squash p -> Lemma (p \/ q) + +val or_intro_2 : (#p:Type) -> (#q:Type) -> squash q -> Lemma (p \/ q) + +val __and_elim : (#p:Type) -> (#q:Type) -> (#phi:Type) -> + (p /\ q) -> + squash (p ==> q ==> phi) -> + Lemma phi + +val __and_elim' : (#p:Type) -> (#q:Type) -> (#phi:Type) -> + squash (p /\ q) -> + squash (p ==> q ==> phi) -> + Lemma phi + +val __witness : (#a:Type) -> (x:a) -> (#p:(a -> Type)) -> squash (p x) -> squash (exists (x:a). p x) + +val __elim_exists' #t (#pred : t -> Type0) #goal (h : (exists x. pred x)) + (k : (x:t -> pred x -> squash goal)) : squash goal + +val __forall_inst #t (#pred : t -> Type0) (h : (forall x. pred x)) (x : t) : squash (pred x) + +val __forall_inst_sq #t (#pred : t -> Type0) (h : squash (forall x. pred x)) (x : t) : squash (pred x) + +val sklem0 (#a:Type) (#p : a -> Type0) ($v : (exists (x:a). p x)) (phi:Type0) : + Lemma (requires (forall x. p x ==> phi)) + (ensures phi) + +val lemma_from_squash : #a:Type -> #b:(a -> Type) -> (x:a -> squash (b x)) -> x:a -> Lemma (b x) + +val lem1_fa #a #pre #post + ($lem : (x:a -> Lemma (requires pre x) (ensures post x))) : + Lemma (forall (x:a). pre x ==> post x) + +val lem2_fa #a #b #pre #post + ($lem : (x:a -> y:b -> Lemma (requires pre x y) (ensures post x y))) : + Lemma (forall (x:a) (y:b). pre x y ==> post x y) + +val lem3_fa #a #b #c #pre #post + ($lem : (x:a -> y:b -> z:c -> Lemma (requires pre x y z) (ensures post x y z))) : + Lemma (forall (x:a) (y:b) (z:c). pre x y z ==> post x y z) + +val revert_squash : (#a:Type) -> (#b : (a -> Type)) -> + (squash (forall (x:a). b x)) -> + x:a -> squash (b x) diff --git a/stage0/ulib/FStar.Tactics.V1.Logic.fst b/stage0/ulib/FStar.Tactics.V1.Logic.fst new file mode 100644 index 00000000000..de2b2c293c5 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V1.Logic.fst @@ -0,0 +1,251 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V1.Logic + +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V1.Builtins +open FStar.Tactics.V1.Derived +open FStar.Tactics.Util +open FStar.Reflection.V1 +open FStar.Reflection.V1.Formula + +open FStar.Tactics.V1.Logic.Lemmas + +(** Revert an introduced binder as a forall. *) +let l_revert () : Tac unit = + revert (); + apply (`revert_squash) + +(** Repeated [l_revert]. 
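+Reverts one binder per element of [bs]; [visit] below uses it to undo the
+[forall_intros] performed before recursing.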
*) +let rec l_revert_all (bs:binders) : Tac unit = + match bs with + | [] -> () + | _::tl -> begin l_revert (); l_revert_all tl end + +(** Introduce a forall. *) +let forall_intro () : Tac binder = + apply_lemma (`fa_intro_lem); + intro () + +(** Introduce a forall, with some given name. *) +let forall_intro_as (s:string) : Tac binder = + apply_lemma (`fa_intro_lem); + intro_as s + +(** Repeated [forall_intro]. *) +let forall_intros () : Tac binders = repeat1 forall_intro + +(** Split a conjunction into two goals. *) +let split () : Tac unit = + try apply_lemma (`split_lem) + with | _ -> fail "Could not split goal" + +(** Introduce an implication. *) +let implies_intro () : Tac binder = + apply_lemma (`imp_intro_lem); + intro () + +let implies_intro_as (s:string) : Tac binder = + apply_lemma (`imp_intro_lem); + intro_as s + +(** Repeated [implies_intro]. *) +let implies_intros () : Tac binders = repeat1 implies_intro + +(** "Logical" intro: introduce a forall or an implication. *) +let l_intro () = forall_intro `or_else` implies_intro + +(** Repeated [l]. *) +let l_intros () = repeat l_intro + +let squash_intro () : Tac unit = + apply (`FStar.Squash.return_squash) + +let l_exact (t:term) = + try exact t with + | _ -> (squash_intro (); exact t) + +let hyp (b:binder) : Tac unit = l_exact (binder_to_term b) + +let pose_lemma (t : term) : Tac binder = + let c = tcc (cur_env ()) t in + let pre, post = + match inspect_comp c with + | C_Lemma pre post _ -> pre, post + | _ -> fail "" + in + let post = `((`#post) ()) in (* unthunk *) + let post = norm_term [] post in + (* If the precondition is trivial, do not cut by it *) + match term_as_formula' pre with + | True_ -> + pose (`(__lemma_to_squash #(`#pre) #(`#post) () (fun () -> (`#t)))) + | _ -> + let reqb = tcut (`squash (`#pre)) in + + let b = pose (`(__lemma_to_squash #(`#pre) #(`#post) (`#(binder_to_term reqb)) (fun () -> (`#t)))) in + flip (); + ignore (trytac trivial); + b + +let explode () : Tac unit = + ignore ( + repeatseq (fun () -> first [(fun () -> ignore (l_intro ())); + (fun () -> ignore (split ()))])) + +let rec visit (callback:unit -> Tac unit) : Tac unit = + focus (fun () -> + or_else callback + (fun () -> + let g = cur_goal () in + match term_as_formula g with + | Forall _b _sort _phi -> + let binders = forall_intros () in + seq (fun () -> visit callback) (fun () -> l_revert_all binders) + | And p q -> + seq split (fun () -> visit callback) + | Implies p q -> + let _ = implies_intro () in + seq (fun () -> visit callback) l_revert + | _ -> + () + ) + ) + +let rec simplify_eq_implication () : Tac unit = + let e = cur_env () in + let g = cur_goal () in + let r = destruct_equality_implication g in + match r with + | None -> + fail "Not an equality implication" + | Some (_, rhs) -> + let eq_h = implies_intro () in // G, eq_h:x=e |- P + rewrite eq_h; // G, eq_h:x=e |- P[e/x] + clear_top (); // G |- P[e/x] + visit simplify_eq_implication + +let rewrite_all_equalities () : Tac unit = + visit simplify_eq_implication + +let rec unfold_definition_and_simplify_eq (tm:term) : Tac unit = + let g = cur_goal () in + match term_as_formula g with + | App hd arg -> + if term_eq hd tm + then trivial () + else () + | _ -> begin + let r = destruct_equality_implication g in + match r with + | None -> fail "Not an equality implication" + | Some (_, rhs) -> + let eq_h = implies_intro () in + rewrite eq_h; + clear_top (); + visit (fun () -> unfold_definition_and_simplify_eq tm) + end + +(** A tactic to unsquash a hypothesis. 
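+Given [t : squash p], it introduces a hypothesis of type [p] into the context
+and returns a term referring to it (by applying the [vbind] lemma and
+introducing the resulting binder).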
Perhaps you are looking +for [unsquash_term]. *) +let unsquash (t:term) : Tac term = + let v = `vbind in + apply_lemma (mk_e_app v [t]); + let b = intro () in + pack_ln (Tv_Var (bv_of_binder b)) + +let cases_or (o:term) : Tac unit = + apply_lemma (mk_e_app (`or_ind) [o]) + +let cases_bool (b:term) : Tac unit = + let bi = `bool_ind in + seq (fun () -> apply_lemma (mk_e_app bi [b])) + (fun () -> let _ = trytac (fun () -> let b = implies_intro () in rewrite b; clear_top ()) in ()) + +let left () : Tac unit = + apply_lemma (`or_intro_1) + +let right () : Tac unit = + apply_lemma (`or_intro_2) + +let and_elim (t : term) : Tac unit = + begin + try apply_lemma (`(__and_elim (`#t))) + with | _ -> apply_lemma (`(__and_elim' (`#t))) + end + +let destruct_and (t : term) : Tac (binder & binder) = + and_elim t; + (implies_intro (), implies_intro ()) + +let witness (t : term) : Tac unit = + apply_raw (`__witness); + exact t + +(* returns witness and proof as binders *) +let elim_exists (t : term) : Tac (binder & binder) = + apply_lemma (`(__elim_exists' (`#(t)))); + let x = intro () in + let pf = intro () in + (x, pf) + +let instantiate (fa : term) (x : term) : Tac binder = + try pose (`__forall_inst_sq (`#fa) (`#x)) with | _ -> + try pose (`__forall_inst (`#fa) (`#x)) with | _ -> + fail "could not instantiate" + +let instantiate_as (fa : term) (x : term) (s : string) : Tac binder = + let b = instantiate fa x in + rename_to b s + +private +let rec sk_binder' (acc:binders) (b:binder) : Tac (binders & binder) = + focus (fun () -> + try + apply_lemma (`(sklem0 (`#(binder_to_term b)))); + if ngoals () <> 1 then fail "no"; + clear b; + let bx = forall_intro () in + let b' = implies_intro () in + sk_binder' (bx::acc) b' (* We might have introduced a new existential, so possibly recurse *) + with | _ -> (acc, b) (* If the above failed, just return *) + ) + +(* Skolemizes a given binder for an existential, returning the introduced new binders + * and the skolemized formula. *) +let sk_binder b = sk_binder' [] b + +let skolem () = + let bs = binders_of_env (cur_env ()) in + map sk_binder bs + +let easy_fill () = + let _ = repeat intro in + (* If the goal is `a -> Lemma b`, intro will fail, try to use this switch *) + let _ = trytac (fun () -> apply (`lemma_from_squash); intro ()) in + smt () + +let easy #a #x = x + +(** Add a lemma into the local context, quantified for all arguments. +Only works for lemmas with up to 3 arguments for now. It is expected +that `t` is a top-level name, this has not been battle-tested for other +kinds of terms. *) +let using_lemma (t : term) : Tac binder = + try pose_lemma (`(lem1_fa (`#t))) with | _ -> + try pose_lemma (`(lem2_fa (`#t))) with | _ -> + try pose_lemma (`(lem3_fa (`#t))) with | _ -> + fail #binder "using_lemma: failed to instantiate" diff --git a/stage0/ulib/FStar.Tactics.V1.Logic.fsti b/stage0/ulib/FStar.Tactics.V1.Logic.fsti new file mode 100644 index 00000000000..8b2ebf89a32 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V1.Logic.fsti @@ -0,0 +1,151 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V1.Logic + +open FStar.Tactics.Effect +open FStar.Reflection.V1 +open FStar.Reflection.V1.Formula +open FStar.Tactics.V1.Logic.Lemmas {} (* bring lemmas into TC scope *) + +(* Repeated to avoid importing FStar.Tactics.V1.Derived. *) +private let cur_goal () : Tac typ = + let open FStar.Stubs.Tactics.Types in + let open FStar.Stubs.Tactics.V1.Builtins in + match goals_of (get ()) with + | g::_ -> goal_type g + | _ -> raise (TacticFailure (mkmsg "no more goals", None)) + +(** Returns the current goal as a [formula]. *) +let cur_formula () : Tac formula = term_as_formula (cur_goal ()) + +(** Revert an introduced binder as a forall. *) +[@@plugin] +val l_revert () : Tac unit + +(** Repeated [l_revert]. *) +[@@plugin] +val l_revert_all (bs:binders) : Tac unit + +(** Introduce a forall. *) +[@@plugin] +val forall_intro () : Tac binder + +(** Introduce a forall, with some given name. *) +[@@plugin] +val forall_intro_as (s:string) : Tac binder + +(** Repeated [forall_intro]. *) +[@@plugin] +val forall_intros () : Tac binders + +(** Split a conjunction into two goals. *) +[@@plugin] +val split () : Tac unit + +(** Introduce an implication. *) +[@@plugin] +val implies_intro () : Tac binder + +[@@plugin] +val implies_intro_as (s:string) : Tac binder + +(** Repeated [implies_intro]. *) +[@@plugin] +val implies_intros () : Tac binders + +(** "Logical" intro: introduce a forall or an implication. *) +[@@plugin] +val l_intro () : Tac binder + +(** Repeated [l]. *) +[@@plugin] +val l_intros () : Tac (list binder) + +[@@plugin] +val squash_intro () : Tac unit + +[@@plugin] +val l_exact (t:term) : Tac unit + +[@@plugin] +val hyp (b:binder) : Tac unit + +[@@plugin] +val pose_lemma (t : term) : Tac binder + +[@@plugin] +val explode () : Tac unit + +[@@plugin] +val simplify_eq_implication () : Tac unit + +[@@plugin] +val rewrite_all_equalities () : Tac unit + +[@@plugin] +val unfold_definition_and_simplify_eq (tm:term) : Tac unit + +(** A tactic to unsquash a hypothesis. Perhaps you are looking +for [unsquash_term]. *) +[@@plugin] +val unsquash (t:term) : Tac term + +[@@plugin] +val cases_or (o:term) : Tac unit + +[@@plugin] +val cases_bool (b:term) : Tac unit + +[@@plugin] +val left () : Tac unit + +[@@plugin] +val right () : Tac unit + +[@@plugin] +val and_elim (t : term) : Tac unit + +[@@plugin] +val destruct_and (t : term) : Tac (binder & binder) + +[@@plugin] +val witness (t : term) : Tac unit + +(* returns witness and proof as binders *) +[@@plugin] +val elim_exists (t : term) : Tac (binder & binder) + +[@@plugin] +val instantiate (fa : term) (x : term) : Tac binder + +[@@plugin] +val instantiate_as (fa : term) (x : term) (s : string) : Tac binder + +[@@plugin] +val skolem () : Tac (list (binders & binder)) + +[@@plugin] +val easy_fill () : Tac unit + +[@@plugin] +val easy : #a:Type -> (#[easy_fill ()] _ : a) -> a + +(** Add a lemma into the local context, quantified for all arguments. +Only works for lemmas with up to 3 arguments for now. It is expected +that `t` is a top-level name, this has not been battle-tested for other +kinds of terms. 
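+For example (sketch, with a hypothetical lemma name): given
+[val add_zero : x:int -> Lemma (x + 0 == x)], [using_lemma (`add_zero)]
+introduces a hypothesis of (roughly) type [squash (forall x. x + 0 == x)]
+into the local context and returns its binder.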
*) +[@@plugin] +val using_lemma (t : term) : Tac binder diff --git a/stage0/ulib/FStar.Tactics.V1.SyntaxHelpers.fst b/stage0/ulib/FStar.Tactics.V1.SyntaxHelpers.fst new file mode 100644 index 00000000000..919809cffaf --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V1.SyntaxHelpers.fst @@ -0,0 +1,95 @@ +module FStar.Tactics.V1.SyntaxHelpers + +open FStar.Reflection.V1 +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.Types +open FStar.Stubs.Tactics.V1.Builtins + +(* These are fully-named variants of functions found in FStar.Reflection *) + +private +let rec collect_arr' (bs : list binder) (c : comp) : Tac (list binder & comp) = + begin match inspect_comp c with + | C_Total t -> + begin match inspect t with + | Tv_Arrow b c -> + collect_arr' (b::bs) c + | _ -> + (bs, c) + end + | _ -> (bs, c) + end + +val collect_arr_bs : typ -> Tac (list binder & comp) +let collect_arr_bs t = + let (bs, c) = collect_arr' [] (pack_comp (C_Total t)) in + (List.Tot.Base.rev bs, c) + +val collect_arr : typ -> Tac (list typ & comp) +let collect_arr t = + let (bs, c) = collect_arr' [] (pack_comp (C_Total t)) in + let ts = List.Tot.Base.map type_of_binder bs in + (List.Tot.Base.rev ts, c) + +private +let rec collect_abs' (bs : list binder) (t : term) : Tac (list binder & term) (decreases t) = + match inspect t with + | Tv_Abs b t' -> + collect_abs' (b::bs) t' + | _ -> (bs, t) + +val collect_abs : term -> Tac (list binder & term) +let collect_abs t = + let (bs, t') = collect_abs' [] t in + (List.Tot.Base.rev bs, t') + +(* Copied from FStar.Tactics.Derived *) +private +let fail (#a:Type) (m:string) = raise #a (TacticFailure (mkmsg m, None)) + +let rec mk_arr (bs: list binder) (cod : comp) : Tac term = + match bs with + | [] -> fail "mk_arr, empty binders" + | [b] -> pack (Tv_Arrow b cod) + | (b::bs) -> + pack (Tv_Arrow b (pack_comp (C_Total (mk_arr bs cod)))) + +let rec mk_arr_curried (bs: list binder) (cod : comp) : Tac term = + match bs with + | [] -> fail "mk_arr, empty binders" + | [b] -> pack_curried (Tv_Arrow b cod) + | (b::bs) -> pack_curried (Tv_Arrow b (pack_comp (C_Total (mk_arr_curried bs cod)))) + +let rec mk_tot_arr (bs: list binder) (cod : term) : Tac term = + match bs with + | [] -> cod + | (b::bs) -> + pack (Tv_Arrow b (pack_comp (C_Total (mk_tot_arr bs cod)))) + +let lookup_lb_view (lbs:list letbinding) (nm:name) : Tac lb_view = + let o = FStar.List.Tot.Base.find + (fun lb -> + let lbv = inspect_lb lb in + (inspect_fv lbv.lb_fv) = nm) + lbs + in + match o with + | Some lb -> inspect_lb lb + | None -> fail "lookup_lb_view: Name not in let group" + +let rec inspect_unascribe (t:term) : Tac (tv:term_view{notAscription tv}) = + match inspect t with + | Tv_AscribedT t _ _ _ + | Tv_AscribedC t _ _ _ -> + inspect_unascribe t + | tv -> tv + +(* Helpers for dealing with nested applications and arrows *) +let rec collect_app' (args : list argv) (t : term) + : Tac (term & list argv) = + match inspect_unascribe t with + | Tv_App l r -> + collect_app' (r::args) l + | _ -> (t, args) + +let collect_app = collect_app' [] diff --git a/stage0/ulib/FStar.Tactics.V1.fsti b/stage0/ulib/FStar.Tactics.V1.fsti new file mode 100644 index 00000000000..dfa6041bc38 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V1.fsti @@ -0,0 +1,37 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V1 + +(* I don't expect many uses of tactics without syntax handling *) +include FStar.Stubs.Reflection.Types +include FStar.Reflection.Const +include FStar.Stubs.Reflection.V1.Data +include FStar.Stubs.Reflection.V1.Builtins +include FStar.Reflection.V1.Derived +include FStar.Reflection.V1.Formula +include FStar.Reflection.V1.Compare + +include FStar.Stubs.Tactics.Types +include FStar.Tactics.Effect +include FStar.Stubs.Tactics.V1.Builtins +include FStar.Tactics.V1.Derived +include FStar.Tactics.V1.SyntaxHelpers +include FStar.Tactics.V1.Logic +include FStar.Tactics.Util +include FStar.Tactics.Print +include FStar.Tactics.Visit + +include FStar.Tactics.SMT (* Version agnostic *) diff --git a/stage0/ulib/FStar.Tactics.V2.Bare.fsti b/stage0/ulib/FStar.Tactics.V2.Bare.fsti new file mode 100644 index 00000000000..8cffaa8dd62 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.Bare.fsti @@ -0,0 +1,35 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V2.Bare + +include FStar.Stubs.Reflection.Types +include FStar.Reflection.V2 +include FStar.Reflection.V2.Formula + +include FStar.Stubs.Tactics.Types +include FStar.Tactics.Effect +include FStar.Stubs.Tactics.V2.Builtins +include FStar.Tactics.V2.Derived +include FStar.Tactics.V2.SyntaxHelpers +include FStar.Tactics.V2.Logic +include FStar.Tactics.V2.SyntaxCoercions +include FStar.Tactics.Util +include FStar.Tactics.Print +include FStar.Tactics.Visit +include FStar.Tactics.NamedView +include FStar.Tactics.SMT + +include FStar.Reflection.TermEq.Simple diff --git a/stage0/ulib/FStar.Tactics.V2.Derived.fst b/stage0/ulib/FStar.Tactics.V2.Derived.fst new file mode 100644 index 00000000000..06f6a002612 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.Derived.fst @@ -0,0 +1,946 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Tactics.V2.Derived + +open FStar.Reflection.V2 +open FStar.Reflection.V2.Formula +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.Types +open FStar.Stubs.Tactics.Result +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.Util +open FStar.Tactics.V2.SyntaxHelpers +open FStar.Stubs.VConfig +open FStar.Tactics.NamedView +open FStar.Tactics.V2.SyntaxCoercions +include FStar.Tactics.Names + +module L = FStar.List.Tot.Base +module V = FStar.Tactics.Visit +private let (@) = L.op_At + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +let name_of_bv (bv : bv) : Tac string = + unseal ((inspect_bv bv).ppname) + +let bv_to_string (bv : bv) : Tac string = + (* Could also print type...? *) + name_of_bv bv + +let name_of_binder (b : binder) : Tac string = + unseal b.ppname + +let binder_to_string (b : binder) : Tac string = + // TODO: print aqual, attributes..? or no? + name_of_binder b ^ "@@" ^ string_of_int b.uniq ^ "::(" ^ term_to_string b.sort ^ ")" + +let binding_to_string (b : binding) : Tac string = + unseal b.ppname + +let type_of_var (x : namedv) : Tac typ = + unseal ((inspect_namedv x).sort) + +let type_of_binding (x : binding) : Tot typ = + x.sort + +exception Goal_not_trivial + +let goals () : Tac (list goal) = goals_of (get ()) +let smt_goals () : Tac (list goal) = smt_goals_of (get ()) + +let fail_doc_at (#a:Type) (m:error_message) (r:option range) + : TAC a (fun ps post -> post (Failed (TacticFailure (m, r)) ps)) + = raise #a (TacticFailure (m, r)) + +let fail_doc (#a:Type) (m:error_message) + : TAC a (fun ps post -> post (Failed (TacticFailure (m, None)) ps)) + = raise #a (TacticFailure (m, None)) + +let fail_at (#a:Type) (m:string) (r:option range) + : TAC a (fun ps post -> post (Failed (TacticFailure (mkmsg m, r)) ps)) + = fail_doc_at (mkmsg m) r + +let fail (#a:Type) (m:string) + : TAC a (fun ps post -> post (Failed (TacticFailure (mkmsg m, None)) ps)) + = fail_at m None + +let fail_silently_doc (#a:Type) (m:error_message) + : TAC a (fun _ post -> forall ps. post (Failed (TacticFailure (m, None)) ps)) + = set_urgency 0; + raise #a (TacticFailure (m, None)) + +let fail_silently (#a:Type) (m:string) + : TAC a (fun _ post -> forall ps. post (Failed (TacticFailure (mkmsg m, None)) ps)) + = fail_silently_doc (mkmsg m) + +(** Return the current *goal*, not its type. (Ignores SMT goals) *) +let _cur_goal () : Tac goal = + match goals () with + | [] -> fail "no more goals" + | g::_ -> g + +(** [cur_env] returns the current goal's environment *) +let cur_env () : Tac env = goal_env (_cur_goal ()) + +(** [cur_goal] returns the current goal's type *) +let cur_goal () : Tac typ = goal_type (_cur_goal ()) + +(** [cur_witness] returns the current goal's witness *) +let cur_witness () : Tac term = goal_witness (_cur_goal ()) + +(** [cur_goal_safe] will always return the current goal, without failing. +It must be statically verified that there indeed is a goal in order to +call it. *) +let cur_goal_safe () : TacH goal (requires (fun ps -> ~(goals_of ps == []))) + (ensures (fun ps0 r -> exists g. 
r == Success g ps0)) + = match goals_of (get ()) with + | g :: _ -> g + +let cur_vars () : Tac (list binding) = + vars_of_env (cur_env ()) + +(** Set the guard policy only locally, without affecting calling code *) +let with_policy pol (f : unit -> Tac 'a) : Tac 'a = + let old_pol = get_guard_policy () in + set_guard_policy pol; + let r = f () in + set_guard_policy old_pol; + r + +(** [exact e] will solve a goal [Gamma |- w : t] if [e] has type exactly +[t] in [Gamma]. *) +let exact (t : term) : Tac unit = + with_policy SMT (fun () -> t_exact true false t) + +(** [exact_with_ref e] will solve a goal [Gamma |- w : t] if [e] has +type [t'] where [t'] is a subtype of [t] in [Gamma]. This is a more +flexible variant of [exact]. *) +let exact_with_ref (t : term) : Tac unit = + with_policy SMT (fun () -> t_exact true true t) + +let trivial () : Tac unit = + norm [iota; zeta; reify_; delta; primops; simplify; unmeta]; + let g = cur_goal () in + match term_as_formula g with + | True_ -> exact (`()) + | _ -> raise Goal_not_trivial + +(* Another hook to just run a tactic without goals, just by reusing `with_tactic` *) +let run_tactic (t:unit -> Tac unit) + : Pure unit + (requires (set_range_of (with_tactic (fun () -> trivial (); t ()) (squash True)) (range_of t))) + (ensures (fun _ -> True)) + = () + +(** Ignore the current goal. If left unproven, this will fail after +the tactic finishes. *) +let dismiss () : Tac unit = + match goals () with + | [] -> fail "dismiss: no more goals" + | _::gs -> set_goals gs + +(** Flip the order of the first two goals. *) +let flip () : Tac unit = + let gs = goals () in + match goals () with + | [] | [_] -> fail "flip: less than two goals" + | g1::g2::gs -> set_goals (g2::g1::gs) + +(** Succeed if there are no more goals left, and fail otherwise. *) +let qed () : Tac unit = + match goals () with + | [] -> () + | _ -> fail "qed: not done!" + +(** [debug str] is similar to [print str], but will only print the message +if [--debug Tac] is on. *) +let debug (m:string) : Tac unit = + if debugging () then print m + +(** [smt] will mark the current goal for being solved through the SMT. +This does not immediately run the SMT: it just dumps the goal in the +SMT bin. Note, if you dump a proof-relevant goal there, the engine will +later raise an error. *) +let smt () : Tac unit = + match goals (), smt_goals () with + | [], _ -> fail "smt: no active goals" + | g::gs, gs' -> + begin + set_goals gs; + set_smt_goals (g :: gs') + end + +let idtac () : Tac unit = () + +(** Push the current goal to the back. *) +let later () : Tac unit = + match goals () with + | g::gs -> set_goals (gs @ [g]) + | _ -> fail "later: no goals" + +(** [apply f] will attempt to produce a solution to the goal by an application +of [f] to any amount of arguments (which need to be solved as further goals). +The amount of arguments introduced is the least such that [f a_i] unifies +with the goal's type. *) +let apply (t : term) : Tac unit = + t_apply true false false t + +let apply_noinst (t : term) : Tac unit = + t_apply true true false t + +(** [apply_lemma l] will solve a goal of type [squash phi] when [l] is a +Lemma ensuring [phi]. The arguments to [l] and its requires clause are +introduced as new goals. As a small optimization, [unit] arguments are +discharged by the engine. Just a thin wrapper around [t_apply_lemma]. 
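+For example (sketch, hypothetical lemma name): with
+[val add_comm : x:int -> y:int -> Lemma (x + y == y + x)] in scope, a goal
+[squash (a + b == b + a)] can be closed by [apply_lemma (`add_comm)], with
+[x] and [y] instantiated by unification.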
*) +let apply_lemma (t : term) : Tac unit = + t_apply_lemma false false t + +(** See docs for [t_trefl] *) +let trefl () : Tac unit = + t_trefl false + +(** See docs for [t_trefl] *) +let trefl_guard () : Tac unit = + t_trefl true + +(** See docs for [t_commute_applied_match] *) +let commute_applied_match () : Tac unit = + t_commute_applied_match () + +(** Similar to [apply_lemma], but will not instantiate uvars in the +goal while applying. *) +let apply_lemma_noinst (t : term) : Tac unit = + t_apply_lemma true false t + +let apply_lemma_rw (t : term) : Tac unit = + t_apply_lemma false true t + +(** [apply_raw f] is like [apply], but will ask for all arguments +regardless of whether they appear free in further goals. See the +explanation in [t_apply]. *) +let apply_raw (t : term) : Tac unit = + t_apply false false false t + +(** Like [exact], but allows for the term [e] to have a type [t] only +under some guard [g], adding the guard as a goal. *) +let exact_guard (t : term) : Tac unit = + with_policy Goal (fun () -> t_exact true false t) + +(** (TODO: explain better) When running [pointwise tau] For every +subterm [t'] of the goal's type [t], the engine will build a goal [Gamma +|= t' == ?u] and run [tau] on it. When the tactic proves the goal, +the engine will rewrite [t'] for [?u] in the original goal type. This +is done for every subterm, bottom-up. This allows to recurse over an +unknown goal type. By inspecting the goal, the [tau] can then decide +what to do (to not do anything, use [trefl]). *) +let t_pointwise (d:direction) (tau : unit -> Tac unit) : Tac unit = + let ctrl (t:term) : Tac (bool & ctrl_flag) = + true, Continue + in + let rw () : Tac unit = + tau () + in + ctrl_rewrite d ctrl rw + +(** [topdown_rewrite ctrl rw] is used to rewrite those sub-terms [t] + of the goal on which [fst (ctrl t)] returns true. + + On each such sub-term, [rw] is presented with an equality of goal + of the form [Gamma |= t == ?u]. When [rw] proves the goal, + the engine will rewrite [t] for [?u] in the original goal + type. + + The goal formula is traversed top-down and the traversal can be + controlled by [snd (ctrl t)]: + + When [snd (ctrl t) = 0], the traversal continues down through the + position in the goal term. + + When [snd (ctrl t) = 1], the traversal continues to the next + sub-tree of the goal. + + When [snd (ctrl t) = 2], no more rewrites are performed in the + goal. +*) +let topdown_rewrite (ctrl : term -> Tac (bool & int)) + (rw:unit -> Tac unit) : Tac unit + = let ctrl' (t:term) : Tac (bool & ctrl_flag) = + let b, i = ctrl t in + let f = + match i with + | 0 -> Continue + | 1 -> Skip + | 2 -> Abort + | _ -> fail "topdown_rewrite: bad value from ctrl" + in + b, f + in + ctrl_rewrite TopDown ctrl' rw + +let pointwise (tau : unit -> Tac unit) : Tac unit = t_pointwise BottomUp tau +let pointwise' (tau : unit -> Tac unit) : Tac unit = t_pointwise TopDown tau + +let cur_module () : Tac name = + moduleof (top_env ()) + +let open_modules () : Tac (list name) = + env_open_modules (top_env ()) + +let fresh_uvar (o : option typ) : Tac term = + let e = cur_env () in + uvar_env e o + +let unify (t1 t2 : term) : Tac bool = + let e = cur_env () in + unify_env e t1 t2 + +let unify_guard (t1 t2 : term) : Tac bool = + let e = cur_env () in + unify_guard_env e t1 t2 + +let tmatch (t1 t2 : term) : Tac bool = + let e = cur_env () in + match_env e t1 t2 + +(** [divide n t1 t2] will split the current set of goals into the [n] +first ones, and the rest. 
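+(It fails if [n] is negative.)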
It then runs [t1] on the first set, and [t2] +on the second, returning both results (and concatenating remaining goals). *) +let divide (n:int) (l : unit -> Tac 'a) (r : unit -> Tac 'b) : Tac ('a & 'b) = + if n < 0 then + fail "divide: negative n"; + let gs, sgs = goals (), smt_goals () in + let gs1, gs2 = List.Tot.Base.splitAt n gs in + + set_goals gs1; set_smt_goals []; + let x = l () in + let gsl, sgsl = goals (), smt_goals () in + + set_goals gs2; set_smt_goals []; + let y = r () in + let gsr, sgsr = goals (), smt_goals () in + + set_goals (gsl @ gsr); set_smt_goals (sgs @ sgsl @ sgsr); + (x, y) + +let rec iseq (ts : list (unit -> Tac unit)) : Tac unit = + match ts with + | t::ts -> let _ = divide 1 t (fun () -> iseq ts) in () + | [] -> () + +(** [focus t] runs [t ()] on the current active goal, hiding all others +and restoring them at the end. *) +let focus (t : unit -> Tac 'a) : Tac 'a = + match goals () with + | [] -> fail "focus: no goals" + | g::gs -> + let sgs = smt_goals () in + set_goals [g]; set_smt_goals []; + let x = t () in + set_goals (goals () @ gs); set_smt_goals (smt_goals () @ sgs); + x + +(** Similar to [dump], but only dumping the current goal. *) +let dump1 (m : string) = focus (fun () -> dump m) + +let rec mapAll (t : unit -> Tac 'a) : Tac (list 'a) = + match goals () with + | [] -> [] + | _::_ -> let (h, t) = divide 1 t (fun () -> mapAll t) in h::t + +let rec iterAll (t : unit -> Tac unit) : Tac unit = + (* Could use mapAll, but why even build that list *) + match goals () with + | [] -> () + | _::_ -> let _ = divide 1 t (fun () -> iterAll t) in () + +let iterAllSMT (t : unit -> Tac unit) : Tac unit = + let gs, sgs = goals (), smt_goals () in + set_goals sgs; + set_smt_goals []; + iterAll t; + let gs', sgs' = goals (), smt_goals () in + set_goals gs; + set_smt_goals (gs'@sgs') + +(** Runs tactic [t1] on the current goal, and then tactic [t2] on *each* +subgoal produced by [t1]. Each invocation of [t2] runs on a proofstate +with a single goal (they're "focused"). *) +let seq (f : unit -> Tac unit) (g : unit -> Tac unit) : Tac unit = + focus (fun () -> f (); iterAll g) + +let exact_args (qs : list aqualv) (t : term) : Tac unit = + focus (fun () -> + let n = List.Tot.Base.length qs in + let uvs = repeatn n (fun () -> fresh_uvar None) in + let t' = mk_app t (zip uvs qs) in + exact t'; + iter (fun uv -> if is_uvar uv + then unshelve uv + else ()) (L.rev uvs) + ) + +let exact_n (n : int) (t : term) : Tac unit = + exact_args (repeatn n (fun () -> Q_Explicit)) t + +(** [ngoals ()] returns the number of goals *) +let ngoals () : Tac int = List.Tot.Base.length (goals ()) + +(** [ngoals_smt ()] returns the number of SMT goals *) +let ngoals_smt () : Tac int = List.Tot.Base.length (smt_goals ()) + +(* sigh GGG fix names!! *) +let fresh_namedv_named (s:string) : Tac namedv = + let n = fresh () in + pack_namedv ({ + ppname = seal s; + sort = seal (pack Tv_Unknown); + uniq = n; + }) + +(* Create a fresh bound variable (bv), using a generic name. See also +[fresh_namedv_named]. 
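+The generated name is ["x" ^ string_of_int n] for a counter [n] obtained from
+[fresh ()], so the [uniq] field differs across calls.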
*) +let fresh_namedv () : Tac namedv = + let n = fresh () in + pack_namedv ({ + ppname = seal ("x" ^ string_of_int n); + sort = seal (pack Tv_Unknown); + uniq = n; + }) + +let fresh_binder_named (s : string) (t : typ) : Tac simple_binder = + let n = fresh () in + { + ppname = seal s; + sort = t; + uniq = n; + qual = Q_Explicit; + attrs = [] ; + } + +let fresh_binder (t : typ) : Tac simple_binder = + let n = fresh () in + { + ppname = seal ("x" ^ string_of_int n); + sort = t; + uniq = n; + qual = Q_Explicit; + attrs = [] ; + } + +let fresh_implicit_binder (t : typ) : Tac binder = + let n = fresh () in + { + ppname = seal ("x" ^ string_of_int n); + sort = t; + uniq = n; + qual = Q_Implicit; + attrs = [] ; + } + +let guard (b : bool) : TacH unit (requires (fun _ -> True)) + (ensures (fun ps r -> if b + then Success? r /\ Success?.ps r == ps + else Failed? r)) + (* ^ the proofstate on failure is not exactly equal (has the psc set) *) + = + if not b then + fail "guard failed" + else () + +let try_with (f : unit -> Tac 'a) (h : exn -> Tac 'a) : Tac 'a = + match catch f with + | Inl e -> h e + | Inr x -> x + +let trytac (t : unit -> Tac 'a) : Tac (option 'a) = + try Some (t ()) + with + | _ -> None + +let or_else (#a:Type) (t1 : unit -> Tac a) (t2 : unit -> Tac a) : Tac a = + try t1 () + with | _ -> t2 () + +val (<|>) : (unit -> Tac 'a) -> + (unit -> Tac 'a) -> + (unit -> Tac 'a) +let (<|>) t1 t2 = fun () -> or_else t1 t2 + +let first (ts : list (unit -> Tac 'a)) : Tac 'a = + L.fold_right (<|>) ts (fun () -> fail "no tactics to try") () + +let rec repeat (#a:Type) (t : unit -> Tac a) : Tac (list a) = + match catch t with + | Inl _ -> [] + | Inr x -> x :: repeat t + +let repeat1 (#a:Type) (t : unit -> Tac a) : Tac (list a) = + t () :: repeat t + +let repeat' (f : unit -> Tac 'a) : Tac unit = + let _ = repeat f in () + +let norm_term (s : list norm_step) (t : term) : Tac term = + let e = + try cur_env () + with | _ -> top_env () + in + norm_term_env e s t + +(** Join all of the SMT goals into one. This helps when all of them are +expected to be similar, and therefore easier to prove at once by the SMT +solver. TODO: would be nice to try to join them in a more meaningful +way, as the order can matter. *) +let join_all_smt_goals () = + let gs, sgs = goals (), smt_goals () in + set_smt_goals []; + set_goals sgs; + repeat' join; + let sgs' = goals () in // should be a single one + set_goals gs; + set_smt_goals sgs' + +let discard (tau : unit -> Tac 'a) : unit -> Tac unit = + fun () -> let _ = tau () in () + +// TODO: do we want some value out of this? 
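+(* For example (sketch): [repeatseq (fun () -> ignore (l_intro ()))] keeps
+introducing foralls and implications across all generated subgoals until no
+more apply; [explode] in FStar.Tactics.V1.Logic is built this way from
+[repeatseq], [l_intro] and [split]. *)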
+let rec repeatseq (#a:Type) (t : unit -> Tac a) : Tac unit = + let _ = trytac (fun () -> (discard t) `seq` (discard (fun () -> repeatseq t))) in () + +let tadmit () = tadmit_t (`()) + +let admit1 () : Tac unit = + tadmit () + +let admit_all () : Tac unit = + let _ = repeat tadmit in + () + +(** [is_guard] returns whether the current goal arose from a typechecking guard *) +let is_guard () : Tac bool = + Stubs.Tactics.Types.is_guard (_cur_goal ()) + +let skip_guard () : Tac unit = + if is_guard () + then smt () + else fail "" + +let guards_to_smt () : Tac unit = + let _ = repeat skip_guard in + () + +let simpl () : Tac unit = norm [simplify; primops] +let whnf () : Tac unit = norm [weak; hnf; primops; delta] +let compute () : Tac unit = norm [primops; iota; delta; zeta] + +let intros () : Tac (list binding) = intros (-1) + +let intros' () : Tac unit = let _ = intros () in () +let destruct tm : Tac unit = let _ = t_destruct tm in () +let destruct_intros tm : Tac unit = seq (fun () -> let _ = t_destruct tm in ()) intros' + +private val __cut : (a:Type) -> (b:Type) -> (a -> b) -> a -> b +private let __cut a b f x = f x + +let tcut (t:term) : Tac binding = + let g = cur_goal () in + let tt = mk_e_app (`__cut) [t; g] in + apply tt; + intro () + +let pose (t:term) : Tac binding = + apply (`__cut); + flip (); + exact t; + intro () + +let intro_as (s:string) : Tac binding = + let b = intro () in + rename_to b s + +let pose_as (s:string) (t:term) : Tac binding = + let b = pose t in + rename_to b s + +let for_each_binding (f : binding -> Tac 'a) : Tac (list 'a) = + map f (cur_vars ()) + +let rec revert_all (bs:list binding) : Tac unit = + match bs with + | [] -> () + | _::tl -> revert (); + revert_all tl + +let binder_sort (b : binder) : Tot typ = b.sort + +// Cannot define this inside `assumption` due to #1091 +private +let rec __assumption_aux (xs : list binding) : Tac unit = + match xs with + | [] -> + fail "no assumption matches goal" + | b::bs -> + try exact b with | _ -> + try (apply (`FStar.Squash.return_squash); + exact b) with | _ -> + __assumption_aux bs + +let assumption () : Tac unit = + __assumption_aux (cur_vars ()) + +let destruct_equality_implication (t:term) : Tac (option (formula & term)) = + match term_as_formula t with + | Implies lhs rhs -> + let lhs = term_as_formula' lhs in + begin match lhs with + | Comp (Eq _) _ _ -> Some (lhs, rhs) + | _ -> None + end + | _ -> None + +private +let __eq_sym #t (a b : t) : Lemma ((a == b) == (b == a)) = + FStar.PropositionalExtensionality.apply (a==b) (b==a) + +(** Like [rewrite], but works with equalities [v == e] and [e == v] *) +let rewrite' (x:binding) : Tac unit = + ((fun () -> rewrite x) + <|> (fun () -> var_retype x; + apply_lemma (`__eq_sym); + rewrite x) + <|> (fun () -> fail "rewrite' failed")) + () + +let rec try_rewrite_equality (x:term) (bs:list binding) : Tac unit = + match bs with + | [] -> () + | x_t::bs -> + begin match term_as_formula (type_of_binding x_t) with + | Comp (Eq _) y _ -> + if term_eq x y + then rewrite x_t + else try_rewrite_equality x bs + | _ -> + try_rewrite_equality x bs + end + +let rec rewrite_all_context_equalities (bs:list binding) : Tac unit = + match bs with + | [] -> () + | x_t::bs -> begin + (try rewrite x_t with | _ -> ()); + rewrite_all_context_equalities bs + end + +let rewrite_eqs_from_context () : Tac unit = + rewrite_all_context_equalities (cur_vars ()) + +let rewrite_equality (t:term) : Tac unit = + try_rewrite_equality t (cur_vars ()) + +let unfold_def (t:term) : Tac unit = + match inspect t 
with + | Tv_FVar fv -> + let n = implode_qn (inspect_fv fv) in + norm [delta_fully [n]] + | _ -> fail "unfold_def: term is not a fv" + +(** Rewrites left-to-right, and bottom-up, given a set of lemmas stating +equalities. The lemmas need to prove *propositional* equalities, that +is, using [==]. *) +let l_to_r (lems:list term) : Tac unit = + let first_or_trefl () : Tac unit = + fold_left (fun k l () -> + (fun () -> apply_lemma_rw l) + `or_else` k) + trefl lems () in + pointwise first_or_trefl + +let mk_squash (t : term) : Tot term = + let sq : term = pack (Tv_FVar (pack_fv squash_qn)) in + mk_e_app sq [t] + +let mk_sq_eq (t1 t2 : term) : Tot term = + let eq : term = pack (Tv_FVar (pack_fv eq2_qn)) in + mk_squash (mk_e_app eq [t1; t2]) + +(** Rewrites all appearances of a term [t1] in the goal into [t2]. +Creates a new goal for [t1 == t2]. Note: there is a primitive grewrite +which is likely a better choice. This in case it's useful for backwards +compatibility. *) +let __grewrite_derived (t1 t2 : term) : Tac unit = + let e = tcut (mk_sq_eq t1 t2) in + let e = pack (Tv_Var e) in + pointwise (fun () -> + let (lhs, rhs) : term & term = + match term_as_formula (cur_goal()) with + | Comp (Eq _) lhs rhs -> + lhs, rhs + | _ -> + raise SKIP + in + if Tv_Uvar? lhs then + trefl () (* If the LHS is a uvar, do nothing, so we do not instantiate it. *) + else if not (term_eq lhs t1) then + raise SKIP + else + try exact e with | _ -> trefl () + ) + +private +let __un_sq_eq (#a:Type) (x y : a) (_ : (x == y)) : Lemma (x == y) = () + +(** A wrapper to [grewrite] which takes a binder of an equality type *) +let grewrite_eq (b:binding) : Tac unit = + match term_as_formula (type_of_binding b) with + | Comp (Eq _) l r -> + grewrite l r; + iseq [idtac; (fun () -> exact b)] + | _ -> + begin match term_as_formula' (type_of_binding b) with + | Comp (Eq _) l r -> + grewrite l r; + iseq [idtac; (fun () -> apply_lemma (`__un_sq_eq); + exact b)] + | _ -> + fail "grewrite_eq: binder type is not an equality" + end + +private +let admit_dump_t () : Tac unit = + dump "Admitting"; + apply (`admit) + +val admit_dump : #a:Type -> (#[admit_dump_t ()] x : (unit -> Admit a)) -> unit -> Admit a +let admit_dump #a #x () = x () + +private +let magic_dump_t () : Tac unit = + dump "Admitting"; + apply (`magic); + exact (`()); + () + +val magic_dump : #a:Type -> (#[magic_dump_t ()] x : a) -> unit -> Tot a +let magic_dump #a #x () = x + +let change_with t1 t2 : Tac unit = + focus (fun () -> + grewrite t1 t2; + iseq [idtac; trivial] + ) + +let change_sq (t1 : term) : Tac unit = + change (mk_e_app (`squash) [t1]) + +let finish_by (t : unit -> Tac 'a) : Tac 'a = + let x = t () in + or_else qed (fun () -> fail "finish_by: not finished"); + x + +let solve_then #a #b (t1 : unit -> Tac a) (t2 : a -> Tac b) : Tac b = + dup (); + let x = focus (fun () -> finish_by t1) in + let y = t2 x in + trefl (); + y + +let add_elem (t : unit -> Tac 'a) : Tac 'a = focus (fun () -> + apply (`Cons); + focus (fun () -> + let x = t () in + qed (); + x + ) + ) + +(* + * Specialize a function by partially evaluating it + * For example: + * let rec foo (l:list int) (x:int) :St int = + match l with + | [] -> x + | hd::tl -> x + foo tl x + + let f :int -> St int = synth_by_tactic (specialize (foo [1; 2]) [%`foo]) + + * would make the definition of f as x + x + x + * + * f is the term that needs to be specialized + * l is the list of names to be delta-ed + *) +let specialize (#a:Type) (f:a) (l:list string) :unit -> Tac unit + = fun () -> solve_then (fun () -> exact 
(quote f)) (fun () -> norm [delta_only l; iota; zeta]) + +let tlabel (l:string) = + match goals () with + | [] -> fail "tlabel: no goals" + | h::t -> + set_goals (set_label l h :: t) + +let tlabel' (l:string) = + match goals () with + | [] -> fail "tlabel': no goals" + | h::t -> + let h = set_label (l ^ get_label h) h in + set_goals (h :: t) + +let focus_all () : Tac unit = + set_goals (goals () @ smt_goals ()); + set_smt_goals [] + +private +let rec extract_nth (n:nat) (l : list 'a) : option ('a & list 'a) = + match n, l with + | _, [] -> None + | 0, hd::tl -> Some (hd, tl) + | _, hd::tl -> begin + match extract_nth (n-1) tl with + | Some (hd', tl') -> Some (hd', hd::tl') + | None -> None + end + +let bump_nth (n:pos) : Tac unit = + // n-1 since goal numbering begins at 1 + match extract_nth (n - 1) (goals ()) with + | None -> fail "bump_nth: not that many goals" + | Some (h, t) -> set_goals (h :: t) + +let rec destruct_list (t : term) : Tac (list term) = + let head, args = collect_app t in + match inspect head, args with + | Tv_FVar fv, [(a1, Q_Explicit); (a2, Q_Explicit)] + | Tv_FVar fv, [(_, Q_Implicit); (a1, Q_Explicit); (a2, Q_Explicit)] -> + if inspect_fv fv = cons_qn + then a1 :: destruct_list a2 + else raise NotAListLiteral + | Tv_FVar fv, _ -> + if inspect_fv fv = nil_qn + then [] + else raise NotAListLiteral + | _ -> + raise NotAListLiteral + +private let get_match_body () : Tac term = + match unsquash_term (cur_goal ()) with + | None -> fail "" + | Some t -> match inspect_unascribe t with + | Tv_Match sc _ _ -> sc + | _ -> fail "Goal is not a match" + +private let rec last (x : list 'a) : Tac 'a = + match x with + | [] -> fail "last: empty list" + | [x] -> x + | _::xs -> last xs + +(** When the goal is [match e with | p1 -> e1 ... | pn -> en], +destruct it into [n] goals for each possible case, including an +hypothesis for [e] matching the corresponding pattern. *) +let branch_on_match () : Tac unit = + focus (fun () -> + let x = get_match_body () in + let _ = t_destruct x in + iterAll (fun () -> + let bs = repeat intro in + let b = last bs in (* this one is the equality *) + grewrite_eq b; + norm [iota]) + ) + +(** When the argument [i] is non-negative, [nth_var] grabs the nth +binder in the current goal. When it is negative, it grabs the (-i-1)th +binder counting from the end of the goal. That is, [nth_var (-1)] +will return the last binder, [nth_var (-2)] the second to last, and +so on. *) +let nth_var (i:int) : Tac binding = + let bs = cur_vars () in + let k : int = if i >= 0 then i else List.Tot.Base.length bs + i in + let k : nat = if k < 0 then fail "not enough binders" else k in + match List.Tot.Base.nth bs k with + | None -> fail "not enough binders" + | Some b -> b + +(** [mk_abs [x1; ...; xn] t] returns the term [fun x1 ... xn -> t] *) +let rec mk_abs (args : list binder) (t : term) : Tac term (decreases args) = + match args with + | [] -> t + | a :: args' -> + let t' = mk_abs args' t in + pack (Tv_Abs a t') + +// GGG Needed? 
delete if not +let namedv_to_simple_binder (n : namedv) : Tac simple_binder = + let nv = inspect_namedv n in + { + ppname = nv.ppname; + uniq = nv.uniq; + sort = unseal nv.sort; (* GGG USINGSORT *) + qual = Q_Explicit; + attrs = []; + } + +[@@coercion] +let binding_to_simple_binder (b : binding) : Tot simple_binder = + { + ppname = b.ppname; + uniq = b.uniq; + sort = b.sort; + qual = Q_Explicit; + attrs = []; + } + +(** [string_to_term_with_lb [(id1, t1); ...; (idn, tn)] e s] parses +[s] as a term in environment [e] augmented with bindings +[id1, t1], ..., [idn, tn]. *) +let string_to_term_with_lb + (letbindings: list (string & term)) + (e: env) (t: string): Tac term + = let e, lb_bindings : env & list (term & binding) = + fold_left (fun (e, lb_bvs) (i, v) -> + let e, b = push_bv_dsenv e i in + e, (v, b)::lb_bvs + ) (e, []) letbindings in + let t = string_to_term e t in + fold_left (fun t (i, b) -> + pack (Tv_Let false [] (binding_to_simple_binder b) i t)) + t lb_bindings + +private +val lem_trans : (#a:Type) -> (#x:a) -> (#z:a) -> (#y:a) -> + squash (x == y) -> squash (y == z) -> Lemma (x == z) +private +let lem_trans #a #x #z #y e1 e2 = () + +(** Transivity of equality: reduce [x == z] to [x == ?u] and [?u == z]. *) +let trans () : Tac unit = apply_lemma (`lem_trans) + +(* Alias to just use the current vconfig *) +let smt_sync () : Tac unit = t_smt_sync (get_vconfig ()) + +(* smt_sync': as smt_sync, but using a particular fuel/ifuel *) +let smt_sync' (fuel ifuel : nat) : Tac unit = + let vcfg = get_vconfig () in + let vcfg' = { vcfg with initial_fuel = fuel; max_fuel = fuel + ; initial_ifuel = ifuel; max_ifuel = ifuel } + in + t_smt_sync vcfg' + +(* t_check_equiv wrappers. *) +let check_equiv g t0 t1 = t_check_equiv true true g t0 t1 +let check_equiv_nosmt g t0 t1 = t_check_equiv false false g t0 t1 diff --git a/stage0/ulib/FStar.Tactics.V2.Logic.fst b/stage0/ulib/FStar.Tactics.V2.Logic.fst new file mode 100644 index 00000000000..16ab7413ae9 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.Logic.fst @@ -0,0 +1,267 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V2.Logic + +open FStar.Reflection.V2 +open FStar.Reflection.V2.Formula +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.V2.Derived +open FStar.Tactics.V2.SyntaxCoercions +open FStar.Tactics.NamedView +open FStar.Tactics.Util + +open FStar.Tactics.V1.Logic.Lemmas + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +(** Revert an introduced binder as a forall. *) +let l_revert () : Tac unit = + revert (); + apply (`revert_squash) + +(** Repeated [l_revert]. *) +let rec l_revert_all (bs:list binding) : Tac unit = + match bs with + | [] -> () + | _::tl -> begin l_revert (); l_revert_all tl end + +(** Introduce a forall. *) +let forall_intro () : Tac binding = + apply_lemma (`fa_intro_lem); + intro () + +(** Introduce a forall, with some given name. 
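+For example, on a goal [squash (forall (x:nat). p x)], [forall_intro_as "x0"]
+introduces the quantified variable under the name [x0] and returns its binding.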
*) +let forall_intro_as (s:string) : Tac binding = + apply_lemma (`fa_intro_lem); + intro_as s + +(** Repeated [forall_intro]. *) +let forall_intros () : Tac (list binding) = repeat1 forall_intro + +(** Split a conjunction into two goals. *) +let split () : Tac unit = + try apply_lemma (`split_lem) + with | _ -> fail "Could not split goal" + +(** Introduce an implication. *) +let implies_intro () : Tac binding = + apply_lemma (`imp_intro_lem); + intro () + +let implies_intro_as (s:string) : Tac binding = + apply_lemma (`imp_intro_lem); + intro_as s + +(** Repeated [implies_intro]. *) +let implies_intros () : Tac (list binding) = repeat1 implies_intro + +(** "Logical" intro: introduce a forall or an implication. *) +let l_intro () = forall_intro `or_else` implies_intro + +(** Repeated [l]. *) +let l_intros () = repeat l_intro + +let squash_intro () : Tac unit = + apply (`FStar.Squash.return_squash) + +let l_exact (t:term) = + try exact t with + | _ -> (squash_intro (); exact t) + +// FIXME: should this take a binding? It's less general... +// but usually what we want. Coercions could help. +let hyp (x:namedv) : Tac unit = l_exact (namedv_to_term x) + +let pose_lemma (t : term) : Tac binding = + let c = tcc (cur_env ()) t in + let pre, post = + match c with + | C_Lemma pre post _ -> pre, post + | _ -> fail "" + in + let post = `((`#post) ()) in (* unthunk *) + let post = norm_term [] post in + (* If the precondition is trivial, do not cut by it *) + match term_as_formula' pre with + | True_ -> + pose (`(__lemma_to_squash #(`#pre) #(`#post) () (fun () -> (`#t)))) + | _ -> + let reqb = tcut (`squash (`#pre)) in + + let b = pose (`(__lemma_to_squash #(`#pre) #(`#post) (`#(reqb <: term)) (fun () -> (`#t)))) in + flip (); + ignore (trytac trivial); + b + +let explode () : Tac unit = + ignore ( + repeatseq (fun () -> first [(fun () -> ignore (l_intro ())); + (fun () -> ignore (split ()))])) + +let rec visit (callback:unit -> Tac unit) : Tac unit = + focus (fun () -> + or_else callback + (fun () -> + let g = cur_goal () in + match term_as_formula g with + | Forall _b _sort _phi -> + let binders = forall_intros () in + seq (fun () -> visit callback) (fun () -> l_revert_all binders) + | And p q -> + seq split (fun () -> visit callback) + | Implies p q -> + let _ = implies_intro () in + seq (fun () -> visit callback) l_revert + | _ -> + () + ) + ) + +let rec simplify_eq_implication () : Tac unit = + let e = cur_env () in + let g = cur_goal () in + let r = destruct_equality_implication g in + match r with + | None -> + fail "Not an equality implication" + | Some (_, rhs) -> + let eq_h = implies_intro () in // G, eq_h:x=e |- P + rewrite eq_h; // G, eq_h:x=e |- P[e/x] + clear_top (); // G |- P[e/x] + visit simplify_eq_implication + +let rewrite_all_equalities () : Tac unit = + visit simplify_eq_implication + +let rec unfold_definition_and_simplify_eq (tm:term) : Tac unit = + let g = cur_goal () in + match term_as_formula g with + | App hd arg -> + if term_eq hd tm + then trivial () + else () + | _ -> begin + let r = destruct_equality_implication g in + match r with + | None -> fail "Not an equality implication" + | Some (_, rhs) -> + let eq_h = implies_intro () in + rewrite eq_h; + clear_top (); + visit (fun () -> unfold_definition_and_simplify_eq tm) + end + +(** A tactic to unsquash a hypothesis. Perhaps you are looking +for [unsquash_term]. 
+ +Pre: + goal = + G |- e : squash s + t : squash r + +Post: + G, x:r |- e : squash s + `x` is returned as a term +*) +let unsquash (t : term) : Tac term = + let v = `vbind in + apply_lemma (mk_e_app v [t]); + let b = intro () in + pack (Tv_Var b) + +let cases_or (o:term) : Tac unit = + apply_lemma (mk_e_app (`or_ind) [o]) + +let cases_bool (b:term) : Tac unit = + let bi = `bool_ind in + seq (fun () -> apply_lemma (mk_e_app bi [b])) + (fun () -> let _ = trytac (fun () -> let b = implies_intro () in rewrite b; clear_top ()) in ()) + +let left () : Tac unit = + apply_lemma (`or_intro_1) + +let right () : Tac unit = + apply_lemma (`or_intro_2) + +let and_elim (t : term) : Tac unit = + begin + try apply_lemma (`(__and_elim (`#t))) + with | _ -> apply_lemma (`(__and_elim' (`#t))) + end + +let destruct_and (t : term) : Tac (binding & binding) = + and_elim t; + (implies_intro (), implies_intro ()) + +let witness (t : term) : Tac unit = + apply_raw (`__witness); + exact t + +(* returns witness and proof as binders *) +let elim_exists (t : term) : Tac (binding & binding) = + apply_lemma (`(__elim_exists' (`#(t)))); + let x = intro () in + let pf = intro () in + (x, pf) + +let instantiate (fa : term) (x : term) : Tac binding = + try pose (`__forall_inst_sq (`#fa) (`#x)) with | _ -> + try pose (`__forall_inst (`#fa) (`#x)) with | _ -> + fail "could not instantiate" + +let instantiate_as (fa : term) (x : term) (s : string) : Tac binding = + let b = instantiate fa x in + rename_to b s + +let rec sk_binder' (acc:list binding) (b:binding) : Tac (list binding & binding) = + focus (fun () -> + try + apply_lemma (`(sklem0 (`#b))); + if ngoals () <> 1 then fail "no"; + clear b; + let bx = forall_intro () in + let b' = implies_intro () in + sk_binder' (bx::acc) b' (* We might have introduced a new existential, so possibly recurse *) + with | _ -> (acc, b) (* If the above failed, just return *) + ) + +(* Skolemizes a given binder for an existential, returning the introduced new binders + * and the skolemized formula. *) +let sk_binder b = sk_binder' [] b + +let skolem () = + let bs = vars_of_env (cur_env ()) in + map sk_binder bs + +let easy_fill () = + let _ = repeat intro in + (* If the goal is `a -> Lemma b`, intro will fail, try to use this switch *) + let _ = trytac (fun () -> apply (`lemma_from_squash); intro ()) in + smt () + +let easy #a #x = x + +(** Add a lemma into the local context, quantified for all arguments. +Only works for lemmas with up to 3 arguments for now. It is expected +that `t` is a top-level name, this has not been battle-tested for other +kinds of terms. *) +let using_lemma (t : term) : Tac binding = + try pose_lemma (`(lem1_fa (`#t))) with | _ -> + try pose_lemma (`(lem2_fa (`#t))) with | _ -> + try pose_lemma (`(lem3_fa (`#t))) with | _ -> + fail "using_lemma: failed to instantiate" diff --git a/stage0/ulib/FStar.Tactics.V2.Logic.fsti b/stage0/ulib/FStar.Tactics.V2.Logic.fsti new file mode 100644 index 00000000000..4898bdad08c --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.Logic.fsti @@ -0,0 +1,166 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V2.Logic + +open FStar.Tactics.Effect +open FStar.Reflection.V2 +open FStar.Reflection.V2.Formula +open FStar.Tactics.NamedView +open FStar.Tactics.V1.Logic.Lemmas {} (* bring lemmas into TC scope *) + +(* Repeated to avoid importing FStar.Tactics.V1.Derived. *) +private let cur_goal () : Tac typ = + let open FStar.Stubs.Tactics.Types in + let open FStar.Stubs.Tactics.V2.Builtins in + match goals_of (get ()) with + | g::_ -> goal_type g + | _ -> raise (TacticFailure (mkmsg "no more goals", None)) + +(** Returns the current goal as a [formula]. *) +let cur_formula () : Tac formula = term_as_formula (cur_goal ()) + +(** Revert an introduced binder as a forall. *) +[@@plugin] +val l_revert () : Tac unit + +(** Repeated [l_revert]. *) +[@@plugin] +val l_revert_all (bs:list binding) : Tac unit + +(** Introduce a forall. *) +[@@plugin] +val forall_intro () : Tac binding + +(** Introduce a forall, with some given name. *) +[@@plugin] +val forall_intro_as (s:string) : Tac binding + +(** Repeated [forall_intro]. *) +[@@plugin] +val forall_intros () : Tac (list binding) + +(** Split a conjunction into two goals. *) +[@@plugin] +val split () : Tac unit + +(** Introduce an implication. *) +[@@plugin] +val implies_intro () : Tac binding + +[@@plugin] +val implies_intro_as (s:string) : Tac binding + +(** Repeated [implies_intro]. *) +[@@plugin] +val implies_intros () : Tac (list binding) + +(** "Logical" intro: introduce a forall or an implication. *) +[@@plugin] +val l_intro () : Tac binding + +(** Repeated [l]. *) +[@@plugin] +val l_intros () : Tac (list binding) + +[@@plugin] +val squash_intro () : Tac unit + +[@@plugin] +val l_exact (t:term) : Tac unit + +// FIXME: should this take a binding? It's less general... +// but usually what we want. Coercions could help. +[@@plugin] +val hyp (x:namedv) : Tac unit + +[@@plugin] +val pose_lemma (t : term) : Tac binding + +[@@plugin] +val explode () : Tac unit + +[@@plugin] +val simplify_eq_implication () : Tac unit + +[@@plugin] +val rewrite_all_equalities () : Tac unit + +[@@plugin] +val unfold_definition_and_simplify_eq (tm:term) : Tac unit + +(** A tactic to unsquash a hypothesis. Perhaps you are looking +for [unsquash_term]. + +Pre: + goal = + G |- e : squash s + t : squash r + +Post: + G, x:r |- e : squash s + `x` is returned as a term +*) +[@@plugin] +val unsquash (t : term) : Tac term + +[@@plugin] +val cases_or (o:term) : Tac unit + +[@@plugin] +val cases_bool (b:term) : Tac unit + +[@@plugin] +val left () : Tac unit + +[@@plugin] +val right () : Tac unit + +[@@plugin] +val and_elim (t : term) : Tac unit + +[@@plugin] +val destruct_and (t : term) : Tac (binding & binding) + +[@@plugin] +val witness (t : term) : Tac unit + +(* returns witness and proof as binders *) +[@@plugin] +val elim_exists (t : term) : Tac (binding & binding) + +[@@plugin] +val instantiate (fa : term) (x : term) : Tac binding + +[@@plugin] +val instantiate_as (fa : term) (x : term) (s : string) : Tac binding + +[@@plugin] +val skolem () : Tac (list (list binding & binding)) + +[@@plugin] +val easy_fill () : Tac unit + +(* We mark this as a plugin so it can reduce. Some kind of 'transparent' attribute +would be better. `inline_for_extraction` is almost that? *) +[@@plugin] +val easy : #a:Type -> (#[easy_fill ()] _ : a) -> a + +(** Add a lemma into the local context, quantified for all arguments. +Only works for lemmas with up to 3 arguments for now. 
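+For example, for a hypothetical lemma [val my_lemma : x:int -> Lemma (p x)],
+[using_lemma (`my_lemma)] should add a hypothesis equivalent to
+[squash (forall (x:int). p x)] to the context and return its binding.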
It is expected +that `t` is a top-level name, this has not been battle-tested for other +kinds of terms. *) +[@@plugin] +val using_lemma (t : term) : Tac binding diff --git a/stage0/ulib/FStar.Tactics.V2.SyntaxCoercions.fst b/stage0/ulib/FStar.Tactics.V2.SyntaxCoercions.fst new file mode 100644 index 00000000000..c25c9670a7a --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.SyntaxCoercions.fst @@ -0,0 +1,33 @@ +module FStar.Tactics.V2.SyntaxCoercions + +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.NamedView +open FStar.Sealed + +[@@coercion] +let namedv_to_term (x : namedv) : Tot term = + pack (Tv_Var x) + +[@@coercion] +let binder_to_namedv (b : binder) : Tot namedv = + { + ppname = b.ppname; + uniq = b.uniq; + sort = seal b.sort; + } + +[@@coercion] +let binder_to_term (b : binder) : Tot term = + pack (Tv_Var (binder_to_namedv b)) + +[@@coercion] +let binding_to_namedv (b : binding) : Tot namedv = + { + ppname = b.ppname; + sort = seal b.sort; + uniq = b.uniq + } + +[@@coercion] +let binding_to_term (x : binding) : Tot term = + namedv_to_term (binding_to_namedv x) diff --git a/stage0/ulib/FStar.Tactics.V2.SyntaxHelpers.fst b/stage0/ulib/FStar.Tactics.V2.SyntaxHelpers.fst new file mode 100644 index 00000000000..27f0ff9f74c --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.SyntaxHelpers.fst @@ -0,0 +1,93 @@ +module FStar.Tactics.V2.SyntaxHelpers + +open FStar.Reflection.V2 +open FStar.Tactics.Effect +open FStar.Stubs.Tactics.Types +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Tactics.NamedView + +(* These are fully-named variants of functions found in FStar.Reflection *) + +private +let rec collect_arr' (bs : list binder) (c : comp) : Tac (list binder & comp) = + begin match c with + | C_Total t -> + begin match inspect t with + | Tv_Arrow b c -> + collect_arr' (b::bs) c + | _ -> + (bs, c) + end + | _ -> (bs, c) + end + +let collect_arr_bs t = + let (bs, c) = collect_arr' [] (C_Total t) in + (List.Tot.Base.rev bs, c) + +let collect_arr t = + let (bs, c) = collect_arr' [] (C_Total t) in + let ts = List.Tot.Base.map (fun (b:binder) -> b.sort) bs in + (List.Tot.Base.rev ts, c) + +private +let rec collect_abs' (bs : list binder) (t : term) : Tac (list binder & term) (decreases t) = + match inspect t with + | Tv_Abs b t' -> + collect_abs' (b::bs) t' + | _ -> (bs, t) + +let collect_abs t = + let (bs, t') = collect_abs' [] t in + (List.Tot.Base.rev bs, t') + +(* Copied from FStar.Tactics.V2.Derived *) +private +let fail (#a:Type) (m:string) = raise #a (TacticFailure (mkmsg m, None)) + +let rec mk_arr (bs: list binder) (cod : comp) : Tac term = + match bs with + | [] -> fail "mk_arr, empty binders" + | [b] -> pack (Tv_Arrow b cod) + | (b::bs) -> + pack (Tv_Arrow b (C_Total (mk_arr bs cod))) + +let rec mk_tot_arr (bs: list binder) (cod : term) : Tac term = + match bs with + | [] -> cod + | (b::bs) -> + pack (Tv_Arrow b (C_Total (mk_tot_arr bs cod))) + +let lookup_lb (lbs:list letbinding) (nm:name) : Tac letbinding = + let o = FStar.List.Tot.Base.find + (fun lb -> (inspect_fv lb.lb_fv) = nm) + lbs + in + match o with + | Some lb -> lb + | None -> fail "lookup_letbinding: Name not in let group" + +let rec inspect_unascribe (t:term) : Tac (tv:term_view{notAscription tv}) = + match inspect t with + | Tv_AscribedT t _ _ _ + | Tv_AscribedC t _ _ _ -> + inspect_unascribe t + | tv -> tv + +(* Helpers for dealing with nested applications and arrows *) +let rec collect_app' (args : list argv) (t : term) + : Tac (term & list argv) = + match inspect_unascribe t with + | Tv_App l r -> + 
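+    (* push the argument onto the accumulator and keep walking down the
+       head of the application spine *)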
collect_app' (r::args) l + | _ -> (t, args) + +let collect_app = collect_app' [] + +(* Destruct an application into [h]ead fv, [u]niverses, and [a]rguments. *) +let hua (t:term) : Tac (option (fv & universes & list argv)) = + let hd, args = collect_app t in + match inspect hd with + | Tv_FVar fv -> Some (fv, [], args) + | Tv_UInst fv us -> Some (fv, us, args) + | _ -> None diff --git a/stage0/ulib/FStar.Tactics.V2.SyntaxHelpers.fsti b/stage0/ulib/FStar.Tactics.V2.SyntaxHelpers.fsti new file mode 100644 index 00000000000..a9f86886d72 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.SyntaxHelpers.fsti @@ -0,0 +1,37 @@ +module FStar.Tactics.V2.SyntaxHelpers + +open FStar.Reflection.V2 +open FStar.Tactics.Effect +open FStar.Tactics.NamedView + +(* These are fully-named variants of functions found in FStar.Reflection *) + +[@@plugin] +val collect_arr_bs : typ -> Tac (list binder & comp) + +[@@plugin] +val collect_arr : typ -> Tac (list typ & comp) + +[@@plugin] +val collect_abs : term -> Tac (list binder & term) + +[@@plugin] +val mk_arr (bs: list binder) (cod : comp) : Tac term + +[@@plugin] +val mk_tot_arr (bs: list binder) (cod : term) : Tac term + +[@@plugin] +val lookup_lb (lbs:list letbinding) (nm:name) : Tac letbinding + +[@@plugin] +val inspect_unascribe (t:term) : Tac (tv:term_view{notAscription tv}) + +(* Helpers for dealing with nested applications and arrows *) + +[@@plugin] +val collect_app (t:term) : Tac (term & list argv) + +(* Destruct an application into [h]ead fv, [u]niverses, and [a]rguments. *) +[@@plugin] +val hua (t:term) : Tac (option (fv & universes & list argv)) diff --git a/stage0/ulib/FStar.Tactics.V2.fsti b/stage0/ulib/FStar.Tactics.V2.fsti new file mode 100644 index 00000000000..5376f07e4b0 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.V2.fsti @@ -0,0 +1,24 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics.V2 + +(* The bare version, plus some particular things we expose to users +for convenience. Crucially, mapply must be here, so we can open +tactics.v2.bare in typeclasses, and typeclasses in mapply, and not +trigger a cycle. *) +include FStar.Tactics.V2.Bare +include FStar.Tactics.MApply0 +include FStar.Tactics.MApply diff --git a/stage0/ulib/FStar.Tactics.Visit.fst b/stage0/ulib/FStar.Tactics.Visit.fst new file mode 100644 index 00000000000..80e146821c7 --- /dev/null +++ b/stage0/ulib/FStar.Tactics.Visit.fst @@ -0,0 +1,136 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Tactics.Visit + +(* Visit a term and transform it step by step. *) + +open FStar.Stubs.Reflection.Types +open FStar.Stubs.Reflection.V2.Data +open FStar.Stubs.Reflection.V2.Builtins +open FStar.Tactics.Effect +open FStar.Tactics.Util + +let on_sort_binder (f : term -> Tac term) (b:binder) : Tac binder = + let bview = inspect_binder b in + let bview = { bview with sort = f bview.sort } in + pack_binder bview + +(* Same *) +let on_sort_simple_binder (f : term -> Tac term) (b:simple_binder) : Tac simple_binder = + let bview = inspect_binder b in + let bview = { bview with sort = f bview.sort } in + inspect_pack_binder bview; + pack_binder bview + +let rec visit_tm (ff : term -> Tac term) (t : term) : Tac term = + let tv = inspect_ln t in + let tv' = + match tv with + | Tv_FVar _ + | Tv_Var _ + | Tv_BVar _ + | Tv_UInst _ _ -> tv + + | Tv_Type u -> Tv_Type u + | Tv_Const c -> Tv_Const c + | Tv_Uvar i u -> Tv_Uvar i u + | Tv_Unknown -> Tv_Unknown + | Tv_Unsupp -> Tv_Unsupp + | Tv_Arrow b c -> + let b = on_sort_binder (visit_tm ff) b in + let c = visit_comp ff c in + Tv_Arrow b c + | Tv_Abs b t -> + let b = on_sort_binder (visit_tm ff) b in + let t = visit_tm ff t in + Tv_Abs b t + | Tv_App l (r, q) -> + let l = visit_tm ff l in + let r = visit_tm ff r in + Tv_App l (r, q) + | Tv_Refine b r -> + let b = on_sort_simple_binder (visit_tm ff) b in + let r = visit_tm ff r in + Tv_Refine b r + | Tv_Let r attrs b def t -> + let b = on_sort_simple_binder (visit_tm ff) b in + let def = visit_tm ff def in + let t = visit_tm ff t in + Tv_Let r attrs b def t + | Tv_Match sc ret_opt brs -> + let sc = visit_tm ff sc in + let ret_opt = map_opt (fun (b, asc) -> + let b = on_sort_binder (visit_tm ff) b in + let asc = + match asc with + | Inl t, tacopt, use_eq -> + Inl (visit_tm ff t), map_opt (visit_tm ff) tacopt, use_eq + | Inr c, tacopt, use_eq-> + Inr (visit_comp ff c), map_opt (visit_tm ff) tacopt, use_eq in + b, asc) ret_opt in + let brs = map (visit_br ff) brs in + Tv_Match sc ret_opt brs + | Tv_AscribedT e t topt use_eq -> + let e = visit_tm ff e in + let t = visit_tm ff t in + Tv_AscribedT e t topt use_eq + | Tv_AscribedC e c topt use_eq -> + let e = visit_tm ff e in + let c = visit_comp ff c in + Tv_AscribedC e c topt use_eq + in + ff (pack_ln tv') +and visit_br (ff : term -> Tac term) (b:branch) : Tac branch = + let (p, t) = b in + let p = visit_pat ff p in + let t = visit_tm ff t in + (p, t) +and visit_pat (ff : term -> Tac term) (p:pattern) : Tac pattern = + match p with + | Pat_Constant _ -> p + | Pat_Var v s -> Pat_Var v s + | Pat_Cons head univs subpats -> + let subpats = (map (fun(p,b) -> (visit_pat ff p, b)) subpats) in + Pat_Cons head univs subpats + | Pat_Dot_Term t -> + let t = map_opt (visit_tm ff) t in + Pat_Dot_Term t + +and visit_comp (ff : term -> Tac term) (c : comp) : Tac comp = + let cv = inspect_comp c in + let cv' = + match cv with + | C_Total ret -> + let ret = visit_tm ff ret in + C_Total ret + + | C_GTotal ret -> + let ret = visit_tm ff ret in + C_GTotal ret + + | C_Lemma pre post pats -> + let pre = visit_tm ff pre in + let post = visit_tm ff post in + let pats = visit_tm ff pats in + C_Lemma pre post pats + + | C_Eff us eff res args decrs -> + let res = visit_tm ff res in + let args = map (fun (a, q) -> (visit_tm ff a, q)) args in + let decrs = map (visit_tm ff) decrs in + C_Eff us eff res args decrs + in + pack_comp cv' diff --git a/stage0/ulib/FStar.Tactics.fsti b/stage0/ulib/FStar.Tactics.fsti new file mode 100644 index 00000000000..feedb662246 --- /dev/null 
+++ b/stage0/ulib/FStar.Tactics.fsti @@ -0,0 +1,18 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tactics + +include FStar.Tactics.V1 diff --git a/stage0/ulib/FStar.Tcp.fsti b/stage0/ulib/FStar.Tcp.fsti new file mode 100644 index 00000000000..7f3e7f87643 --- /dev/null +++ b/stage0/ulib/FStar.Tcp.fsti @@ -0,0 +1,58 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Tcp + +open FStar.Bytes +open FStar.Error + +new val networkStream: eqtype +new val tcpListener: Type0 + +val set_nonblock: networkStream -> unit +val clear_nonblock: networkStream -> unit + +(* Server side *) + +val listen: string -> nat -> EXT tcpListener +val acceptTimeout: nat -> tcpListener -> EXT networkStream +val accept: tcpListener -> EXT networkStream +val stop: tcpListener -> EXT unit + +(* Client side *) + +val connectTimeout: nat -> string -> nat -> EXT networkStream +val connect: string -> nat -> EXT networkStream + +(* Input/Output *) + +// adding support for (potentially) non-blocking I/O +// NB for now, send *fails* on partial writes, and *loops* on EAGAIN/EWOULDBLOCK. + +type recv_result (max:nat) = + | RecvWouldBlock + | RecvError of string + | Received of b:bytes {length b <= max} +val recv_async: networkStream -> max:nat -> EXT (recv_result max) + +val recv: networkStream -> max:nat -> EXT (optResult string (b:bytes {length b <= max})) +val send: networkStream -> bytes -> EXT (optResult string unit) +val close: networkStream -> EXT unit + +(* Create a network stream from a given stream. + Only used by the application interface TLSharp. *) +(* assume val create: System.IO.Stream -> NetworkStream*) + + diff --git a/stage0/ulib/FStar.UInt.fst b/stage0/ulib/FStar.UInt.fst new file mode 100644 index 00000000000..a4cd0ca0bc3 --- /dev/null +++ b/stage0/ulib/FStar.UInt.fst @@ -0,0 +1,633 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.UInt + +(* NOTE: anything that you fix/update here should be reflected in [FStar.Int.fst], which is mostly + * a copy-paste of this module. *) + +open FStar.Mul +open FStar.BitVector +open FStar.Math.Lemmas + +let pow2_values x = + match x with + | 0 -> assert_norm (pow2 0 == 1) + | 1 -> assert_norm (pow2 1 == 2) + | 8 -> assert_norm (pow2 8 == 256) + | 16 -> assert_norm (pow2 16 == 65536) + | 31 -> assert_norm (pow2 31 == 2147483648) + | 32 -> assert_norm (pow2 32 == 4294967296) + | 63 -> assert_norm (pow2 63 == 9223372036854775808) + | 64 -> assert_norm (pow2 64 == 18446744073709551616) + | 128 -> assert_norm (pow2 128 = 0x100000000000000000000000000000000) + | _ -> () + +let incr_underspec #n a = + if a < max_int n then a + 1 else 0 + +let decr_underspec #n a = + if a > min_int n then a - 1 else 0 + +let add_underspec #n a b = + if fits (a+b) n then a + b else 0 + +let sub_underspec #n a b = + if fits (a-b) n then a - b else 0 + +let mul_underspec #n a b = + if fits (a*b) n then a * b else 0 + +#push-options "--fuel 0 --ifuel 0" +let lt_square_div_lt a b = () + +let div_underspec #n a b = + if fits (a / b) n then a / b else 0 +#pop-options + +let div_size #n a b = + FStar.Math.Lib.slash_decr_axiom a b; () + +open FStar.Seq + +let to_vec_lemma_1 #n a b = () + +let rec to_vec_lemma_2 #n a b = + if n = 0 then () else begin + assert(equal (slice (to_vec b) 0 (n - 1)) (to_vec #(n - 1) (b / 2))); + assert(equal (slice (to_vec a) 0 (n - 1)) (to_vec #(n - 1) (a / 2))); + to_vec_lemma_2 #(n - 1) (a / 2) (b / 2); + assert(a % 2 = (if index (to_vec a) (n - 1) then 1 else 0)); + assert(b % 2 = (if index (to_vec b) (n - 1) then 1 else 0)); + assert(a % 2 = b % 2) + end + +let rec inverse_aux #n vec i = + if i = n - 1 then + assert((from_vec vec) % 2 = (if index vec (n - 1) then 1 else 0)) + else inverse_aux #(n - 1) (slice vec 0 (n - 1)) i + +let inverse_vec_lemma #n vec = () + +let inverse_num_lemma #n num = to_vec_lemma_2 #n num (from_vec (to_vec num)) + +let from_vec_lemma_1 #n a b = () + +let from_vec_lemma_2 #n a b = inverse_vec_lemma a; inverse_vec_lemma b + +#push-options "--fuel 0 --ifuel 0" +let from_vec_aux #n a s1 s2 = + paren_mul_left (from_vec #s2 (slice a 0 s2)) (pow2 (s1 - s2)) (pow2 (n - s1)); + paren_mul_right (from_vec #s2 (slice a 0 s2)) (pow2 (s1 - s2)) (pow2 (n - s1)); + pow2_plus (s1 - s2) (n - s1) +#pop-options + +let seq_slice_lemma #n a s1 t1 s2 t2 = () + +#push-options "--initial_fuel 1 --max_fuel 1" +let rec from_vec_propriety #n a s = + if s = n - 1 then () else begin + from_vec_propriety #n a (s + 1); + from_vec_propriety #(s + 1) (slice a 0 (s + 1)) s; + seq_slice_lemma #n a 0 (s + 1) 0 s; + seq_slice_lemma #n a 0 (s + 1) s (s + 1); + from_vec_aux #n a (s + 1) s; + from_vec_propriety #(n - s) (slice a s n) 1; + seq_slice_lemma #n a s n 0 1; + seq_slice_lemma #n a s n 1 (n - s) + end +#pop-options + +let append_lemma #n #m a b = + assert(equal a (slice (append a b) 0 n)); + assert(equal b (slice (append a b) n (n + m))); + from_vec_propriety #(n + m) (append a b) n + +let slice_left_lemma #n a s = + from_vec_propriety #n a s; + division_addition_lemma (from_vec #(n - s) (slice a s n)) (pow2 (n - s)) (from_vec #s (slice a 0 s)); + small_division_lemma_1 (from_vec #(n - s) (slice a s n)) (pow2 (n - s)) + +let slice_right_lemma #n a s = + from_vec_propriety #n a (n - s); + modulo_addition_lemma (from_vec #s (slice a (n - s) n)) (pow2 s) (from_vec #(n - s) (slice a 0 (n - s))); + small_modulo_lemma_1 (from_vec #s (slice a (n - s) n)) (pow2 s) + +let rec 
zero_to_vec_lemma #n i = + if i = n - 1 then () else zero_to_vec_lemma #(n - 1) i + +let zero_from_vec_lemma #n = to_vec_lemma_2 (from_vec (zero_vec #n)) (zero n) + +let one_to_vec_lemma #n i = + if i = n - 1 then () else zero_to_vec_lemma #n i + +let rec pow2_to_vec_lemma #n p i = + if i = n - 1 then () + else if p = 0 then one_to_vec_lemma #n i + else pow2_to_vec_lemma #(n - 1) (p - 1) i + +let pow2_from_vec_lemma #n p = + to_vec_lemma_2 (from_vec (elem_vec #n p)) (pow2_n #n (n - p - 1)) + +let rec ones_to_vec_lemma #n i = + if i = n - 1 then () else ones_to_vec_lemma #(n - 1) i + +let ones_from_vec_lemma #n = + to_vec_lemma_2 (from_vec (ones_vec #n)) (ones n) + +let nth_lemma #n a b = + assert(forall (i:nat{i < n}). index (to_vec #n a) i = index (to_vec #n b) i); + to_vec_lemma_2 a b + +let zero_nth_lemma #n i = () + +let pow2_nth_lemma #n p i = () + +let one_nth_lemma #n i = () + +let ones_nth_lemma #n i = () + +let logand_definition #n a b i = () + +let logxor_definition #n a b i = () + +let logor_definition #n a b i = () + +let lognot_definition #n a i = () + +let logand_commutative #n a b = nth_lemma #n (logand #n a b) (logand #n b a) + +let logand_associative #n a b c = nth_lemma #n (logand #n (logand #n a b) c) (logand #n a (logand #n b c)) + +let logand_self #n a = nth_lemma #n (logand #n a a) a + +let logand_lemma_1 #n a = nth_lemma #n (logand #n a (zero n)) (zero n) + +let logand_lemma_2 #n a = nth_lemma #n (logand #n a (ones n)) a + +let rec subset_vec_le_lemma #n a b = match n with + | 1 -> () + | _ -> lemma_slice_subset_vec #n a b 0 (n-1); + subset_vec_le_lemma #(n-1) (slice a 0 (n-1)) (slice b 0 (n-1)) + +let logand_le #n a b = + let va = to_vec a in + let vb = to_vec b in + let vand = to_vec (logand a b) in + subset_vec_le_lemma #n vand va; + subset_vec_le_lemma #n vand vb + +let logxor_commutative #n a b = nth_lemma #n (logxor #n a b) (logxor #n b a) + +let logxor_associative #n a b c = nth_lemma #n (logxor #n (logxor #n a b) c) (logxor #n a (logxor #n b c)) + +let logxor_self #n a = nth_lemma #n (logxor #n a a) (zero n) + +let logxor_lemma_1 #n a = nth_lemma #n (logxor #n a (zero n)) a + +let logxor_lemma_2 #n a = nth_lemma #n (logxor #n a (ones n)) (lognot #n a) + +let xor_lemma _ _ = () + +let logxor_inv #n a b = + let open FStar.BitVector in + let open FStar.Seq in + let va = to_vec a in + let vb = to_vec b in + cut(forall (i:nat). i < n ==> index (logxor_vec #n va vb) i = (index va i <> index vb i)); + cut (forall (i:nat). {:pattern (index (logxor_vec (logxor_vec va vb) vb) i)} + i < n ==> index (logxor_vec (logxor_vec va vb) vb) i = (xor (xor (index va i) + (index vb i)) + (index vb i))); + cut (forall (i:nat). i < n ==> index (logxor_vec (logxor_vec va vb) vb) i = index va i); + Seq.lemma_eq_intro (logxor_vec (logxor_vec va vb) vb) va; + inverse_num_lemma a; inverse_num_lemma b + +let logxor_neq_nonzero #n a b = + let va = to_vec a in + let vb = to_vec b in + if logxor a b = 0 then + begin + let open FStar.Seq in + let f (i:nat{i < n}) : Lemma (not (nth #n 0 i)) = zero_nth_lemma #n i in + Classical.forall_intro f; + assert (forall (i:nat{i < n}). 
index va i = index vb i); + lemma_eq_intro va vb; + assert (from_vec va = from_vec vb) + end + +let logor_commutative #n a b = nth_lemma #n (logor #n a b) (logor #n b a) + +let logor_associative #n a b c = nth_lemma #n (logor #n (logor #n a b) c) (logor #n a (logor #n b c)) + +let logor_self #n a = nth_lemma #n (logor #n a a) a + +let logor_lemma_1 #n a = nth_lemma (logor #n a (zero n)) a + +let logor_lemma_2 #n a = nth_lemma (logor #n a (ones n)) (ones n) + +let rec superset_vec_ge_lemma #n a b = match n with + | 1 -> () + | _ -> lemma_slice_superset_vec #n a b 0 (n-1); + superset_vec_ge_lemma #(n-1) (slice a 0 (n-1)) (slice b 0 (n-1)) + +let logor_ge #n a b = + let va = to_vec a in + let vb = to_vec b in + let vor = to_vec (logor a b) in + superset_vec_ge_lemma #n vor va; + superset_vec_ge_lemma #n vor vb + +let lognot_self #n a = nth_lemma (lognot #n (lognot #n a)) a + +let lognot_lemma_1 #n = nth_lemma (lognot #n (zero n)) (ones n) + +val to_vec_mod_pow2: #n:nat -> a:uint_t n -> m:pos -> i:nat{n - m <= i /\ i < n} -> + Lemma (requires (a % pow2 m == 0)) + (ensures (index (to_vec a) i == false)) + [SMTPat (index (to_vec #n a) i); SMTPat (pow2 m)] +let rec to_vec_mod_pow2 #n a m i = + if i = n - 1 then + begin + lemma_index_app2 (to_vec #(n - 1) (a / 2)) (Seq.create 1 (a % 2 = 1)) i; + mod_mult_exact a 2 (pow2 (m - 1)) + end + else + begin + lemma_index_app1 (to_vec #(n - 1) (a / 2)) (Seq.create 1 (a % 2 = 1)) i; + assert (index (to_vec a) i == index (to_vec #(n - 1) (a / 2)) i); + mod_pow2_div2 a m; + to_vec_mod_pow2 #(n - 1) (a / 2) (m - 1) i + end + +val to_vec_lt_pow2: #n:nat -> a:uint_t n -> m:nat -> i:nat{i < n - m} -> + Lemma (requires (a < pow2 m)) + (ensures (index (to_vec a) i == false)) + [SMTPat (index (to_vec #n a) i); SMTPat (pow2 m)] +let rec to_vec_lt_pow2 #n a m i = + if n = 0 then () + else + if m = 0 then + assert (a == zero n) + else + begin + lemma_index_app1 (to_vec #(n - 1) (a / 2)) (Seq.create 1 (a % 2 = 1)) i; + assert (index (to_vec a) i == index (to_vec #(n - 1) (a / 2)) i); + to_vec_lt_pow2 #(n - 1) (a / 2) (m - 1) i + end + +(** Used in the next two lemmas *) +#push-options "--initial_fuel 0 --max_fuel 1 --z3rlimit 40" +let rec index_to_vec_ones #n m i = + let a = pow2 m - 1 in + pow2_le_compat n m; + if m = 0 then one_to_vec_lemma #n i + else if m = n then ones_to_vec_lemma #n i + else if i = n - 1 then () + else index_to_vec_ones #(n - 1) (m - 1) i +#pop-options + +let logor_disjoint #n a b m = + assert (a % pow2 m == 0); // To trigger pattern above + assert (forall (i:nat{n - m <= i /\ i < n}).{:pattern (index (to_vec a) i)} + index (to_vec a) i == false); + assert (b < pow2 m); // To trigger pattern above + assert (forall (i:nat{i < n - m}).{:pattern (index (to_vec b) i)} + index (to_vec b) i == false); + Seq.lemma_split (logor_vec (to_vec a) (to_vec b)) (n - m); + Seq.lemma_eq_intro + (logor_vec (to_vec a) (to_vec b)) + (append (slice (to_vec a) 0 (n - m)) (slice (to_vec b) (n - m) n)); + append_lemma #(n - m) #m (slice (to_vec a) 0 (n - m)) (slice (to_vec b) (n - m) n); + slice_left_lemma #n (to_vec a) (n - m); + div_exact_r a (pow2 m); + assert (from_vec #(n - m) (slice (to_vec a) 0 (n - m)) * pow2 m == a); + slice_right_lemma #n (to_vec b) m; + small_modulo_lemma_1 b (pow2 m); + assert (from_vec #m (slice (to_vec b) (n - m) n) == b) + +let logand_mask #n a m = + pow2_lt_compat n m; + Seq.lemma_split (logand_vec (to_vec a) (to_vec (pow2 m - 1))) (n - m); + Seq.lemma_eq_intro + (logand_vec (to_vec a) (to_vec (pow2 m - 1))) + (append (zero_vec #(n - m)) 
(slice (to_vec a) (n - m) n)); + append_lemma #(n - m) #m (zero_vec #(n - m)) (slice (to_vec a) (n - m) n); + calc (==) { + 0 * pow2 m + a % pow2 m; + == { } + 0 + a % pow2 m; + == { } + a % pow2 m; + }; + assert (0 * pow2 m + a % pow2 m == a % pow2 m); + assert (from_vec #(n - m) (zero_vec #(n - m)) == 0); + slice_right_lemma #n (to_vec a) m; + assert (from_vec #m (slice (to_vec a) (n - m) n) == a % pow2 m) + +let shift_left_lemma_1 #n a s i = () + +let shift_left_lemma_2 #n a s i = () + +let shift_right_lemma_1 #n a s i = () + +let shift_right_lemma_2 #n a s i = () + +let shift_left_logand_lemma #n a b s = nth_lemma (shift_left #n (logand #n a b) s) (logand #n (shift_left #n a s) (shift_left #n b s)) + +let shift_right_logand_lemma #n a b s = nth_lemma (shift_right #n (logand #n a b) s) (logand #n (shift_right #n a s) (shift_right #n b s)) + +let shift_left_logxor_lemma #n a b s = nth_lemma (shift_left #n (logxor #n a b) s) (logxor #n (shift_left #n a s) (shift_left #n b s)) + +let shift_right_logxor_lemma #n a b s = nth_lemma (shift_right #n (logxor #n a b) s) (logxor #n (shift_right #n a s) (shift_right #n b s)) + +let shift_left_logor_lemma #n a b s = nth_lemma (shift_left #n (logor #n a b) s) (logor #n (shift_left #n a s) (shift_left #n b s)) + +let shift_right_logor_lemma #n a b s = nth_lemma (shift_right #n (logor #n a b) s) (logor #n (shift_right #n a s) (shift_right #n b s)) + + +let shift_left_value_aux_1 #n a s = pow2_multiplication_modulo_lemma_1 a n s + +let shift_left_value_aux_2 #n a = + assert_norm(a * pow2 0 = a); + small_modulo_lemma_1 a (pow2 n) + +let shift_left_value_aux_3 #n a s = + append_lemma #(n - s) #s (slice (to_vec a) s n) (zero_vec #s); + slice_right_lemma #n (to_vec a) (n - s); + pow2_multiplication_modulo_lemma_2 a n s + +let shift_left_value_lemma #n a s = + if s >= n then shift_left_value_aux_1 #n a s + else if s = 0 then shift_left_value_aux_2 #n a + else shift_left_value_aux_3 #n a s + +let shift_right_value_aux_1 #n a s = + pow2_le_compat s n; + small_division_lemma_1 a (pow2 s) + +let shift_right_value_aux_2 #n a = assert_norm (pow2 0 == 1) + +#push-options "--z3rlimit 50" +let shift_right_value_aux_3 #n a s = + append_lemma #s #(n - s) (zero_vec #s) (slice (to_vec a) 0 (n - s)); + slice_left_lemma #n (to_vec a) (n - s) +#pop-options + +let shift_right_value_lemma #n a s = + if s >= n then shift_right_value_aux_1 #n a s + else if s = 0 then shift_right_value_aux_2 #n a + else shift_right_value_aux_3 #n a s + +#push-options "--z3rlimit 10" +let lemma_msb_pow2 #n a = if n = 1 then () else from_vec_propriety (to_vec a) 1 +#pop-options + +val plus_one_mod : p:pos -> a:nat -> + Lemma (requires (a < p /\ ((a + 1) % p == 0))) (ensures (a == p - 1)) +let plus_one_mod p a = () + +let lemma_minus_zero #n a = + if minus a = 0 then + begin + plus_one_mod (pow2 n) (lognot a); + lognot_self a; + logxor_self (ones n); + logxor_lemma_2 #n (ones n) + end + +#push-options "--z3rlimit 20 --fuel 1 --ifuel 0" +let lemma_msb_gte #n a b = + from_vec_propriety (to_vec a) 1; + from_vec_propriety (to_vec b) 1 +#pop-options + +(* Lemmas toward showing ~n + 1 = -a *) + +// #set-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1" + +#push-options "--z3rlimit 80" +let lemma_uint_mod #n a = () +#pop-options + +let lemma_add_sub_cancel #n a b = + let ab = (a-b) % pow2 n in + let abb = (ab + b) % pow2 n in + let ab_mod = sub_mod a b in + let abb_mod = add_mod ab b in + let p = pow2 n in + lemma_uint_mod a; + assert (add_mod (sub_mod a b) b = add_mod ab_mod b); + 
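+  (* unfold add_mod and use lemma_mod_plus_distr_l to rewrite
+     ((a - b) % p + b) % p into ((a - b) + b) % p, i.e. a % p = a *)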
assert (add_mod ab_mod b = (ab_mod + b) % p); + assert (add_mod ab_mod b = ((a-b) % p + b) % p); + FStar.Math.Lemmas.lemma_mod_plus_distr_l (a-b) b p; + assert (((a-b) + b) % p = (((a-b) % p) + b) % p); + assert (a % p = (((a-b) % p) + b) % p) + +let lemma_mod_sub_distr_l a b p = + let q = (a - (a % p)) / p in + FStar.Math.Lemmas.lemma_mod_spec2 a p; + FStar.Math.Lemmas.lemma_mod_plus (a % p - b) q p + +let lemma_sub_add_cancel #n a b = + let ab = (a+b) % pow2 n in + let abb = (ab - b) % pow2 n in + let ab_mod = add_mod a b in + let abb_mod = sub_mod ab b in + let p = pow2 n in + lemma_uint_mod a; + lemma_mod_sub_distr_l (a+b) b p + +let lemma_zero_extend #n a = + let hd0 = Seq.create 1 false in + let av = to_vec a in + let eav = Seq.append hd0 av in + let r = zero_extend a in + append_lemma #1 #n hd0 av; + assert (r = from_vec eav); + from_vec_propriety #(n+1) eav 1; + assert (r = a) + +#push-options "--z3rlimit 40" +let lemma_one_extend #n a = + let hd1 = Seq.create 1 true in + let av = to_vec a in + let eav = Seq.append hd1 av in + let r = one_extend a in + append_lemma #1 #n hd1 av; + assert (r = from_vec eav); + from_vec_propriety #(n+1) eav 1; + assert (r = pow2 n + a) +#pop-options + +#push-options "--fuel 1 --ifuel 0 --z3rlimit 40" +let lemma_lognot_zero_ext #n a = + let lhs = lognot #(n+1) a in + let rhs = pow2 n + (lognot #n a) in + + let av = to_vec a in + assert (Seq.length av = n); + let hd0 = Seq.create 1 false in + let hd1 = Seq.create 1 true in + let nav = to_vec (lognot a) in + let eav = Seq.append hd0 av in + + append_lemma #1 #n hd0 av; + assert (from_vec #(n+1) eav = op_Multiply (from_vec #1 hd0) (pow2 n) + from_vec av); + assert (op_Multiply (from_vec #1 hd0) (pow2 n) = 0); + assert (from_vec #(n+1) eav = from_vec #n av); + assert (from_vec #(n+1) eav < pow2 n); + + let nav = BitVector.lognot_vec #n av in + let neav_r = BitVector.lognot_vec #(n+1) eav in + let neav_l = Seq.append hd1 nav in + append_lemma #1 #n hd1 nav; + assert (from_vec #(n+1) neav_l = (op_Multiply (from_vec #1 hd1) (pow2 n)) + (from_vec #n nav)); + assert (op_Multiply (from_vec #1 hd1) (pow2 n) = pow2 n); + assert (from_vec #(n+1) neav_l = pow2 n + from_vec #n nav); + assert (pow2 n + from_vec #n nav = rhs); + + assert (forall (i:pos{i < n+1}). 
Seq.index neav_r i = Seq.index neav_l i); + Seq.Base.lemma_eq_intro neav_l neav_r; + assert (neav_l = neav_r); + assert (from_vec neav_r = lhs) + +let lemma_lognot_one_ext #n a = + let lhs = lognot #(n+1) (one_extend a) in + let rhs = lognot #n a in + let av = to_vec a in + assert (Seq.length av = n); + let hd0 = Seq.create 1 false in + let hd1 = Seq.create 1 true in + let nav = to_vec (lognot #n a) in + let eav = Seq.append hd1 av in + append_lemma #1 #n hd1 av; + append_lemma #1 #n hd0 nav; + let nav = BitVector.lognot_vec #n av in + let neav_r = BitVector.lognot_vec #(n+1) eav in + let neav_l = Seq.append hd0 nav in + Seq.Base.lemma_eq_elim neav_l neav_r + +#push-options "--z3rlimit 60" +let rec lemma_lognot_value_mod #n a = + if n = 1 then () else + begin + assert (-pow2 n <= (-(a+1)) && -(a+1) < 0); + + let av = to_vec a in + let hd = from_vec #1 (Seq.slice (to_vec a) 0 1) in + let tl = from_vec #(n-1) (Seq.slice (to_vec a) 1 n) in + + assert (hd = 0 || hd = 1); + let hdpow = op_Multiply hd (pow2 (n-1)) in + + from_vec_propriety (to_vec a) 1; + assert (from_vec av = (op_Multiply + (from_vec #1 (Seq.slice av 0 1)) (pow2 (n-1))) + + (from_vec #(n-1) (Seq.slice av 1 n))); + + let ntl = lognot tl in + lemma_lognot_value_mod #(n-1) tl; + assert (ntl = pow2 (n-1) - tl - 1); + + assert (a = hdpow + tl); + assert (lognot a = lognot #n (hdpow + tl)); + assert (tl < pow2 (n-1)); + if hdpow = 0 then + begin + assert (lognot a = lognot #n tl); + lemma_lognot_zero_ext #(n-1) tl; + lemma_zero_extend tl + end + else + begin + lemma_lognot_one_ext #(n-1) tl; + lemma_one_extend tl + end + end +#pop-options + +let lemma_lognot_value_zero #n a = + let p = pow2 n in + calc (==) { + sub_mod (sub_mod 0 a) 1; + == { } + sub_mod ((0 - a) % p) 1; + == { } + ((0 - a) % p - 1) % p; + == { } + (0 % p - 1) % p; + == { modulo_lemma 0 p } + (0 - 1) % p; + == { lemma_mod_sub_0 p } + p - 1; + == { } + p - 0 - 1; + == { lemma_lognot_value_mod a } + lognot a; + } +#pop-options + +#push-options "--z3rlimit 150" +private +val lemma_mod_variation: #n:pos -> a:uint_t n -> + Lemma (a <> 0 ==> ((-a) % pow2 n) - 1 % pow2 n = (((-a) % pow2 n) - 1) % pow2 n) +let lemma_mod_variation #n a = assert (pow2 n =!= 0) +#pop-options + +let lemma_one_mod_pow2 #n = () + +#push-options "--z3rlimit 50" +private +val lemma_lognot_value_variation: #n:pos -> a:uint_t n{a <> 0} -> + Lemma (lognot a = (-a) % pow2 n - 1 % pow2 n) +let lemma_lognot_value_variation #n a = + let p = pow2 n in + calc (==) { + lognot a <: int; + == { lemma_lognot_value_mod a } + p - a - 1; + == { FStar.Math.Lemmas.modulo_lemma a p } + p - (a % p) - 1; + == { FStar.Math.Lemmas.modulo_lemma 1 p } + (p - (a % p)) - (1 % p); + == { FStar.Math.Lemmas.lemma_mod_sub_1 a p } + (-a) % p - 1 % p; + } +#pop-options + +let lemma_lognot_value_nonzero #n a = + let p = pow2 n in + lemma_lognot_value_variation #n a; + assert (lognot a = (-a) % (pow2 n) - 1 % (pow2 n)); + assert (sub_mod (sub_mod 0 a) 1 = (((0 - a) % p) - 1) % p); + lemma_mod_variation #n a; + assert (((-a) % p) - 1 % p = (((-a) % p) - 1) % p); + assert ((-a) % p - 1 % p = (((0 - a) % p) - 1) % p) + +let lemma_lognot_value #n a = + if a = 0 then lemma_lognot_value_zero a + else lemma_lognot_value_nonzero a + +let lemma_minus_eq_zero_sub #n a = + let na = lognot a in + let ma = minus a in + assert (sub_mod ma 1 = sub_mod (add_mod na 1) 1); + lemma_sub_add_cancel na 1; + assert (sub_mod ma 1 = na); + lemma_lognot_value #n a; + assert (na = sub_mod (sub_mod 0 a) 1); + assert (ma = add_mod (sub_mod (sub_mod 0 a) 1) 1); + 
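+  (* adding 1 back after subtracting 1 cancels, leaving ma = sub_mod 0 a *)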
lemma_add_sub_cancel (sub_mod 0 a) 1 diff --git a/stage0/ulib/FStar.UInt.fsti b/stage0/ulib/FStar.UInt.fsti new file mode 100644 index 00000000000..1b1c58f45ea --- /dev/null +++ b/stage0/ulib/FStar.UInt.fsti @@ -0,0 +1,603 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.UInt + +(* NOTE: anything that you fix/update here should be reflected in [FStar.Int.fsti], which is mostly + * a copy-paste of this module. *) + +open FStar.Mul +open FStar.BitVector +open FStar.Math.Lemmas + +val pow2_values: x:nat -> Lemma + (let p = pow2 x in + match x with + | 0 -> p=1 + | 1 -> p=2 + | 8 -> p=256 + | 16 -> p=65536 + | 31 -> p=2147483648 + | 32 -> p=4294967296 + | 63 -> p=9223372036854775808 + | 64 -> p=18446744073709551616 + | 128 -> p=0x100000000000000000000000000000000 + | _ -> True) + [SMTPat (pow2 x)] + +/// Specs +/// +/// Note: lacking any type of functors for F*, this is a copy/paste of [FStar.Int.fst], where the relevant bits that changed are: +/// - definition of max and min +/// - use of regular integer modulus instead of wrap-around modulus + +let max_int (n:nat) : Tot int = pow2 n - 1 +let min_int (n:nat) : Tot int = 0 + +let fits (x:int) (n:nat) : Tot bool = min_int n <= x && x <= max_int n +let size (x:int) (n:nat) : Tot Type0 = b2t(fits x n) + +(* Machine integer type *) +[@@do_not_unrefine] +type uint_t (n:nat) = x:int{size x n} + +/// Constants + +let zero (n:nat) : Tot (uint_t n) = 0 + +let pow2_n (#n:pos) (p:nat{p < n}) : Tot (uint_t n) = + pow2_le_compat (n - 1) p; pow2 p + +let one (n:pos) : Tot (uint_t n) = 1 + +let ones (n:nat) : Tot (uint_t n) = max_int n + +(* Increment and decrement *) +let incr (#n:nat) (a:uint_t n) : Pure (uint_t n) + (requires (b2t (a < max_int n))) (ensures (fun _ -> True)) + = a + 1 + +let decr (#n:nat) (a:uint_t n) : Pure (uint_t n) + (requires (b2t (a > min_int n))) (ensures (fun _ -> True)) + = a - 1 + +val incr_underspec: #n:nat -> a:uint_t n -> Pure (uint_t n) + (requires (b2t (a < max_int n))) + (ensures (fun b -> a + 1 = b)) + +val decr_underspec: #n:nat -> a:uint_t n -> Pure (uint_t n) + (requires (b2t (a > min_int n))) + (ensures (fun b -> a - 1 = b)) + +let incr_mod (#n:nat) (a:uint_t n) : Tot (uint_t n) = (a + 1) % (pow2 n) + +let decr_mod (#n:nat) (a:uint_t n) : Tot (uint_t n) = (a - 1) % (pow2 n) + +(* Addition primitives *) +let add (#n:nat) (a:uint_t n) (b:uint_t n) : Pure (uint_t n) + (requires (size (a + b) n)) + (ensures (fun _ -> True)) + = a + b + +val add_underspec: #n:nat -> a:uint_t n -> b:uint_t n -> Pure (uint_t n) + (requires True) + (ensures (fun c -> + size (a + b) n ==> a + b = c)) + +let add_mod (#n:nat) (a:uint_t n) (b:uint_t n) : Tot (uint_t n) = + (a + b) % (pow2 n) + +(* Subtraction primitives *) +let sub (#n:nat) (a:uint_t n) (b:uint_t n) : Pure (uint_t n) + (requires (size (a - b) n)) + (ensures (fun _ -> True)) + = a - b + +val sub_underspec: #n:nat -> a:uint_t n -> b:uint_t n -> Pure (uint_t n) + (requires True) + (ensures (fun c -> + size (a - b) n ==> a - b = c)) + 
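+(* For example, with n = 8 the modular variants wrap around:
+   add_mod #8 200 100 = 44 and sub_mod #8 3 5 = 254,
+   since both results are reduced modulo pow2 8 = 256. *)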
+let sub_mod (#n:nat) (a:uint_t n) (b:uint_t n) : Tot (uint_t n) = + (a - b) % (pow2 n) + +(* Multiplication primitives *) +let mul (#n:nat) (a:uint_t n) (b:uint_t n) : Pure (uint_t n) + (requires (size (a * b) n)) + (ensures (fun _ -> True)) + = a * b + +val mul_underspec: #n:nat -> a:uint_t n -> b:uint_t n -> Pure (uint_t n) + (requires True) + (ensures (fun c -> + size (a * b) n ==> a * b = c)) + +let mul_mod (#n:nat) (a:uint_t n) (b:uint_t n) : Tot (uint_t n) = + (a * b) % (pow2 n) + +private +val lt_square_div_lt (a:nat) (b:pos) : Lemma + (requires (a < b * b)) + (ensures (a / b < b)) + +#push-options "--fuel 0 --ifuel 0" +let mul_div (#n:nat) (a:uint_t n) (b:uint_t n) : Tot (uint_t n) = + FStar.Math.Lemmas.lemma_mult_lt_sqr a b (pow2 n); + lt_square_div_lt (a * b) (pow2 n); + (a * b) / (pow2 n) +#pop-options + +(* Division primitives *) +let div (#n:nat) (a:uint_t n) (b:uint_t n{b <> 0}) : Pure (uint_t n) + (requires (size (a / b) n)) + (ensures (fun c -> b <> 0 ==> a / b = c)) + = a / b + +val div_underspec: #n:nat -> a:uint_t n -> b:uint_t n{b <> 0} -> Pure (uint_t n) + (requires True) + (ensures (fun c -> + (b <> 0 /\ size (a / b) n) ==> a / b = c)) + +val div_size: #n:pos -> a:uint_t n -> b:uint_t n{b <> 0} -> + Lemma (requires (size a n)) (ensures (size (a / b) n)) + +let udiv (#n:pos) (a:uint_t n) (b:uint_t n{b <> 0}) : Tot (c:uint_t n{b <> 0 ==> a / b = c}) = + div_size #n a b; + a / b + + +(* Modulo primitives *) +let mod (#n:nat) (a:uint_t n) (b:uint_t n{b <> 0}) : Tot (uint_t n) = + a - ((a/b) * b) + +(* Comparison operators *) +let eq #n (a:uint_t n) (b:uint_t n) : Tot bool = (a = b) +let gt #n (a:uint_t n) (b:uint_t n) : Tot bool = (a > b) +let gte #n (a:uint_t n) (b:uint_t n) : Tot bool = (a >= b) +let lt #n (a:uint_t n) (b:uint_t n) : Tot bool = (a < b) +let lte #n (a:uint_t n) (b:uint_t n) : Tot bool = (a <= b) + +/// Casts + +let to_uint_t (m:nat) (a:int) : Tot (uint_t m) = a % pow2 m + +open FStar.Seq.Base + +(* WARNING: Mind the big endian vs little endian definition *) + +(* Casts *) +let rec to_vec (#n:nat) (num:uint_t n) : Tot (bv_t n) = + if n = 0 then empty #bool + else append (to_vec #(n - 1) (num / 2)) (create 1 (num % 2 = 1)) + +let rec from_vec (#n:nat) (vec:bv_t n) : Tot (uint_t n) = + if n = 0 then 0 + else 2 * from_vec #(n - 1) (slice vec 0 (n - 1)) + (if index vec (n - 1) then 1 else 0) + +val to_vec_lemma_1: #n:nat -> a:uint_t n -> b:uint_t n -> + Lemma (requires a = b) (ensures equal (to_vec a) (to_vec b)) + +val to_vec_lemma_2: #n:nat -> a:uint_t n -> b:uint_t n -> + Lemma (requires equal (to_vec a) (to_vec b)) (ensures a = b) + +val inverse_aux: #n:nat -> vec:bv_t n -> i:nat{i < n} -> + Lemma (requires True) (ensures index vec i = index (to_vec (from_vec vec)) i) + [SMTPat (index (to_vec (from_vec vec)) i)] + +val inverse_vec_lemma: #n:nat -> vec:bv_t n -> + Lemma (requires True) (ensures equal vec (to_vec (from_vec vec))) + [SMTPat (to_vec (from_vec vec))] + +val inverse_num_lemma: #n:nat -> num:uint_t n -> + Lemma (requires True) (ensures num = from_vec (to_vec num)) + [SMTPat (from_vec (to_vec num))] + +val from_vec_lemma_1: #n:nat -> a:bv_t n -> b:bv_t n -> + Lemma (requires equal a b) (ensures from_vec a = from_vec b) + +val from_vec_lemma_2: #n:nat -> a:bv_t n -> b:bv_t n -> + Lemma (requires from_vec a = from_vec b) (ensures equal a b) + +val from_vec_aux: #n:nat -> a:bv_t n -> s1:nat{s1 < n} -> s2:nat{s2 < s1} -> + Lemma (requires True) + (ensures (from_vec #s2 (slice a 0 s2)) * pow2 (n - s2) + (from_vec #(s1 - s2) (slice a s2 s1)) * pow2 (n 
- s1) + (from_vec #(n - s1) (slice a s1 n)) = ((from_vec #s2 (slice a 0 s2)) * pow2 (s1 - s2) + (from_vec #(s1 - s2) (slice a s2 s1))) * pow2 (n - s1) + (from_vec #(n - s1) (slice a s1 n))) + +val seq_slice_lemma: #n:nat -> a:bv_t n -> s1:nat{s1 < n} -> t1:nat{t1 >= s1 && t1 <= n} -> s2:nat{s2 < t1 - s1} -> t2:nat{t2 >= s2 && t2 <= t1 - s1} -> + Lemma (equal (slice (slice a s1 t1) s2 t2) (slice a (s1 + s2) (s1 + t2))) + +val from_vec_propriety: #n:pos -> a:bv_t n -> s:nat{s < n} -> + Lemma (requires True) + (ensures from_vec a = (from_vec #s (slice a 0 s)) * pow2 (n - s) + from_vec #(n - s) (slice a s n)) + (decreases (n - s)) + +val append_lemma: #n:pos -> #m:pos -> a:bv_t n -> b:bv_t m -> + Lemma (from_vec #(n + m) (append a b) = (from_vec #n a) * pow2 m + (from_vec #m b)) + +val slice_left_lemma: #n:pos -> a:bv_t n -> s:pos{s < n} -> + Lemma (requires True) + (ensures from_vec #s (slice a 0 s) = (from_vec #n a) / (pow2 (n - s))) + +val slice_right_lemma: #n:pos -> a:bv_t n -> s:pos{s < n} -> + Lemma (requires True) + (ensures from_vec #s (slice a (n - s) n) = (from_vec #n a) % (pow2 s)) + +(* Relations between constants in BitVector and in UInt. *) +val zero_to_vec_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) (ensures index (to_vec (zero n)) i = index (zero_vec #n) i) + [SMTPat (index (to_vec (zero n)) i)] + +val zero_from_vec_lemma: #n:pos -> + Lemma (requires True) (ensures from_vec (zero_vec #n) = zero n) + [SMTPat (from_vec (zero_vec #n))] + +val one_to_vec_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) + (ensures index (to_vec (one n)) i = index (elem_vec #n (n - 1)) i) + [SMTPat (index (to_vec (one n)) i)] + +val pow2_to_vec_lemma: #n:pos -> p:nat{p < n} -> i:nat{i < n} -> + Lemma (requires True) + (ensures index (to_vec (pow2_n #n p)) i = index (elem_vec #n (n - p - 1)) i) + [SMTPat (index (to_vec (pow2_n #n p)) i)] + +val pow2_from_vec_lemma: #n:pos -> p:nat{p < n} -> + Lemma (requires True) (ensures from_vec (elem_vec #n p) = pow2_n #n (n - p - 1)) + [SMTPat (from_vec (elem_vec #n p))] + +val ones_to_vec_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) + (ensures index (to_vec (ones n)) i = index (ones_vec #n) i) + [SMTPat (index (to_vec (ones n)) i)] + +val ones_from_vec_lemma: #n:pos -> + Lemma (requires True) (ensures from_vec (ones_vec #n) = ones n) + [SMTPat (from_vec (ones_vec #n))] + + +(* (nth a i) returns a boolean indicating the i-th bit of a. *) +let nth (#n:pos) (a:uint_t n) (i:nat{i < n}) : Tot bool = + index (to_vec #n a) i + +val nth_lemma: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (requires forall (i:nat{i < n}). 
nth a i = nth b i) + (ensures a = b) + +(* Lemmas for constants *) +val zero_nth_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) (ensures nth (zero n) i = false) + [SMTPat (nth (zero n) i)] + +val pow2_nth_lemma: #n:pos -> p:nat{p < n} -> i:nat{i < n} -> + Lemma (requires True) + (ensures (i = n - p - 1 ==> nth (pow2_n #n p) i = true) /\ + (i <> n - p - 1 ==> nth (pow2_n #n p) i = false)) + [SMTPat (nth (pow2_n #n p) i)] + +val one_nth_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) + (ensures (i = n - 1 ==> nth (one n) i = true) /\ + (i < n - 1 ==> nth (one n) i = false)) + [SMTPat (nth (one n) i)] + +val ones_nth_lemma: #n:pos -> i:nat{i < n} -> + Lemma (requires True) (ensures (nth (ones n) i) = true) + [SMTPat (nth (ones n) i)] + +(* Bitwise operators *) +let logand (#n:pos) (a:uint_t n) (b:uint_t n) : Tot (uint_t n) = + from_vec #n (logand_vec #n (to_vec #n a) (to_vec #n b)) + +let logxor (#n:pos) (a:uint_t n) (b:uint_t n) : Tot (uint_t n) = + from_vec #n (logxor_vec #n (to_vec #n a) (to_vec #n b)) + +let logor (#n:pos) (a:uint_t n) (b:uint_t n) : Tot (uint_t n) = + from_vec #n (logor_vec #n (to_vec #n a) (to_vec #n b)) + +let lognot (#n:pos) (a:uint_t n) : Tot (uint_t n) = + from_vec #n (lognot_vec #n (to_vec #n a)) + +(* Bitwise operators definitions *) +val logand_definition: #n:pos -> a:uint_t n -> b:uint_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (logand a b) i = (nth a i && nth b i))) + [SMTPat (nth (logand a b) i)] + +val logxor_definition: #n:pos -> a:uint_t n -> b:uint_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (logxor a b) i = (nth a i <> nth b i))) + [SMTPat (nth (logxor a b) i)] + +val logor_definition: #n:pos -> a:uint_t n -> b:uint_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (logor a b) i = (nth a i || nth b i))) + [SMTPat (nth (logor a b) i)] + +val lognot_definition: #n:pos -> a:uint_t n -> i:nat{i < n} -> + Lemma (requires True) + (ensures (nth (lognot a) i = not(nth a i))) + [SMTPat (nth (lognot a) i)] + +(* Two's complement unary minus *) +inline_for_extraction +let minus (#n:pos) (a:uint_t n) : Tot (uint_t n) = + add_mod (lognot a) 1 + +(* Bitwise operators lemmas *) +(* TODO: lemmas about the relations between different operators *) +(* Bitwise AND operator *) +val logand_commutative: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (requires True) (ensures (logand #n a b = logand #n b a)) + +val logand_associative: #n:pos -> a:uint_t n -> b:uint_t n -> c:uint_t n -> + Lemma (requires True) + (ensures (logand #n (logand #n a b) c = logand #n a (logand #n b c))) + +val logand_self: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logand #n a a = a)) + +val logand_lemma_1: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logand #n a (zero n) = zero n)) + +val logand_lemma_2: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logand #n a (ones n) = a)) + +(* subset_vec_le_lemma proves that a subset of bits is numerically smaller or equal. *) +val subset_vec_le_lemma: #n:pos -> a:bv_t n -> b:bv_t n -> + Lemma (requires is_subset_vec #n a b) (ensures (from_vec a) <= (from_vec b)) + +(* logand_le proves the the result of AND is less than or equal to both arguments. 
*) +val logand_le: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (requires True) + (ensures (logand a b) <= a /\ (logand a b) <= b) + +(* Bitwise XOR operator *) +val logxor_commutative: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (requires True) (ensures (logxor #n a b = logxor #n b a)) + +val logxor_associative: #n:pos -> a:uint_t n -> b:uint_t n -> c:uint_t n -> + Lemma (requires True) (ensures (logxor #n (logxor #n a b) c = logxor #n a (logxor #n b c))) + +val logxor_self: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logxor #n a a = zero n)) + +val logxor_lemma_1: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logxor #n a (zero n) = a)) + +val logxor_lemma_2: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logxor #n a (ones n) = lognot #n a)) + +private let xor (b:bool) (b':bool) : Tot bool = b <> b' + +private val xor_lemma (a:bool) (b:bool) : Lemma + (requires (True)) + (ensures (xor (xor a b) b = a)) + [SMTPat (xor (xor a b) b)] + +val logxor_inv: #n:pos -> a:uint_t n -> b:uint_t n -> Lemma + (a = logxor #n (logxor #n a b) b) + +val logxor_neq_nonzero: #n:pos -> a:uint_t n -> b:uint_t n -> Lemma + (a <> b ==> logxor a b <> 0) + +(* Bitwise OR operators *) +val logor_commutative: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (requires True) (ensures (logor #n a b = logor #n b a)) + +val logor_associative: #n:pos -> a:uint_t n -> b:uint_t n -> c:uint_t n -> + Lemma (requires True) + (ensures (logor #n (logor #n a b) c = logor #n a (logor #n b c))) + +val logor_self: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logor #n a a = a)) + +val logor_lemma_1: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logor #n a (zero n) = a)) + +val logor_lemma_2: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (logor #n a (ones n) = ones n)) + + +(* superset_vec_le_lemma proves that a superset of bits is numerically greater than or equal. *) +val superset_vec_ge_lemma: #n:pos -> a:bv_t n -> b:bv_t n -> + Lemma (requires is_superset_vec #n a b) + (ensures (from_vec a) >= (from_vec b)) + +(* logor_ge proves that the result of an OR is greater than or equal to both arguments. 
*) +val logor_ge: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (requires True) + (ensures (logor a b) >= a /\ (logor a b) >= b) + +(* Bitwise NOT operator *) +val lognot_self: #n:pos -> a:uint_t n -> + Lemma (requires True) (ensures (lognot #n (lognot #n a) = a)) + +val lognot_lemma_1: #n:pos -> + Lemma (requires True) (ensures (lognot #n (zero n) = ones n)) + +(** Used in the next two lemmas *) +private val index_to_vec_ones: #n:pos -> m:nat{m <= n} -> i:nat{i < n} -> + Lemma (requires True) + (ensures (pow2 m <= pow2 n /\ + (i < n - m ==> index (to_vec #n (pow2 m - 1)) i == false) /\ + (n - m <= i ==> index (to_vec #n (pow2 m - 1)) i == true))) + [SMTPat (index (to_vec #n (pow2 m - 1)) i)] + + +val logor_disjoint: #n:pos -> a:uint_t n -> b:uint_t n -> m:pos{m < n} -> + Lemma (requires (a % pow2 m == 0 /\ b < pow2 m)) + (ensures (logor #n a b == a + b)) + +val logand_mask: #n:pos -> a:uint_t n -> m:pos{m < n} -> + Lemma (pow2 m < pow2 n /\ logand #n a (pow2 m - 1) == a % pow2 m) + + +(* Shift operators *) + +let shift_left (#n:pos) (a:uint_t n) (s:nat) : Tot (uint_t n) = + from_vec (shift_left_vec #n (to_vec #n a) s) + +let shift_right (#n:pos) (a:uint_t n) (s:nat) : Tot (uint_t n) = + from_vec (shift_right_vec #n (to_vec #n a) s) + +(* Shift operators lemmas *) +val shift_left_lemma_1: #n:pos -> a:uint_t n -> s:nat -> i:nat{i < n && i >= n - s} -> + Lemma (requires True) + (ensures (nth (shift_left #n a s) i = false)) + [SMTPat (nth (shift_left #n a s) i)] + +val shift_left_lemma_2: #n:pos -> a:uint_t n -> s:nat -> i:nat{i < n && i < n - s} -> + Lemma (requires True) + (ensures (nth (shift_left #n a s) i = nth #n a (i + s))) + [SMTPat (nth (shift_left #n a s) i)] + +val shift_right_lemma_1: #n:pos -> a:uint_t n -> s:nat -> i:nat{i < n && i < s} -> + Lemma (requires True) + (ensures (nth (shift_right #n a s) i = false)) + [SMTPat (nth (shift_right #n a s) i)] + +val shift_right_lemma_2: #n:pos -> a:uint_t n -> s:nat -> i:nat{i < n && i >= s} -> + Lemma (requires True) + (ensures (nth (shift_right #n a s) i = nth #n a (i - s))) + [SMTPat (nth (shift_right #n a s) i)] + +(* Lemmas with shift operators and bitwise operators *) +val shift_left_logand_lemma: #n:pos -> a:uint_t n -> b:uint_t n -> s:nat -> + Lemma (requires True) + (ensures (shift_left #n (logand #n a b) s = logand #n (shift_left #n a s) (shift_left #n b s))) + +val shift_right_logand_lemma: #n:pos -> a:uint_t n -> b:uint_t n -> s:nat -> + Lemma (requires True) + (ensures (shift_right #n (logand #n a b) s = logand #n (shift_right #n a s) (shift_right #n b s))) + +val shift_left_logxor_lemma: #n:pos -> a:uint_t n -> b:uint_t n -> s:nat -> + Lemma (requires True) + (ensures (shift_left #n (logxor #n a b) s = logxor #n (shift_left #n a s) (shift_left #n b s))) + +val shift_right_logxor_lemma: #n:pos -> a:uint_t n -> b:uint_t n -> s:nat -> + Lemma (requires True) + (ensures (shift_right #n (logxor #n a b) s = logxor #n (shift_right #n a s) (shift_right #n b s))) + +val shift_left_logor_lemma: #n:pos -> a:uint_t n -> b:uint_t n -> s:nat -> + Lemma (requires True) + (ensures (shift_left #n (logor #n a b) s = logor #n (shift_left #n a s) (shift_left #n b s))) + +val shift_right_logor_lemma: #n:pos -> a:uint_t n -> b:uint_t n -> s:nat -> + Lemma (requires True) + (ensures (shift_right #n (logor #n a b) s = logor #n (shift_right #n a s) (shift_right #n b s))) + + +(* Lemmas about value after shift operations *) +val shift_left_value_aux_1: #n:pos -> a:uint_t n -> s:nat{s >= n} -> + Lemma (requires True) + (ensures shift_left #n a s = (a 
* pow2 s) % pow2 n) + +val shift_left_value_aux_2: #n:pos -> a:uint_t n -> + Lemma (requires True) + (ensures shift_left #n a 0 = (a * pow2 0) % pow2 n) + +val shift_left_value_aux_3: #n:pos -> a:uint_t n -> s:pos{s < n} -> + Lemma (requires True) + (ensures shift_left #n a s = (a * pow2 s) % pow2 n) + +val shift_left_value_lemma: #n:pos -> a:uint_t n -> s:nat -> + Lemma (requires True) + (ensures shift_left #n a s = (a * pow2 s) % pow2 n) + [SMTPat (shift_left #n a s)] + +val shift_right_value_aux_1: #n:pos -> a:uint_t n -> s:nat{s >= n} -> + Lemma (requires True) + (ensures shift_right #n a s = a / pow2 s) + +val shift_right_value_aux_2: #n:pos -> a:uint_t n -> + Lemma (requires True) + (ensures shift_right #n a 0 = a / pow2 0) + +val shift_right_value_aux_3: #n:pos -> a:uint_t n -> s:pos{s < n} -> + Lemma (requires True) + (ensures shift_right #n a s = a / pow2 s) + +val shift_right_value_lemma: #n:pos -> a:uint_t n -> s:nat -> + Lemma (requires True) + (ensures shift_right #n a s = a / pow2 s) + [SMTPat (shift_right #n a s)] + +(* Lemmas about the most significant bit in various situations *) + +let msb (#n:pos) (a:uint_t n) : Tot bool = nth a 0 + +val lemma_msb_pow2: #n:pos -> a:uint_t n -> + Lemma (msb a <==> a >= pow2 (n-1)) + +val lemma_minus_zero: #n:pos -> a:uint_t n -> + Lemma (minus a = 0 ==> a = 0) + +val lemma_msb_gte: #n:pos{n > 1} -> a:uint_t n -> b:uint_t n -> + Lemma ((a >= b && not (msb a)) ==> not (msb b)) + + +(* Lemmas toward showing ~n + 1 = -a *) + +val lemma_uint_mod: #n:pos -> a:uint_t n -> + Lemma (a = a % pow2 n) + +val lemma_add_sub_cancel: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (add_mod (sub_mod a b) b = a) + +val lemma_mod_sub_distr_l: a:int -> b:int -> p:pos -> + Lemma ((a - b) % p = ((a % p) - b) % p) + +val lemma_sub_add_cancel: #n:pos -> a:uint_t n -> b:uint_t n -> + Lemma (sub_mod (add_mod a b) b = a) + +let zero_extend_vec (#n:pos) (a:BitVector.bv_t n): Tot (BitVector.bv_t (n+1)) = append (create 1 false) a +let one_extend_vec (#n:pos) (a:BitVector.bv_t n): Tot (BitVector.bv_t (n+1)) = append (create 1 true) a + +let zero_extend (#n:pos) (a:uint_t n): Tot (uint_t (n+1)) = from_vec (zero_extend_vec (to_vec a)) +let one_extend (#n:pos) (a:uint_t n): Tot (uint_t (n+1)) = from_vec (one_extend_vec (to_vec a)) + +val lemma_zero_extend: #n:pos -> a:uint_t n -> + Lemma (zero_extend a = a) + [SMTPat (zero_extend a)] + +val lemma_one_extend: #n:pos -> a:uint_t n -> + Lemma (one_extend a = pow2 n + a) + [SMTPat (one_extend a)] + +val lemma_lognot_zero_ext: #n:pos -> a:uint_t n -> + Lemma (lognot #(n+1) (zero_extend a) = pow2 n + (lognot #n a)) + +val lemma_lognot_one_ext: #n:pos -> a:uint_t n -> + Lemma (lognot #(n+1) (one_extend a) = lognot #n a) + +val lemma_lognot_value_mod: #n:pos -> a:uint_t n -> + Lemma + (requires True) + (ensures (lognot a = pow2 n - a - 1)) + (decreases n) + +val lemma_lognot_value_zero: #n:pos -> a:uint_t n{a = 0} -> + Lemma (lognot a = sub_mod (sub_mod 0 a) 1) + +val lemma_one_mod_pow2: #n:pos -> + Lemma (1 = 1 % (pow2 n)) + +val lemma_lognot_value_nonzero: #n:pos -> a:uint_t n{a <> 0} -> + Lemma (lognot a = sub_mod (sub_mod 0 a) 1) + +val lemma_lognot_value: #n:pos -> a:uint_t n -> + Lemma (lognot #n a = sub_mod (sub_mod 0 a) 1) + +val lemma_minus_eq_zero_sub: #n:pos -> a:uint_t n -> + Lemma (minus #n a = sub_mod #n 0 a) diff --git a/stage0/ulib/FStar.UInt128.fst b/stage0/ulib/FStar.UInt128.fst new file mode 100644 index 00000000000..c4cb549d4f0 --- /dev/null +++ b/stage0/ulib/FStar.UInt128.fst @@ -0,0 +1,1230 @@ +(* + Copyright 
2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.UInt128 + +open FStar.Mul + +module UInt = FStar.UInt +module Seq = FStar.Seq +module BV = FStar.BitVector + +module U32 = FStar.UInt32 +module U64 = FStar.UInt64 + +module Math = FStar.Math.Lemmas + +open FStar.BV + +(* We try to keep the dependencies of this module low, +by not opening the full Tactics module. This speeds up +checking the library by improving parallelism. *) +module T = FStar.Stubs.Tactics.V2.Builtins +module TD = FStar.Tactics.V2.Derived +module TM = FStar.Tactics.MApply + +#set-options "--max_fuel 0 --max_ifuel 0 --split_queries no" +#set-options "--using_facts_from '*,-FStar.Tactics,-FStar.Reflection'" + +(* TODO: explain why exactly this is needed? It leads to failures in +HACL* otherwise, claiming that some functions are not Low*. *) +#set-options "--normalize_pure_terms_for_extraction" + +[@@ noextract_to "krml"] +noextract +let carry_uint64 (a b: uint_t 64) : Tot (uint_t 64) = + let ( ^^ ) = UInt.logxor in + let ( |^ ) = UInt.logor in + let ( -%^ ) = UInt.sub_mod in + let ( >>^ ) = UInt.shift_right in + a ^^ ((a ^^ b) |^ ((a -%^ b) ^^ b)) >>^ 63 + +[@@ noextract_to "krml"] +noextract +let carry_bv (a b: uint_t 64) = + bvshr (bvxor (int2bv a) + (bvor (bvxor (int2bv a) (int2bv b)) (bvxor (bvsub (int2bv a) (int2bv b)) (int2bv b)))) + 63 + +let carry_uint64_ok (a b:uint_t 64) + : squash (int2bv (carry_uint64 a b) == carry_bv a b) + = _ by (T.norm [delta_only [`%carry_uint64]; unascribe]; + let open FStar.Tactics.BV in + TM.mapply (`FStar.Tactics.BV.Lemmas.trans); + arith_to_bv_tac (); + arith_to_bv_tac (); + T.norm [delta_only [`%carry_bv]]; + TD.trefl()) + +let fact1 (a b: uint_t 64) = carry_bv a b == int2bv 1 +let fact0 (a b: uint_t 64) = carry_bv a b == int2bv 0 + +let lem_ult_1 (a b: uint_t 64) + : squash (bvult (int2bv a) (int2bv b) ==> fact1 a b) + = assert (bvult (int2bv a) (int2bv b) ==> fact1 a b) + by (T.norm [delta_only [`%fact1;`%carry_bv]]; + T.set_options "--smtencoding.elim_box true --using_facts_from '__Nothing__' --z3smtopt '(set-option :smt.case_split 1)'"; + TD.smt()) + +let lem_ult_2 (a b:uint_t 64) + : squash (not (bvult (int2bv a) (int2bv b)) ==> fact0 a b) + = assert (not (bvult (int2bv a) (int2bv b)) ==> fact0 a b) + by (T.norm [delta_only [`%fact0;`%carry_bv]]; + T.set_options "--smtencoding.elim_box true --using_facts_from '__Nothing__' --z3smtopt '(set-option :smt.case_split 1)'") + +let int2bv_ult (#n: pos) (a b: uint_t n) + : Lemma (ensures a < b <==> bvult #n (int2bv #n a) (int2bv #n b)) + = introduce (a < b) ==> (bvult #n (int2bv #n a) (int2bv #n b)) + with _ . FStar.BV.int2bv_lemma_ult_1 a b; + introduce (bvult #n (int2bv #n a) (int2bv #n b)) ==> (a < b) + with _ . 
FStar.BV.int2bv_lemma_ult_2 a b + +let lem_ult (a b:uint_t 64) + : Lemma (if a < b + then fact1 a b + else fact0 a b) + = int2bv_ult a b; + lem_ult_1 a b; + lem_ult_2 a b + +let constant_time_carry (a b: U64.t) : Tot U64.t = + let open U64 in + // CONSTANT_TIME_CARRY macro + // ((a ^ ((a ^ b) | ((a - b) ^ b))) >> (sizeof(a) * 8 - 1)) + // 63 = sizeof(a) * 8 - 1 + a ^^ ((a ^^ b) |^ ((a -%^ b) ^^ b)) >>^ 63ul + +let carry_uint64_equiv (a b:UInt64.t) + : Lemma (U64.v (constant_time_carry a b) == carry_uint64 (U64.v a) (U64.v b)) + = () + +// This type gets a special treatment in KaRaMeL and its definition is never +// printed in the resulting C file. +type uint128: Type0 = { low: U64.t; high: U64.t } + +let t = uint128 + +let _ = intro_ambient n +let _ = intro_ambient t + +[@@ noextract_to "krml"] +let v x = U64.v x.low + (U64.v x.high) * (pow2 64) + +let div_mod (x:nat) (k:nat{k > 0}) : Lemma (x / k * k + x % k == x) = () + +let uint_to_t x = + div_mod x (pow2 64); + { low = U64.uint_to_t (x % (pow2 64)); + high = U64.uint_to_t (x / (pow2 64)); } + +let v_inj (x1 x2: t): Lemma (requires (v x1 == v x2)) + (ensures x1 == x2) = + assert (uint_to_t (v x1) == uint_to_t (v x2)); + assert (uint_to_t (v x1) == x1); + assert (uint_to_t (v x2) == x2); + () + +(* A weird helper used below... seems like the native encoding of +bitvectors may be making these proofs much harder than they should be? *) +let bv2int_fun (#n:pos) (a b : bv_t n) + : Lemma (requires a == b) + (ensures bv2int a == bv2int b) + = () + +(* This proof is quite brittle. It has a bunch of annotations to get +decent verification performance. *) +let constant_time_carry_ok (a b:U64.t) + : Lemma (constant_time_carry a b == + (if U64.lt a b then U64.uint_to_t 1 else U64.uint_to_t 0)) + = calc (==) { + U64.v (constant_time_carry a b); + (==) { carry_uint64_equiv a b } + carry_uint64 (U64.v a) (U64.v b); + (==) { inverse_num_lemma (carry_uint64 (U64.v a) (U64.v b)) } + bv2int (int2bv (carry_uint64 (U64.v a) (U64.v b))); + (==) { carry_uint64_ok (U64.v a) (U64.v b); + bv2int_fun (int2bv (carry_uint64 (U64.v a) (U64.v b))) (carry_bv (U64.v a) (U64.v b)); + () + } + bv2int (carry_bv (U64.v a) (U64.v b)); + (==) { + lem_ult (U64.v a) (U64.v b); + bv2int_fun (carry_bv (U64.v a) (U64.v b)) (if U64.v a < U64.v b then int2bv 1 else int2bv 0) + } + bv2int + (if U64.v a < U64.v b + then int2bv 1 + else int2bv 0); + }; + assert ( + bv2int (if U64.v a < U64.v b then int2bv 1 else int2bv 0) + == U64.v (if U64.lt a b then U64.uint_to_t 1 else U64.uint_to_t 0)) by (T.norm []); + U64.v_inj (constant_time_carry a b) (if U64.lt a b then U64.uint_to_t 1 else U64.uint_to_t 0) + +let carry (a b: U64.t) : Pure U64.t + (requires True) + (ensures (fun r -> U64.v r == (if U64.v a < U64.v b then 1 else 0))) = + constant_time_carry_ok a b; + constant_time_carry a b + +let carry_sum_ok (a b:U64.t) : + Lemma (U64.v (carry (U64.add_mod a b) b) == (U64.v a + U64.v b) / (pow2 64)) = () + +let add (a b: t) : Pure t + (requires (v a + v b < pow2 128)) + (ensures (fun r -> v a + v b = v r)) = + let l = U64.add_mod a.low b.low in + carry_sum_ok a.low b.low; + { low = l; + high = U64.add (U64.add a.high b.high) (carry l b.low); } + +let add_underspec (a b: t) = + let l = U64.add_mod a.low b.low in + begin + if v a + v b < pow2 128 + then carry_sum_ok a.low b.low + else () + end; + { low = l; + high = U64.add_underspec (U64.add_underspec a.high b.high) (carry l b.low); } + +val mod_mod: a:nat -> k:nat{k>0} -> k':nat{k'>0} -> + Lemma ((a % k) % (k'*k) == a % k) +let mod_mod a k 
k' = + assert (a % k < k); + assert (a % k < k' * k) + +let mod_spec (a:nat) (k:nat{k > 0}) : + Lemma (a % k == a - a / k * k) = () + +val div_product : n:nat -> m1:nat{m1>0} -> m2:nat{m2>0} -> + Lemma (n / (m1*m2) == (n / m1) / m2) +let div_product n m1 m2 = + Math.division_multiplication_lemma n m1 m2 + +val mul_div_cancel : n:nat -> k:nat{k>0} -> + Lemma ((n * k) / k == n) +let mul_div_cancel n k = + Math.cancel_mul_div n k + +val mod_mul: n:nat -> k1:pos -> k2:pos -> + Lemma ((n % k2) * k1 == (n * k1) % (k1*k2)) +let mod_mul n k1 k2 = + Math.modulo_scale_lemma n k1 k2 + +let mod_spec_rew_n (n:nat) (k:nat{k > 0}) : + Lemma (n == n / k * k + n % k) = mod_spec n k + +val mod_add: n1:nat -> n2:nat -> k:nat{k > 0} -> + Lemma ((n1 % k + n2 % k) % k == (n1 + n2) % k) +let mod_add n1 n2 k = Math.modulo_distributivity n1 n2 k + +val mod_add_small: n1:nat -> n2:nat -> k:nat{k > 0} -> Lemma + (requires (n1 % k + n2 % k < k)) + (ensures (n1 % k + n2 % k == (n1 + n2) % k)) +let mod_add_small n1 n2 k = + mod_add n1 n2 k; + Math.small_modulo_lemma_1 (n1%k + n2%k) k + +// This proof is pretty stable with the calc proof, but it can fail +// ~1% of the times, so add a retry. +#push-options "--split_queries no --z3rlimit 20 --retry 5" +let add_mod (a b: t) : Pure t + (requires True) + (ensures (fun r -> (v a + v b) % pow2 128 = v r)) = + + let l = U64.add_mod a.low b.low in + let r = { low = l; + high = U64.add_mod (U64.add_mod a.high b.high) (carry l b.low)} in + let a_l = U64.v a.low in + let a_h = U64.v a.high in + let b_l = U64.v b.low in + let b_h = U64.v b.high in + carry_sum_ok a.low b.low; + Math.lemma_mod_plus_distr_l (a_h + b_h) ((a_l + b_l) / (pow2 64)) (pow2 64); + calc (==) { + U64.v r.high * pow2 64; + == {} + ((a_h + b_h + (a_l + b_l) / (pow2 64)) % pow2 64) * pow2 64; + == { mod_mul (a_h + b_h + (a_l + b_l) / (pow2 64)) (pow2 64) (pow2 64) } + ((a_h + b_h + (a_l + b_l) / (pow2 64)) * pow2 64) % (pow2 64 * pow2 64); + == {} + ((a_h + b_h + (a_l + b_l)/(pow2 64)) * pow2 64) + % pow2 128; + == {} + (a_h * pow2 64 + b_h * pow2 64 + ((a_l + b_l)/(pow2 64)) * pow2 64) + % pow2 128; + }; + assert (U64.v r.low == (U64.v r.low) % pow2 128); + mod_add_small (a_h * pow2 64 + + b_h * pow2 64 + + (a_l + b_l) / (pow2 64) * (pow2 64)) + ((a_l + b_l) % (pow2 64)) + (pow2 128); + assert (U64.v r.low + U64.v r.high * pow2 64 == + (a_h * pow2 64 + + b_h * pow2 64 + + (a_l + b_l) / (pow2 64) * (pow2 64) + (a_l + b_l) % (pow2 64)) % pow2 128); + mod_spec_rew_n (a_l + b_l) (pow2 64); + assert (v r == + (a_h * pow2 64 + + b_h * pow2 64 + + a_l + b_l) % pow2 128); + assert_spinoff ((v a + v b) % pow2 128 = v r); + r +#pop-options + +#push-options "--retry 5" +let sub (a b: t) : Pure t + (requires (v a - v b >= 0)) + (ensures (fun r -> v r = v a - v b)) = + let l = U64.sub_mod a.low b.low in + { low = l; + high = U64.sub (U64.sub a.high b.high) (carry a.low l); } +#pop-options + +let sub_underspec (a b: t) = + let l = U64.sub_mod a.low b.low in + { low = l; + high = U64.sub_underspec (U64.sub_underspec a.high b.high) (carry a.low l); } + +let sub_mod_impl (a b: t) : t = + let l = U64.sub_mod a.low b.low in + { low = l; + high = U64.sub_mod (U64.sub_mod a.high b.high) (carry a.low l); } + +#push-options "--retry 10" // flaky +let sub_mod_pos_ok (a b:t) : Lemma + (requires (v a - v b >= 0)) + (ensures (v (sub_mod_impl a b) = v a - v b)) = + assert (sub a b == sub_mod_impl a b); + () +#pop-options + +val u64_diff_wrap : a:U64.t -> b:U64.t -> + Lemma (requires (U64.v a < U64.v b)) + (ensures (U64.v (U64.sub_mod a b) 
== U64.v a - U64.v b + pow2 64)) +let u64_diff_wrap a b = () + +#push-options "--z3rlimit 20" +val sub_mod_wrap1_ok : a:t -> b:t -> Lemma + (requires (v a - v b < 0 /\ U64.v a.low < U64.v b.low)) + (ensures (v (sub_mod_impl a b) = v a - v b + pow2 128)) + +#push-options "--retry 10" +let sub_mod_wrap1_ok a b = + // carry == 1 and subtraction in low wraps + let l = U64.sub_mod a.low b.low in + assert (U64.v (carry a.low l) == 1); + u64_diff_wrap a.low b.low; + // a.high <= b.high since v a < v b; + // case split on equality and strictly less + if U64.v a.high = U64.v b.high then () + else begin + u64_diff_wrap a.high b.high; + () + end +#pop-options +#pop-options + + +let sum_lt (a1 a2 b1 b2:nat) : Lemma + (requires (a1 + a2 < b1 + b2 /\ a1 >= b1)) + (ensures (a2 < b2)) = () + +let sub_mod_wrap2_ok (a b:t) : Lemma + (requires (v a - v b < 0 /\ U64.v a.low >= U64.v b.low)) + (ensures (v (sub_mod_impl a b) = v a - v b + pow2 128)) = + // carry == 0, subtraction in low is exact, but subtraction in high + // must wrap since v a < v b + let l = U64.sub_mod a.low b.low in + let r = sub_mod_impl a b in + assert (U64.v l == U64.v a.low - U64.v b.low); + assert (U64.v (carry a.low l) == 0); + sum_lt (U64.v a.low) (U64.v a.high * pow2 64) (U64.v b.low) (U64.v b.high * pow2 64); + assert (U64.v (U64.sub_mod a.high b.high) == U64.v a.high - U64.v b.high + pow2 64); + () + +let sub_mod_wrap_ok (a b:t) : Lemma + (requires (v a - v b < 0)) + (ensures (v (sub_mod_impl a b) = v a - v b + pow2 128)) = + if U64.v a.low < U64.v b.low + then sub_mod_wrap1_ok a b + else sub_mod_wrap2_ok a b + +#restart-solver +#push-options "--z3rlimit 40" +let sub_mod (a b: t) : Pure t + (requires True) + (ensures (fun r -> v r = (v a - v b) % pow2 128)) = + (if v a - v b >= 0 + then sub_mod_pos_ok a b + else sub_mod_wrap_ok a b); + sub_mod_impl a b +#pop-options + +#restart-solver + +val shift_bound : #n:nat -> num:UInt.uint_t n -> n':nat -> + Lemma (num * pow2 n' <= pow2 (n'+n) - pow2 n') +let shift_bound #n num n' = + Math.lemma_mult_le_right (pow2 n') num (pow2 n - 1); + Math.distributivity_sub_left (pow2 n) 1 (pow2 n'); + Math.pow2_plus n' n + +val append_uint : #n1:nat -> #n2:nat -> num1:UInt.uint_t n1 -> num2:UInt.uint_t n2 -> UInt.uint_t (n1+n2) +let append_uint #n1 #n2 num1 num2 = + shift_bound num2 n1; + num1 + num2 * pow2 n1 + +val to_vec_append : #n1:nat{n1 > 0} -> #n2:nat{n2 > 0} -> num1:UInt.uint_t n1 -> num2:UInt.uint_t n2 -> + Lemma (UInt.to_vec (append_uint num1 num2) == Seq.append (UInt.to_vec num2) (UInt.to_vec num1)) +let to_vec_append #n1 #n2 num1 num2 = + UInt.append_lemma (UInt.to_vec num2) (UInt.to_vec num1) + +let vec128 (a: t) : BV.bv_t 128 = UInt.to_vec #128 (v a) +let vec64 (a: U64.t) : BV.bv_t 64 = UInt.to_vec (U64.v a) + +let to_vec_v (a: t) : + Lemma (vec128 a == Seq.append (vec64 a.high) (vec64 a.low)) = + to_vec_append (U64.v a.low) (U64.v a.high) + +val logand_vec_append (#n1 #n2: pos) (a1 b1: BV.bv_t n1) (a2 b2: BV.bv_t n2) : + Lemma (Seq.append (BV.logand_vec a1 b1) (BV.logand_vec a2 b2) == + BV.logand_vec #(n1 + n2) (Seq.append a1 a2) (Seq.append b1 b2)) +let logand_vec_append #n1 #n2 a1 b1 a2 b2 = + Seq.lemma_eq_intro (Seq.append (BV.logand_vec a1 b1) (BV.logand_vec a2 b2)) + (BV.logand_vec #(n1 + n2) (Seq.append a1 a2) (Seq.append b1 b2)) + +let logand (a b: t) : Pure t + (requires True) + (ensures (fun r -> v r = UInt.logand #128 (v a) (v b))) = + let r = { low = U64.logand a.low b.low; + high = U64.logand a.high b.high } in + to_vec_v r; + assert (vec128 r == Seq.append (vec64 r.high) 
(vec64 r.low)); + logand_vec_append (vec64 a.high) (vec64 b.high) + (vec64 a.low) (vec64 b.low); + to_vec_v a; + to_vec_v b; + assert (vec128 r == BV.logand_vec (vec128 a) (vec128 b)); + r + +val logxor_vec_append (#n1 #n2: pos) (a1 b1: BV.bv_t n1) (a2 b2: BV.bv_t n2) : + Lemma (Seq.append (BV.logxor_vec a1 b1) (BV.logxor_vec a2 b2) == + BV.logxor_vec #(n1 + n2) (Seq.append a1 a2) (Seq.append b1 b2)) +let logxor_vec_append #n1 #n2 a1 b1 a2 b2 = + Seq.lemma_eq_intro (Seq.append (BV.logxor_vec a1 b1) (BV.logxor_vec a2 b2)) + (BV.logxor_vec #(n1 + n2) (Seq.append a1 a2) (Seq.append b1 b2)) + +let logxor (a b: t) : Pure t + (requires True) + (ensures (fun r -> v r = UInt.logxor #128 (v a) (v b))) = + let r = { low = U64.logxor a.low b.low; + high = U64.logxor a.high b.high } in + to_vec_v r; + assert (vec128 r == Seq.append (vec64 r.high) (vec64 r.low)); + logxor_vec_append (vec64 a.high) (vec64 b.high) + (vec64 a.low) (vec64 b.low); + to_vec_v a; + to_vec_v b; + assert (vec128 r == BV.logxor_vec (vec128 a) (vec128 b)); + r + +val logor_vec_append (#n1 #n2: pos) (a1 b1: BV.bv_t n1) (a2 b2: BV.bv_t n2) : + Lemma (Seq.append (BV.logor_vec a1 b1) (BV.logor_vec a2 b2) == + BV.logor_vec #(n1 + n2) (Seq.append a1 a2) (Seq.append b1 b2)) +let logor_vec_append #n1 #n2 a1 b1 a2 b2 = + Seq.lemma_eq_intro (Seq.append (BV.logor_vec a1 b1) (BV.logor_vec a2 b2)) + (BV.logor_vec #(n1 + n2) (Seq.append a1 a2) (Seq.append b1 b2)) + +let logor (a b: t) : Pure t + (requires True) + (ensures (fun r -> v r = UInt.logor #128 (v a) (v b))) = + let r = { low = U64.logor a.low b.low; + high = U64.logor a.high b.high } in + to_vec_v r; + assert (vec128 r == Seq.append (vec64 r.high) (vec64 r.low)); + logor_vec_append (vec64 a.high) (vec64 b.high) + (vec64 a.low) (vec64 b.low); + to_vec_v a; + to_vec_v b; + assert (vec128 r == BV.logor_vec (vec128 a) (vec128 b)); + r + +val lognot_vec_append (#n1 #n2: pos) (a1: BV.bv_t n1) (a2: BV.bv_t n2) : + Lemma (Seq.append (BV.lognot_vec a1) (BV.lognot_vec a2) == + BV.lognot_vec #(n1 + n2) (Seq.append a1 a2)) +let lognot_vec_append #n1 #n2 a1 a2 = + Seq.lemma_eq_intro (Seq.append (BV.lognot_vec a1) (BV.lognot_vec a2)) + (BV.lognot_vec #(n1 + n2) (Seq.append a1 a2)) + +let lognot (a: t) : Pure t + (requires True) + (ensures (fun r -> v r = UInt.lognot #128 (v a))) = + let r = { low = U64.lognot a.low; + high = U64.lognot a.high } in + to_vec_v r; + assert (vec128 r == Seq.append (vec64 r.high) (vec64 r.low)); + lognot_vec_append (vec64 a.high) (vec64 a.low); + to_vec_v a; + assert (vec128 r == BV.lognot_vec (vec128 a)); + r + +let mod_mul_cancel (n:nat) (k:nat{k > 0}) : + Lemma ((n * k) % k == 0) = + mod_spec (n * k) k; + mul_div_cancel n k; + () + +let shift_past_mod (n:nat) (k1:nat) (k2:nat{k2 >= k1}) : + Lemma ((n * pow2 k2) % pow2 k1 == 0) = + assert (k2 == (k2 - k1) + k1); + Math.pow2_plus (k2 - k1) k1; + Math.paren_mul_right n (pow2 (k2 - k1)) (pow2 k1); + mod_mul_cancel (n * pow2 (k2 - k1)) (pow2 k1) + +val mod_double: a:nat -> k:nat{k>0} -> + Lemma (a % k % k == a % k) +let mod_double a k = + mod_mod a k 1 + +let shift_left_large_val (#n1:nat) (#n2: nat) (a1:UInt.uint_t n1) (a2:UInt.uint_t n2) (s:nat) : + Lemma ((a1 + a2 * pow2 n1) * pow2 s == (a1 * pow2 s + a2 * pow2 (n1+s))) = + Math.distributivity_add_left a1 (a2 * pow2 n1) (pow2 s); + Math.paren_mul_right a2 (pow2 n1) (pow2 s); + Math.pow2_plus n1 s + +#push-options "--z3rlimit 40" +let shift_left_large_lemma (#n1: nat) (#n2: nat) (a1:UInt.uint_t n1) (a2:UInt.uint_t n2) (s: nat{s >= n2}) : + Lemma (((a1 + a2 * pow2 n1) * 
pow2 s) % pow2 (n1+n2) == + (a1 * pow2 s) % pow2 (n1+n2)) = + shift_left_large_val a1 a2 s; + mod_add (a1 * pow2 s) (a2 * pow2 (n1+s)) (pow2 (n1+n2)); + shift_past_mod a2 (n1+n2) (n1+s); + mod_double (a1 * pow2 s) (pow2 (n1+n2)); + () +#pop-options + +val shift_left_large_lemma_t : a:t -> s:nat -> + Lemma (requires (s >= 64)) + (ensures ((v a * pow2 s) % pow2 128 == + (U64.v a.low * pow2 s) % pow2 128)) +let shift_left_large_lemma_t a s = + shift_left_large_lemma #64 #64 (U64.v a.low) (U64.v a.high) s + +private let u32_64: n:U32.t{U32.v n == 64} = U32.uint_to_t 64 + +val div_pow2_diff: a:nat -> n1:nat -> n2:nat{n2 <= n1} -> Lemma + (requires True) + (ensures (a / pow2 (n1 - n2) == a * pow2 n2 / pow2 n1)) +let div_pow2_diff a n1 n2 = + Math.pow2_plus n2 (n1-n2); + assert (a * pow2 n2 / pow2 n1 == a * pow2 n2 / (pow2 n2 * pow2 (n1 - n2))); + div_product (a * pow2 n2) (pow2 n2) (pow2 (n1-n2)); + mul_div_cancel a (pow2 n2) + +val mod_mul_pow2 : n:nat -> e1:nat -> e2:nat -> + Lemma (n % pow2 e1 * pow2 e2 <= pow2 (e1+e2) - pow2 e2) +let mod_mul_pow2 n e1 e2 = + Math.lemma_mod_lt n (pow2 e1); + Math.lemma_mult_le_right (pow2 e2) (n % pow2 e1) (pow2 e1 - 1); + assert (n % pow2 e1 * pow2 e2 <= pow2 e1 * pow2 e2 - pow2 e2); + Math.pow2_plus e1 e2 + +let pow2_div_bound #b (n:UInt.uint_t b) (s:nat{s <= b}) : + Lemma (n / pow2 s < pow2 (b - s)) = + Math.lemma_div_lt n b s + +#push-options "--smtencoding.l_arith_repr native --z3rlimit 40" +let add_u64_shift_left (hi lo: U64.t) (s: U32.t{U32.v s < 64}) : Pure U64.t + (requires (U32.v s <> 0)) + (ensures (fun r -> U64.v r = (U64.v hi * pow2 (U32.v s)) % pow2 64 + U64.v lo / pow2 (64 - U32.v s))) = + let high = U64.shift_left hi s in + let low = U64.shift_right lo (U32.sub u32_64 s) in + let s = U32.v s in + let high_n = U64.v hi % pow2 (64 - s) * pow2 s in + let low_n = U64.v lo / pow2 (64 - s) in + Math.pow2_plus (64-s) s; + mod_mul (U64.v hi) (pow2 s) (pow2 (64-s)); + assert (U64.v high == high_n); + assert (U64.v low == low_n); + pow2_div_bound (U64.v lo) (64-s); + assert (low_n < pow2 s); + mod_mul_pow2 (U64.v hi) (64 - s) s; + U64.add high low +#pop-options + + +let div_plus_multiple (a:nat) (b:nat) (k:pos) : + Lemma (requires (a < k)) + (ensures ((a + b * k) / k == b)) = + Math.division_addition_lemma a k b; + Math.small_division_lemma_1 a k + +val div_add_small: n:nat -> m:nat -> k1:pos -> k2:pos -> + Lemma (requires (n < k1)) + (ensures (k1*m / (k1*k2) == (n + k1*m) / (k1*k2))) +let div_add_small n m k1 k2 = + div_product (k1*m) k1 k2; + div_product (n+k1*m) k1 k2; + mul_div_cancel m k1; + assert (k1*m/k1 == m); + div_plus_multiple n m k1 + +val add_mod_small: n: nat -> m:nat -> k1:pos -> k2:pos -> + Lemma (requires (n < k1)) + (ensures (n + (k1 * m) % (k1 * k2) == + (n + k1 * m) % (k1 * k2))) +#restart-solver +#push-options "--z3rlimit_factor 4" +let add_mod_small n m k1 k2 = + mod_spec (k1 * m) (k1 * k2); + mod_spec (n + k1 * m) (k1 * k2); + div_add_small n m k1 k2 +#pop-options + +let mod_then_mul_64 (n:nat) : Lemma (n % pow2 64 * pow2 64 == n * pow2 64 % pow2 128) = + Math.pow2_plus 64 64; + mod_mul n (pow2 64) (pow2 64) + +let mul_abc_to_acb (a b c: int) : Lemma (a * b * c == a * c * b) = () + +let add_u64_shift_left_respec (hi lo:U64.t) (s:U32.t{U32.v s < 64}) : Pure U64.t + (requires (U32.v s <> 0)) + (ensures (fun r -> + U64.v r * pow2 64 == + (U64.v hi * pow2 64) * pow2 (U32.v s) % pow2 128 + + U64.v lo * pow2 (U32.v s) / pow2 64 * pow2 64)) = + let r = add_u64_shift_left hi lo s in + let hi = U64.v hi in + let lo = U64.v lo in + let s = 
U32.v s in + // spec of add_u64_shift_left + assert (U64.v r == hi * pow2 s % pow2 64 + lo / pow2 (64 - s)); + Math.distributivity_add_left (hi * pow2 s % pow2 64) (lo / pow2 (64-s)) (pow2 64); + mod_then_mul_64 (hi * pow2 s); + assert (hi * pow2 s % pow2 64 * pow2 64 == (hi * pow2 s * pow2 64) % pow2 128); + div_pow2_diff lo 64 s; + assert (lo / pow2 (64-s) == lo * pow2 s / pow2 64); + assert (U64.v r * pow2 64 == hi * pow2 s * pow2 64 % pow2 128 + lo * pow2 s / pow2 64 * pow2 64); + mul_abc_to_acb hi (pow2 s) (pow2 64); + r + +val add_mod_small' : n:nat -> m:nat -> k:pos -> + Lemma (requires (n + m % k < k)) + (ensures (n + m % k == (n + m) % k)) +let add_mod_small' n m k = + Math.lemma_mod_lt (n + m % k) k; + Math.modulo_lemma n k; + mod_add n m k + +#push-options "--retry 5" +let shift_t_val (a: t) (s: nat) : + Lemma (v a * pow2 s == U64.v a.low * pow2 s + U64.v a.high * pow2 (64+s)) = + Math.pow2_plus 64 s; + () +#pop-options + +val mul_mod_bound : n:nat -> s1:nat -> s2:nat{s2>=s1} -> + Lemma (n * pow2 s1 % pow2 s2 <= pow2 s2 - pow2 s1) + +#push-options "--retry 5" +let mul_mod_bound n s1 s2 = + // n * pow2 s1 % pow2 s2 == n % pow2 (s2-s1) * pow2 s1 + // n % pow2 (s2-s1) <= pow2 (s2-s1) - 1 + // n % pow2 (s2-s1) * pow2 s1 <= pow2 s2 - pow2 s1 + mod_mul n (pow2 s1) (pow2 (s2-s1)); + // assert (n * pow2 s1 % pow2 s2 == n % pow2 (s2-s1) * pow2 s1); + Math.lemma_mod_lt n (pow2 (s2-s1)); + Math.lemma_mult_le_right (pow2 s1) (n % pow2 (s2-s1)) (pow2 (s2-s1) - 1); + Math.pow2_plus (s2-s1) s1 +#pop-options + +let add_lt_le (a a' b b': int) : + Lemma (requires (a < a' /\ b <= b')) + (ensures (a + b < a' + b')) = () + +let u64_pow2_bound (a: UInt.uint_t 64) (s: nat) : + Lemma (a * pow2 s < pow2 (64+s)) = + Math.pow2_plus 64 s; + Math.lemma_mult_le_right (pow2 s) a (pow2 64) + +let shift_t_mod_val' (a: t) (s: nat{s < 64}) : + Lemma ((v a * pow2 s) % pow2 128 == + U64.v a.low * pow2 s + U64.v a.high * pow2 (64+s) % pow2 128) = + let a_l = U64.v a.low in + let a_h = U64.v a.high in + u64_pow2_bound a_l s; + mul_mod_bound a_h (64+s) 128; + // assert (a_h * pow2 (64+s) % pow2 128 <= pow2 128 - pow2 (64+s)); + add_lt_le (a_l * pow2 s) (pow2 (64+s)) (a_h * pow2 (64+s) % pow2 128) (pow2 128 - pow2 (64+s)); + add_mod_small' (a_l * pow2 s) (a_h * pow2 (64+s)) (pow2 128); + shift_t_val a s; + () + +let shift_t_mod_val (a: t) (s: nat{s < 64}) : + Lemma ((v a * pow2 s) % pow2 128 == + U64.v a.low * pow2 s + (U64.v a.high * pow2 64) * pow2 s % pow2 128) = + let a_l = U64.v a.low in + let a_h = U64.v a.high in + shift_t_mod_val' a s; + Math.pow2_plus 64 s; + Math.paren_mul_right a_h (pow2 64) (pow2 s); + () + +#push-options "--z3rlimit 20" +let shift_left_small (a: t) (s: U32.t) : Pure t + (requires (U32.v s < 64)) + (ensures (fun r -> v r = (v a * pow2 (U32.v s)) % pow2 128)) = + if U32.eq s 0ul then a + else + let r = { low = U64.shift_left a.low s; + high = add_u64_shift_left_respec a.high a.low s; } in + let s = U32.v s in + let a_l = U64.v a.low in + let a_h = U64.v a.high in + mod_spec_rew_n (a_l * pow2 s) (pow2 64); + shift_t_mod_val a s; + r +#pop-options + +val shift_left_large : a:t -> s:U32.t{U32.v s >= 64 /\ U32.v s < 128} -> + r:t{v r = (v a * pow2 (U32.v s)) % pow2 128} + +#push-options "--z3rlimit 50 --retry 5" // sporadically fails +let shift_left_large a s = + let h_shift = U32.sub s u32_64 in + assert (U32.v h_shift < 64); + let r = { low = U64.uint_to_t 0; + high = U64.shift_left a.low h_shift; } in + assert (U64.v r.high == (U64.v a.low * pow2 (U32.v s - 64)) % pow2 64); + mod_mul (U64.v 
a.low * pow2 (U32.v s - 64)) (pow2 64) (pow2 64); + Math.pow2_plus (U32.v s - 64) 64; + assert (U64.v r.high * pow2 64 == (U64.v a.low * pow2 (U32.v s)) % pow2 128); + shift_left_large_lemma_t a (U32.v s); + r +#pop-options + +let shift_left a s = + if (U32.lt s u32_64) then shift_left_small a s + else shift_left_large a s + +#restart-solver +let add_u64_shift_right (hi lo: U64.t) (s: U32.t{U32.v s < 64}) : Pure U64.t + (requires (U32.v s <> 0)) + (ensures (fun r -> U64.v r == U64.v lo / pow2 (U32.v s) + + U64.v hi * pow2 (64 - U32.v s) % pow2 64)) = + let low = U64.shift_right lo s in + let high = U64.shift_left hi (U32.sub u32_64 s) in + let s = U32.v s in + let low_n = U64.v lo / pow2 s in + let high_n = U64.v hi % pow2 s * pow2 (64 - s) in + Math.pow2_plus (64-s) s; + mod_mul (U64.v hi) (pow2 (64-s)) (pow2 s); + assert (U64.v high == high_n); + pow2_div_bound (U64.v lo) s; + assert (low_n < pow2 (64 - s)); + mod_mul_pow2 (U64.v hi) s (64 - s); + U64.add low high + +val mul_pow2_diff: a:nat -> n1:nat -> n2:nat{n2 <= n1} -> + Lemma (a * pow2 (n1 - n2) == a * pow2 n1 / pow2 n2) +let mul_pow2_diff a n1 n2 = + Math.paren_mul_right a (pow2 (n1-n2)) (pow2 n2); + mul_div_cancel (a * pow2 (n1 - n2)) (pow2 n2); + Math.pow2_plus (n1 - n2) n2 + +#restart-solver +let add_u64_shift_right_respec (hi lo:U64.t) (s: U32.t{U32.v s < 64}) : Pure U64.t + (requires (U32.v s <> 0)) + (ensures (fun r -> U64.v r == U64.v lo / pow2 (U32.v s) + + U64.v hi * pow2 64 / pow2 (U32.v s) % pow2 64)) = + let r = add_u64_shift_right hi lo s in + let s = U32.v s in + mul_pow2_diff (U64.v hi) 64 s; + r + +#restart-solver +let mul_div_spec (n:nat) (k:pos) : Lemma (n / k * k == n - n % k) = () + +let mul_distr_sub (n1 n2:nat) (k:nat) : Lemma ((n1 - n2) * k == n1 * k - n2 * k) = () + +val div_product_comm : n1:nat -> k1:pos -> k2:pos -> + Lemma (n1 / k1 / k2 == n1 / k2 / k1) +let div_product_comm n1 k1 k2 = + div_product n1 k1 k2; + div_product n1 k2 k1 + +val shift_right_reconstruct : a_h:UInt.uint_t 64 -> s:nat{s < 64} -> + Lemma (a_h * pow2 (64-s) == a_h / pow2 s * pow2 64 + a_h * pow2 64 / pow2 s % pow2 64) +let shift_right_reconstruct a_h s = + mul_pow2_diff a_h 64 s; + mod_spec_rew_n (a_h * pow2 (64-s)) (pow2 64); + div_product_comm (a_h * pow2 64) (pow2 s) (pow2 64); + mul_div_cancel a_h (pow2 64); + assert (a_h / pow2 s * pow2 64 == a_h * pow2 64 / pow2 s / pow2 64 * pow2 64); + () + +val u128_div_pow2 (a: t) (s:nat{s < 64}) : + Lemma (v a / pow2 s == U64.v a.low / pow2 s + U64.v a.high * pow2 (64 - s)) +let u128_div_pow2 a s = + Math.pow2_plus (64-s) s; + Math.paren_mul_right (U64.v a.high) (pow2 (64-s)) (pow2 s); + Math.division_addition_lemma (U64.v a.low) (pow2 s) (U64.v a.high * pow2 (64 - s)) + +#restart-solver +let shift_right_small (a: t) (s: U32.t{U32.v s < 64}) : Pure t + (requires True) + (ensures (fun r -> v r == v a / pow2 (U32.v s))) = + if U32.eq s 0ul then a + else + let r = { low = add_u64_shift_right_respec a.high a.low s; + high = U64.shift_right a.high s; } in + let a_h = U64.v a.high in + let a_l = U64.v a.low in + let s = U32.v s in + shift_right_reconstruct a_h s; + assert (v r == a_h * pow2 (64-s) + a_l / pow2 s); + u128_div_pow2 a s; + r + +let shift_right_large (a: t) (s: U32.t{U32.v s >= 64 /\ U32.v s < 128}) : Pure t + (requires True) + (ensures (fun r -> v r == v a / pow2 (U32.v s))) = + let r = { high = U64.uint_to_t 0; + low = U64.shift_right a.high (U32.sub s u32_64); } in + let s = U32.v s in + Math.pow2_plus 64 (s - 64); + div_product (v a) (pow2 64) (pow2 (s - 64)); + assert (v a / 
pow2 s == v a / pow2 64 / pow2 (s - 64)); + div_plus_multiple (U64.v a.low) (U64.v a.high) (pow2 64); + r + +let shift_right (a: t) (s: U32.t) : Pure t + (requires (U32.v s < 128)) + (ensures (fun r -> v r == v a / pow2 (U32.v s))) = + if U32.lt s u32_64 + then shift_right_small a s + else shift_right_large a s + +let eq (a b:t) = U64.eq a.low b.low && U64.eq a.high b.high +let gt (a b:t) = U64.gt a.high b.high || + (U64.eq a.high b.high && U64.gt a.low b.low) +let lt (a b:t) = U64.lt a.high b.high || + (U64.eq a.high b.high && U64.lt a.low b.low) +let gte (a b:t) = U64.gt a.high b.high || + (U64.eq a.high b.high && U64.gte a.low b.low) +let lte (a b:t) = U64.lt a.high b.high || + (U64.eq a.high b.high && U64.lte a.low b.low) + +let u64_logand_comm (a b:U64.t) : Lemma (U64.logand a b == U64.logand b a) = + UInt.logand_commutative (U64.v a) (U64.v b) + +val u64_and_0 (a b:U64.t) : + Lemma (U64.v b = 0 ==> U64.v (U64.logand a b) = 0) + [SMTPat (U64.logand a b)] +let u64_and_0 a b = UInt.logand_lemma_1 (U64.v a) + +let u64_0_and (a b:U64.t) : + Lemma (U64.v a = 0 ==> U64.v (U64.logand a b) = 0) + [SMTPat (U64.logand a b)] = + u64_logand_comm a b + +val u64_1s_and (a b:U64.t) : + Lemma (U64.v a = pow2 64 - 1 /\ + U64.v b = pow2 64 - 1 ==> U64.v (U64.logand a b) = pow2 64 - 1) + [SMTPat (U64.logand a b)] +let u64_1s_and a b = UInt.logand_lemma_2 (U64.v a) + +let eq_mask (a b: t) : Pure t + (requires True) + (ensures (fun r -> (v a = v b ==> v r = pow2 128 - 1) /\ (v a <> v b ==> v r = 0))) = + let mask = U64.logand (U64.eq_mask a.low b.low) + (U64.eq_mask a.high b.high) in + { low = mask; high = mask; } + +private let gte_characterization (a b: t) : + Lemma (v a >= v b ==> + U64.v a.high > U64.v b.high \/ + (U64.v a.high = U64.v b.high /\ U64.v a.low >= U64.v b.low)) = () + +private let lt_characterization (a b: t) : + Lemma (v a < v b ==> + U64.v a.high < U64.v b.high \/ + (U64.v a.high = U64.v b.high /\ U64.v a.low < U64.v b.low)) = () + +let u64_logor_comm (a b:U64.t) : Lemma (U64.logor a b == U64.logor b a) = + UInt.logor_commutative (U64.v a) (U64.v b) + +val u64_or_1 (a b:U64.t) : + Lemma (U64.v b = pow2 64 - 1 ==> U64.v (U64.logor a b) = pow2 64 - 1) + [SMTPat (U64.logor a b)] +let u64_or_1 a b = UInt.logor_lemma_2 (U64.v a) + +let u64_1_or (a b:U64.t) : + Lemma (U64.v a = pow2 64 - 1 ==> U64.v (U64.logor a b) = pow2 64 - 1) + [SMTPat (U64.logor a b)] = + u64_logor_comm a b + +val u64_or_0 (a b:U64.t) : + Lemma (U64.v a = 0 /\ U64.v b = 0 ==> U64.v (U64.logor a b) = 0) + [SMTPat (U64.logor a b)] +let u64_or_0 a b = UInt.logor_lemma_1 (U64.v a) + +val u64_not_0 (a:U64.t) : + Lemma (U64.v a = 0 ==> U64.v (U64.lognot a) = pow2 64 - 1) + [SMTPat (U64.lognot a)] +let u64_not_0 a = UInt.lognot_lemma_1 #64 + +val u64_not_1 (a:U64.t) : + Lemma (U64.v a = pow2 64 - 1 ==> U64.v (U64.lognot a) = 0) + [SMTPat (U64.lognot a)] +let u64_not_1 a = + UInt.nth_lemma (UInt.lognot #64 (UInt.ones 64)) (UInt.zero 64) + +let gte_mask (a b: t) : Pure t + (requires True) + (ensures (fun r -> (v a >= v b ==> v r = pow2 128 - 1) /\ (v a < v b ==> v r = 0))) = + let mask_hi_gte = U64.logand (U64.gte_mask a.high b.high) + (U64.lognot (U64.eq_mask a.high b.high)) in + let mask_lo_gte = U64.logand (U64.eq_mask a.high b.high) + (U64.gte_mask a.low b.low) in + let mask = U64.logor mask_hi_gte mask_lo_gte in + gte_characterization a b; + lt_characterization a b; + { low = mask; high = mask; } + +let uint64_to_uint128 (a:U64.t) = { low = a; high = U64.uint_to_t 0; } + +let uint128_to_uint64 (a:t) : b:U64.t{U64.v b == v a 
% pow2 64} = a.low + +inline_for_extraction +let u64_l32_mask: x:U64.t{U64.v x == pow2 32 - 1} = U64.uint_to_t 0xffffffff + +let u64_mod_32 (a: U64.t) : Pure U64.t + (requires True) + (ensures (fun r -> U64.v r = U64.v a % pow2 32)) = + UInt.logand_mask (U64.v a) 32; + U64.logand a u64_l32_mask + +let u64_32_digits (a: U64.t) : Lemma (U64.v a / pow2 32 * pow2 32 + U64.v a % pow2 32 == U64.v a) = + div_mod (U64.v a) (pow2 32) + +val mul32_digits : x:UInt.uint_t 64 -> y:UInt.uint_t 32 -> + Lemma (x * y == (x / pow2 32 * y) * pow2 32 + (x % pow2 32) * y) +let mul32_digits x y = () + +let u32_32 : x:U32.t{U32.v x == 32} = U32.uint_to_t 32 + +#push-options "--z3rlimit 30" +let u32_combine (hi lo: U64.t) : Pure U64.t + (requires (U64.v lo < pow2 32)) + (ensures (fun r -> U64.v r = U64.v hi % pow2 32 * pow2 32 + U64.v lo)) = + U64.add lo (U64.shift_left hi u32_32) +#pop-options + +let product_bound (a b:nat) (k:pos) : + Lemma (requires (a < k /\ b < k)) + (ensures a * b <= k * k - 2*k + 1) = + Math.lemma_mult_le_right b a (k-1); + Math.lemma_mult_le_left (k-1) b (k-1) + +val uint_product_bound : #n:nat -> a:UInt.uint_t n -> b:UInt.uint_t n -> + Lemma (a * b <= pow2 (2*n) - 2*(pow2 n) + 1) +let uint_product_bound #n a b = + product_bound a b (pow2 n); + Math.pow2_plus n n + +val u32_product_bound : a:nat{a < pow2 32} -> b:nat{b < pow2 32} -> + Lemma (UInt.size (a * b) 64 /\ a * b < pow2 64 - pow2 32 - 1) +let u32_product_bound a b = + uint_product_bound #32 a b + +let mul32 x y = + let x0 = u64_mod_32 x in + let x1 = U64.shift_right x u32_32 in + u32_product_bound (U64.v x0) (U32.v y); + let x0y = U64.mul x0 (FStar.Int.Cast.uint32_to_uint64 y) in + let x0yl = u64_mod_32 x0y in + let x0yh = U64.shift_right x0y u32_32 in + u32_product_bound (U64.v x1) (U32.v y); + // not in the original C code + let x1y' = U64.mul x1 (FStar.Int.Cast.uint32_to_uint64 y) in + let x1y = U64.add x1y' x0yh in + // correspondence with C: + // r0 = r.low + // r0 is written using u32_combine hi lo = lo + hi << 32 + // r1 = r.high + let r = { low = u32_combine x1y x0yl; + high = U64.shift_right x1y u32_32; } in + u64_32_digits x; + //assert (U64.v x == U64.v x1 * pow2 32 + U64.v x0); + assert (U64.v x0y == U64.v x0 * U32.v y); + u64_32_digits x0y; + //assert (U64.v x0y == U64.v x0yh * pow2 32 + U64.v x0yl); + assert (U64.v x1y' == U64.v x / pow2 32 * U32.v y); + mul32_digits (U64.v x) (U32.v y); + assert (U64.v x * U32.v y == U64.v x1y' * pow2 32 + U64.v x0y); + r + +let l32 (x: UInt.uint_t 64) : UInt.uint_t 32 = x % pow2 32 +let h32 (x: UInt.uint_t 64) : UInt.uint_t 32 = x / pow2 32 + +val mul32_bound : x:UInt.uint_t 32 -> y:UInt.uint_t 32 -> + n:UInt.uint_t 64{n < pow2 64 - pow2 32 - 1 /\ n == x * y} +let mul32_bound x y = + u32_product_bound x y; + x * y + +let pll (x y: U64.t) : n:UInt.uint_t 64{n < pow2 64 - pow2 32 - 1} = + mul32_bound (l32 (U64.v x)) (l32 (U64.v y)) +let plh (x y: U64.t) : n:UInt.uint_t 64{n < pow2 64 - pow2 32 - 1} = + mul32_bound (l32 (U64.v x)) (h32 (U64.v y)) +let phl (x y: U64.t) : n:UInt.uint_t 64{n < pow2 64 - pow2 32 - 1} = + mul32_bound (h32 (U64.v x)) (l32 (U64.v y)) +let phh (x y: U64.t) : n:UInt.uint_t 64{n < pow2 64 - pow2 32 - 1} = + mul32_bound (h32 (U64.v x)) (h32 (U64.v y)) + +let pll_l (x y: U64.t) : UInt.uint_t 32 = + l32 (pll x y) +let pll_h (x y: U64.t) : UInt.uint_t 32 = + h32 (pll x y) + +let mul_wide_low (x y: U64.t) = (plh x y + (phl x y + pll_h x y) % pow2 32) * pow2 32 % pow2 64 + pll_l x y + +let mul_wide_high (x y: U64.t) = + phh x y + + (phl x y + pll_h x y) / pow2 32 + + 
(plh x y + (phl x y + pll_h x y) % pow2 32) / pow2 32 + +inline_for_extraction noextract +let mul_wide_impl_t' (x y: U64.t) : Pure (tuple4 U64.t U64.t U64.t U64.t) + (requires True) + (ensures (fun r -> let (u1, w3, x', t') = r in + U64.v u1 == U64.v x % pow2 32 /\ + U64.v w3 == pll_l x y /\ + U64.v x' == h32 (U64.v x) /\ + U64.v t' == phl x y + pll_h x y)) = + let u1 = u64_mod_32 x in + let v1 = u64_mod_32 y in + u32_product_bound (U64.v u1) (U64.v v1); + let t = U64.mul u1 v1 in + assert (U64.v t == pll x y); + let w3 = u64_mod_32 t in + assert (U64.v w3 == pll_l x y); + let k = U64.shift_right t u32_32 in + assert (U64.v k == pll_h x y); + let x' = U64.shift_right x u32_32 in + assert (U64.v x' == h32 (U64.v x)); + u32_product_bound (U64.v x') (U64.v v1); + let t' = U64.add (U64.mul x' v1) k in + (u1, w3, x', t') + +// similar to u32_combine, but use % 2^64 * 2^32 +let u32_combine' (hi lo: U64.t) : Pure U64.t + (requires (U64.v lo < pow2 32)) + (ensures (fun r -> U64.v r = U64.v hi * pow2 32 % pow2 64 + U64.v lo)) = + U64.add lo (U64.shift_left hi u32_32) + +inline_for_extraction noextract +let mul_wide_impl (x: U64.t) (y: U64.t) : + Tot (r:t{U64.v r.low == mul_wide_low x y /\ + U64.v r.high == mul_wide_high x y % pow2 64}) = + let (u1, w3, x', t') = mul_wide_impl_t' x y in + let k' = u64_mod_32 t' in + let w1 = U64.shift_right t' u32_32 in + assert (U64.v w1 == (phl x y + pll_h x y) / pow2 32); + let y' = U64.shift_right y u32_32 in + assert (U64.v y' == h32 (U64.v y)); + u32_product_bound (U64.v u1) (U64.v y'); + let t'' = U64.add (U64.mul u1 y') k' in + assert (U64.v t'' == plh x y + (phl x y + pll_h x y) % pow2 32); + let k'' = U64.shift_right t'' u32_32 in + assert (U64.v k'' == (plh x y + (phl x y + pll_h x y) % pow2 32) / pow2 32); + u32_product_bound (U64.v x') (U64.v y'); + mod_mul_pow2 (U64.v t'') 32 64; + let r0 = u32_combine' t'' w3 in + // let r0 = U64.add (U64.shift_left t'' u32_32) w3 in + assert (U64.v r0 == (plh x y + (phl x y + pll_h x y) % pow2 32) * pow2 32 % pow2 64 + pll_l x y); + let xy_w1 = U64.add (U64.mul x' y') w1 in + assert (U64.v xy_w1 == phh x y + (phl x y + pll_h x y) / pow2 32); + let r1 = U64.add_mod xy_w1 k'' in + assert (U64.v r1 == (phh x y + (phl x y + pll_h x y) / pow2 32 + (plh x y + (phl x y + pll_h x y) % pow2 32) / pow2 32) % pow2 64); + let r = { low = r0; high = r1; } in + r + +let product_sums (a b c d:nat) : + Lemma ((a + b) * (c + d) == a * c + a * d + b * c + b * d) = () + +val u64_32_product (xl xh yl yh:UInt.uint_t 32) : + Lemma ((xl + xh * pow2 32) * (yl + yh * pow2 32) == + xl * yl + (xl * yh) * pow2 32 + (xh * yl) * pow2 32 + (xh * yh) * pow2 64) +#push-options "--z3rlimit 25" +let u64_32_product xl xh yl yh = + assert (xh >= 0); //flakiness; without this, can't prove that (xh * pow2 32) >= 0 + assert (pow2 32 >= 0); //flakiness; without this, can't prove that (xh * pow2 32) >= 0 + assert (xh*pow2 32 >= 0); + product_sums xl (xh*pow2 32) yl (yh*pow2 32); + mul_abc_to_acb xh (pow2 32) yl; + assert (xl * (yh * pow2 32) == (xl * yh) * pow2 32); + Math.pow2_plus 32 32; + assert ((xh * pow2 32) * (yh * pow2 32) == (xh * yh) * pow2 64) +#pop-options + +let product_expand (x y: U64.t) : + Lemma (U64.v x * U64.v y == phh x y * pow2 64 + + (plh x y + phl x y + pll_h x y) * pow2 32 + + pll_l x y) = + assert (U64.v x == l32 (U64.v x) + h32 (U64.v x) * pow2 32); + assert (U64.v y == l32 (U64.v y) + h32 (U64.v y) * pow2 32); + u64_32_product (l32 (U64.v x)) (h32 (U64.v x)) (l32 (U64.v y)) (h32 (U64.v y)) + +let product_low_expand (x y: U64.t) : + 
Lemma ((U64.v x * U64.v y) % pow2 64 == + ((plh x y + phl x y + pll_h x y) * pow2 32 + pll_l x y) % pow2 64) = + product_expand x y; + Math.lemma_mod_plus ((plh x y + phl x y + pll_h x y) * pow2 32 + pll_l x y) (phh x y) (pow2 64) + +let add_mod_then_mod (n m:nat) (k:pos) : + Lemma ((n + m % k) % k == (n + m) % k) = + mod_add n m k; + mod_add n (m % k) k; + mod_double m k + +let shift_add (n:nat) (m:nat{m < pow2 32}) : + Lemma (n * pow2 32 % pow2 64 + m == (n * pow2 32 + m) % pow2 64) = + add_mod_small' m (n*pow2 32) (pow2 64) + +let mul_wide_low_ok (x y: U64.t) : + Lemma (mul_wide_low x y == (U64.v x * U64.v y) % pow2 64) = + Math.pow2_plus 32 32; + mod_mul (plh x y + (phl x y + pll_h x y) % pow2 32) (pow2 32) (pow2 32); + assert (mul_wide_low x y == + (plh x y + (phl x y + pll_h x y) % pow2 32) % pow2 32 * pow2 32 + pll_l x y); + add_mod_then_mod (plh x y) (phl x y + pll_h x y) (pow2 32); + assert (mul_wide_low x y == (plh x y + phl x y + pll_h x y) % pow2 32 * pow2 32 + pll_l x y); + mod_mul (plh x y + phl x y + pll_h x y) (pow2 32) (pow2 32); + shift_add (plh x y + phl x y + pll_h x y) (pll_l x y); + assert (mul_wide_low x y == ((plh x y + phl x y + pll_h x y) * pow2 32 + pll_l x y) % pow2 64); + product_low_expand x y + +val product_high32 : x:U64.t -> y:U64.t -> + Lemma ((U64.v x * U64.v y) / pow2 32 == phh x y * pow2 32 + plh x y + phl x y + pll_h x y) +#push-options "--z3rlimit 20" +let product_high32 x y = + Math.pow2_plus 32 32; + product_expand x y; + Math.division_addition_lemma (plh x y + phl x y + pll_h x y) (pow2 32) (phh x y * pow2 32); + mul_div_cancel (phh x y * pow2 32) (pow2 32); + mul_div_cancel (plh x y + phl x y + pll_h x y) (pow2 32); + Math.small_division_lemma_1 (pll_l x y) (pow2 32) +#pop-options + +val product_high_expand : x:U64.t -> y:U64.t -> + Lemma ((U64.v x * U64.v y) / pow2 64 == phh x y + (plh x y + phl x y + pll_h x y) / pow2 32) + +#push-options "--z3rlimit 15 --retry 5" // sporadically fails +let product_high_expand x y = + Math.pow2_plus 32 32; + div_product (mul_wide_high x y) (pow2 32) (pow2 32); + product_high32 x y; + Math.division_addition_lemma (plh x y + phl x y + pll_h x y) (pow2 32) (phh x y); + () +#pop-options + +val mod_spec_multiply : n:nat -> k:pos -> + Lemma ((n - n%k) / k * k == n - n%k) +let mod_spec_multiply n k = + Math.lemma_mod_spec2 n k + +val mod_spec_mod (n:nat) (k:pos) : Lemma ((n - n%k) % k == 0) +let mod_spec_mod n k = + assert (n - n%k == n / k * k); + Math.multiple_modulo_lemma (n/k) k + +let mul_injective (n m:nat) (k:pos) : + Lemma (requires (n * k == m * k)) + (ensures (n == m)) = () + +val div_sum_combine1 : n:nat -> m:nat -> k:pos -> + Lemma ((n / k + m / k) * k == (n - n % k) + (m - m % k)) +let div_sum_combine1 n m k = + Math.distributivity_add_left (n / k) (m / k) k; + div_mod n k; + div_mod m k; + () + +let mod_0 (k:pos) : + Lemma (0 % k == 0) = () + +let n_minus_mod_exact (n:nat) (k:pos) : + Lemma ((n - n % k) % k == 0) = + mod_spec_mod n k; + mod_0 k + +let sub_mod_gt_0 (n:nat) (k:pos) : + Lemma (0 <= n - n % k) = () + +val sum_rounded_mod_exact : n:nat -> m:nat -> k:pos -> + Lemma (((n - n%k) + (m - m%k)) / k * k == (n - n%k) + (m - m%k)) +#push-options "--retry 5" // sporadically fails +let sum_rounded_mod_exact n m k = + n_minus_mod_exact n k; + n_minus_mod_exact m k; + sub_mod_gt_0 n k; + sub_mod_gt_0 m k; + mod_add (n - n%k) (m - m%k) k; + Math.div_exact_r ((n - n%k) + (m - m % k)) k +#pop-options + +val div_sum_combine : n:nat -> m:nat -> k:pos -> + Lemma (n / k + m / k == (n + (m - n % k) - m % k) / k) 
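+(* [Illustrative comment added for exposition; a sketch, not part of the original
+   development.] The statement above holds because subtracting both remainders
+   makes the numerator an exact multiple of k, so the division distributes.
+   A concrete sanity check with n = 7, m = 5, k = 3:
+     n / k + m / k                 = 2 + 1 = 3
+     (n + (m - n % k) - m % k) / k = (7 + 4 - 2) / 3 = 9 / 3 = 3 *)
+let _ = assert_norm (7 / 3 + 5 / 3 = (7 + (5 - 7 % 3) - 5 % 3) / 3)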
+#push-options "--retry 5" // sporadically fails +let div_sum_combine n m k = + sum_rounded_mod_exact n m k; + div_sum_combine1 n m k; + mul_injective (n / k + m / k) (((n - n%k) + (m - m%k)) / k) k; + assert (n + m - n % k - m % k == (n - n%k) + (m - m%k)) +#pop-options + +val sum_shift_carry : a:nat -> b:nat -> k:pos -> + Lemma (a / k + (b + a%k) / k == (a + b) / k) +let sum_shift_carry a b k = + div_sum_combine a (b+a%k) k; +// assert (a / k + (b + a%k) / k == (a + b + (a % k - a % k) - (b + a%k) % k) / k); +// assert ((a + b + (a % k - a % k) - (b + a%k) % k) / k == (a + b - (b + a%k) % k) / k); + add_mod_then_mod b a k; + Math.lemma_mod_spec (a+b) k + +let mul_wide_high_ok (x y: U64.t) : + Lemma ((U64.v x * U64.v y) / pow2 64 == mul_wide_high x y) = + product_high_expand x y; + sum_shift_carry (phl x y + pll_h x y) (plh x y) (pow2 32) + +let product_div_bound (#n:pos) (x y: UInt.uint_t n) : + Lemma (x * y / pow2 n < pow2 n) = + Math.pow2_plus n n; + product_bound x y (pow2 n); + pow2_div_bound #(n+n) (x * y) n + +let mul_wide (x y:U64.t) : Pure t + (requires True) + (ensures (fun r -> v r == U64.v x * U64.v y)) = + mul_wide_low_ok x y; + mul_wide_high_ok x y; + product_div_bound (U64.v x) (U64.v y); + Math.modulo_lemma (mul_wide_high x y) (pow2 64); + mul_wide_impl x y diff --git a/stage0/ulib/FStar.UInt128.fsti b/stage0/ulib/FStar.UInt128.fsti new file mode 100644 index 00000000000..88f6da8fc42 --- /dev/null +++ b/stage0/ulib/FStar.UInt128.fsti @@ -0,0 +1,168 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*)
+module FStar.UInt128
+
+open FStar.UInt
+open FStar.Mul
+
+module U32 = FStar.UInt32
+module U64 = FStar.UInt64
+
+noextract
+let n = 128
+
+val t: (x:Type0{hasEq x})
+
+[@@ noextract_to "krml"]
+val v (x:t) : Tot (uint_t n)
+
+[@@ noextract_to "krml"]
+val uint_to_t: x:uint_t n -> Pure t
+  (requires True)
+  (ensures (fun y -> v y = x))
+
+val v_inj (x1 x2: t): Lemma (requires (v x1 == v x2)) (ensures (x1 == x2))
+
+val add: a:t -> b:t -> Pure t
+  (requires (size (v a + v b) n))
+  (ensures (fun c -> v a + v b = v c))
+
+val add_underspec: a:t -> b:t -> Pure t
+  (requires True)
+  (ensures (fun c ->
+    size (v a + v b) n ==> v a + v b = v c))
+
+val add_mod: a:t -> b:t -> Pure t
+  (requires True)
+  (ensures (fun c -> (v a + v b) % pow2 n = v c))
+
+(* Subtraction primitives *)
+val sub: a:t -> b:t -> Pure t
+  (requires (size (v a - v b) n))
+  (ensures (fun c -> v a - v b = v c))
+
+val sub_underspec: a:t -> b:t -> Pure t
+  (requires True)
+  (ensures (fun c ->
+    size (v a - v b) n ==> v a - v b = v c))
+
+val sub_mod: a:t -> b:t -> Pure t
+  (requires True)
+  (ensures (fun c -> (v a - v b) % pow2 n = v c))
+
+(* Bitwise operators *)
+val logand: a:t -> b:t -> Pure t
+  (requires True)
+  (ensures (fun r -> v r == logand (v a) (v b)))
+
+val logxor: a:t -> b:t -> Pure t
+  (requires True)
+  (ensures (fun r -> v r == logxor (v a) (v b)))
+
+val logor: a:t -> b:t -> Pure t
+  (requires True)
+  (ensures (fun r -> v r == logor (v a) (v b)))
+
+val lognot: a:t -> Pure t
+  (requires True)
+  (ensures (fun r -> v r == lognot (v a)))
+
+//This private primitive is used internally by the
+//compiler to translate bounded integer constants
+//with a desugaring-time check of the size of the number,
+//rather than an expensive verification check.
+//Since it is marked private, client programs cannot call it directly
+//Since it is marked unfold, it eagerly reduces,
+//eliminating the verification overhead of the wrapper
+private
+unfold
+let __uint_to_t (x:int) : Tot t =
+  assume (fits x 128);
+  uint_to_t x
+
+
+(* Shift operators *)
+val shift_left: a:t -> s:UInt32.t -> Pure t
+  (requires (U32.v s < n))
+  (ensures (fun c -> v c = ((v a * pow2 (UInt32.v s)) % pow2 n)))
+
+val shift_right: a:t -> s:UInt32.t -> Pure t
+  (requires (U32.v s < n))
+  (ensures (fun c -> v c = (v a / (pow2 (UInt32.v s)))))
+
+(* Comparison operators *)
+
+val eq (a:t) (b:t) : Pure bool
+  (requires True)
+  (ensures (fun r -> r == eq #n (v a) (v b)))
+
+val gt (a:t) (b:t) : Pure bool
+  (requires True)
+  (ensures (fun r -> r == gt #n (v a) (v b)))
+
+val lt (a:t) (b:t) : Pure bool
+  (requires True)
+  (ensures (fun r -> r == lt #n (v a) (v b)))
+
+val gte (a:t) (b:t) : Pure bool
+  (requires True)
+  (ensures (fun r -> r == gte #n (v a) (v b)))
+
+val lte (a:t) (b:t) : Pure bool
+  (requires True)
+  (ensures (fun r -> r == lte #n (v a) (v b)))
+
+val eq_mask: a:t -> b:t -> Tot (c:t{(v a = v b ==> v c = pow2 n - 1) /\ (v a <> v b ==> v c = 0)})
+
+val gte_mask: a:t -> b:t -> Tot (c:t{(v a >= v b ==> v c = pow2 n - 1) /\ (v a < v b ==> v c = 0)})
+
+(* Casts *)
+val uint64_to_uint128: a:U64.t -> b:t{v b == U64.v a}
+val uint128_to_uint64: a:t -> b:U64.t{U64.v b == v a % pow2 64}
+
+(* To input / output constants *)
+(* TODO: assume these without implementations *)
+//val to_string: t -> Tot string
+//val of_string: string -> Tot t
+
+(* Infix notations *)
+inline_for_extraction noextract let op_Plus_Hat = add
+inline_for_extraction noextract let op_Plus_Question_Hat = add_underspec
+inline_for_extraction noextract let op_Plus_Percent_Hat =
add_mod +inline_for_extraction noextract let op_Subtraction_Hat = sub +inline_for_extraction noextract let op_Subtraction_Question_Hat = sub_underspec +inline_for_extraction noextract let op_Subtraction_Percent_Hat = sub_mod +inline_for_extraction noextract let op_Amp_Hat = logand +inline_for_extraction noextract let op_Hat_Hat = logxor +inline_for_extraction noextract let op_Bar_Hat = logor +inline_for_extraction noextract let op_Less_Less_Hat = shift_left +inline_for_extraction noextract let op_Greater_Greater_Hat = shift_right +inline_for_extraction noextract let op_Equals_Hat = eq +inline_for_extraction noextract let op_Greater_Hat = gt +inline_for_extraction noextract let op_Less_Hat = lt +inline_for_extraction noextract let op_Greater_Equals_Hat = gte +inline_for_extraction noextract let op_Less_Equals_Hat = lte + +(* Multiplication primitives *) +(* Note that unlike UIntN, we do not provide uint128 * uint128 primitives (mul, + mul_underspec, mul_mod, and mul_div) *) +val mul32: x:U64.t -> y:U32.t -> Pure t + (requires True) + (ensures (fun r -> v r == U64.v x * U32.v y)) + +val mul_wide: x:U64.t -> y:U64.t -> Pure t + (requires True) + (ensures (fun r -> v r == U64.v x * U64.v y)) diff --git a/stage0/ulib/FStar.UInt16.fst b/stage0/ulib/FStar.UInt16.fst new file mode 100644 index 00000000000..64d0467f5d8 --- /dev/null +++ b/stage0/ulib/FStar.UInt16.fst @@ -0,0 +1,92 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.UInt16 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +type t : eqtype = + | Mk: v:uint_t n -> t + +let v x = x.v + +irreducible +let uint_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = uint_to_t 0 + +let one = uint_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let add_underspec a b = Mk (add_underspec (v a) (v b)) + +let add_mod a b = Mk (add_mod (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let sub_underspec a b = Mk (sub_underspec (v a) (v b)) + +let sub_mod a b = Mk (sub_mod (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let mul_underspec a b = Mk (mul_underspec (v a) (v b)) + +let mul_mod a b = Mk (mul_mod (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +#push-options "--z3rlimit 80 --fuel 1" //AR: working around the interleaving semantics of pragmas + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let lemma_sub_msbs a b + = from_vec_propriety (to_vec (v a)) 1; + from_vec_propriety (to_vec (v b)) 1; + from_vec_propriety (to_vec (v (sub_mod a b))) 1 + +#pop-options + +let to_string _ = admit () + +let to_string_hex _ = admit () + +let to_string_hex_pad _ = admit () + +let of_string _ = admit () diff --git a/stage0/ulib/FStar.UInt16.fsti b/stage0/ulib/FStar.UInt16.fsti new file mode 100644 index 00000000000..587a4b41d3c --- /dev/null +++ b/stage0/ulib/FStar.UInt16.fsti @@ -0,0 +1,362 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.UInt16 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 16 + +/// For FStar.UIntN.fstp: anything that you fix/update here should be +/// reflected in [FStar.IntN.fstp], which is mostly a copy-paste of +/// this module. +/// +/// Except, as compared to [FStar.IntN.fstp], here: +/// - every occurrence of [int_t] has been replaced with [uint_t] +/// - every occurrence of [@%] has been replaced with [%]. +/// - some functions (e.g., add_underspec, etc.) are only defined here, not on signed integers + +/// This module provides an abstract type for machine integers of a +/// given signedness and width. The interface is designed to be safe +/// with respect to arithmetic underflow and overflow. + +/// Note, we have attempted several times to re-design this module to +/// make it more amenable to normalization and to impose less overhead +/// on the SMT solver when reasoning about machine integer +/// arithmetic. The following github issue reports on the current +/// status of that work. 
+/// +/// https://github.com/FStarLang/FStar/issues/1757 + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(** Abstract type of machine integers, with an underlying + representation using a bounded mathematical integer *) +new val t : eqtype + +(** A coercion that projects a bounded mathematical integer from a + machine integer *) +val v (x:t) : Tot (uint_t n) + +(** A coercion that injects a bounded mathematical integers into a + machine integer *) +val uint_to_t (x:uint_t n) : Pure t + (requires True) + (ensures (fun y -> v y = x)) + +(** Injection/projection inverse *) +val uv_inv (x : t) : Lemma + (ensures (uint_to_t (v x) == x)) + [SMTPat (v x)] + +(** Projection/injection inverse *) +val vu_inv (x : uint_t n) : Lemma + (ensures (v (uint_to_t x) == x)) + [SMTPat (uint_to_t x)] + +(** An alternate form of the injectivity of the [v] projection *) +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +(** Constants 0 and 1 *) +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +(**** Addition primitives *) + +(** Bounds-respecting addition + + The precondition enforces that the sum does not overflow, + expressing the bound as an addition on mathematical integers *) +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(** Underspecified, possibly overflowing addition: + + The postcondition only enures that the result is the sum of the + arguments in case there is no overflow *) +val add_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a + v b) n ==> v a + v b = v c)) + +(** Addition modulo [2^n] + + Machine integers can always be added, but the postcondition is now + in terms of addition modulo [2^n] on mathematical integers *) +val add_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.add_mod (v a) (v b) = v c)) + +(**** Subtraction primitives *) + + +(** Bounds-respecting subtraction + + The precondition enforces that the difference does not underflow, + expressing the bound as a difference on mathematical integers *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(** Underspecified, possibly overflowing subtraction: + + The postcondition only enures that the result is the difference of + the arguments in case there is no underflow *) +val sub_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a - v b) n ==> v a - v b = v c)) + +(** Subtraction modulo [2^n] + + Machine integers can always be subtractd, but the postcondition is + now in terms of subtraction modulo [2^n] on mathematical integers *) +val sub_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.sub_mod (v a) (v b) = v c)) + +(**** Multiplication primitives *) + + +(** Bounds-respecting multiplication + + The precondition enforces that the product does not overflow, + expressing the bound as a product on mathematical integers *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(** Underspecified, possibly overflowing product + + The postcondition only enures that the result is the product of + the arguments in case there is no overflow *) +val mul_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a * v b) n ==> v a * v b = v c)) + +(** Multiplication modulo [2^n] + + Machine integers can always be multiplied, but the postcondition + is now in terms of product 
modulo [2^n] on mathematical integers *) +val mul_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mul_mod (v a) (v b) = v c)) + +(**** Division primitives *) + +(** Euclidean division of [a] and [b], with [b] non-zero *) +val div (a:t) (b:t{v b <> 0}) : Pure t + (requires (True)) + (ensures (fun c -> v a / v b = v c)) + +(**** Modulo primitives *) + +(** Euclidean remainder + + The result is the modulus of [a] with respect to a non-zero [b] *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mod (v a) (v b) = v c)) + +(**** Bitwise operators *) + +/// Also see FStar.BV + +(** Bitwise logical conjunction *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +(** Bitwise logical exclusive-or *) +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +(** Bitwise logical disjunction *) +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +(** Bitwise logical negation *) +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(**** Shift operators *) + +(** Shift right with zero fill, shifting at most the integer width *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_right (v a) (UInt32.v s) = v c)) + +(** Shift left with zero fill, shifting at most the integer width *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_left (v a) (UInt32.v s) = v c)) + +(**** Comparison operators *) + +(** Equality + + Note, it is safe to also use the polymorphic decidable equality + operator [=] *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) + +(** Greater than *) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) + +(** Greater than or equal *) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) + +(** Less than *) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) + +(** Less than or equal *) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(** Unary negation *) +inline_for_extraction +let minus (a:t) = add_mod (lognot a) (uint_to_t 1) + +(** The maximum shift value for this type, i.e. its width minus one, + as an UInt32. *) +inline_for_extraction +let n_minus_one = UInt32.uint_to_t (n - 1) + +#set-options "--z3rlimit 80 --initial_fuel 1 --max_fuel 1" + +(** A constant-time way to compute the equality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=2e60bb395c1f589a398ec606d611132ef9ef764b + + Note, the branching on [a=b] is just for proof-purposes. 
+ *) +[@ CNoInline ] +let eq_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a = v b ==> v c = pow2 n - 1) /\ + (v a <> v b ==> v c = 0))) + = let x = logxor a b in + let minus_x = minus x in + let x_or_minus_x = logor x minus_x in + let xnx = shift_right x_or_minus_x n_minus_one in + let c = sub_mod xnx (uint_to_t 1) in + if a = b then + begin + logxor_self (v a); + lognot_lemma_1 #n; + logor_lemma_1 (v x); + assert (v x = 0 /\ v minus_x = 0 /\ + v x_or_minus_x = 0 /\ v xnx = 0); + assert (v c = ones n) + end + else + begin + logxor_neq_nonzero (v a) (v b); + lemma_msb_pow2 #n (v (lognot x)); + lemma_msb_pow2 #n (v minus_x); + lemma_minus_zero #n (v x); + assert (v c = FStar.UInt.zero n) + end; + c + +private +val lemma_sub_msbs (a:t) (b:t) + : Lemma ((msb (v a) = msb (v b)) ==> (v a < v b <==> msb (v (sub_mod a b)))) + +(** A constant-time way to compute the [>=] inequality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=0a483a9b431d87eca1b275463c632f8d5551978a + *) +[@ CNoInline ] +let gte_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a >= v b ==> v c = pow2 n - 1) /\ + (v a < v b ==> v c = 0))) + = let x = a in + let y = b in + let x_xor_y = logxor x y in + let x_sub_y = sub_mod x y in + let x_sub_y_xor_y = logxor x_sub_y y in + let q = logor x_xor_y x_sub_y_xor_y in + let x_xor_q = logxor x q in + let x_xor_q_ = shift_right x_xor_q n_minus_one in + let c = sub_mod x_xor_q_ (uint_to_t 1) in + lemma_sub_msbs x y; + lemma_msb_gte (v x) (v y); + lemma_msb_gte (v y) (v x); + c +#reset-options + +(*** Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Plus_Question_Hat = add_underspec +unfold let op_Plus_Percent_Hat = add_mod +unfold let op_Subtraction_Hat = sub +unfold let op_Subtraction_Question_Hat = sub_underspec +unfold let op_Subtraction_Percent_Hat = sub_mod +unfold let op_Star_Hat = mul +unfold let op_Star_Question_Hat = mul_underspec +unfold let op_Star_Percent_Hat = mul_mod +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +(**** To input / output constants *) +(** In decimal representation *) +val to_string: t -> Tot string + +(** In hex representation (with leading 0x) *) +val to_string_hex: t -> Tot string + +(** In fixed-width hex representation (left-padded with zeroes, no leading 0x) *) +val to_string_hex_pad: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. 
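+//For example, a literal such as 42us in client code is checked to fit in
+//16 bits at desugaring time and then elaborated into an application of this
+//wrapper.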
+//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __uint_to_t (x:int) : Tot t + = uint_to_t x +#reset-options diff --git a/stage0/ulib/FStar.UInt32.fst b/stage0/ulib/FStar.UInt32.fst new file mode 100644 index 00000000000..e87d8b968f9 --- /dev/null +++ b/stage0/ulib/FStar.UInt32.fst @@ -0,0 +1,92 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.UInt32 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +type t : eqtype = + | Mk: v:uint_t n -> t + +let v x = x.v + +irreducible +let uint_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = uint_to_t 0 + +let one = uint_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let add_underspec a b = Mk (add_underspec (v a) (v b)) + +let add_mod a b = Mk (add_mod (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let sub_underspec a b = Mk (sub_underspec (v a) (v b)) + +let sub_mod a b = Mk (sub_mod (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let mul_underspec a b = Mk (mul_underspec (v a) (v b)) + +let mul_mod a b = Mk (mul_mod (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (v s)) + +#push-options "--z3rlimit 80 --fuel 1" //AR: working around the interleaving semantics of pragmas + +let shift_left a s = Mk (shift_left (v a) (v s)) + +let lemma_sub_msbs a b + = from_vec_propriety (to_vec (v a)) 1; + from_vec_propriety (to_vec (v b)) 1; + from_vec_propriety (to_vec (v (sub_mod a b))) 1 + +#pop-options + +let to_string _ = admit () + +let to_string_hex _ = admit () + +let to_string_hex_pad _ = admit () + +let of_string _ = admit () diff --git a/stage0/ulib/FStar.UInt32.fsti b/stage0/ulib/FStar.UInt32.fsti new file mode 100644 index 00000000000..1d32e55cd76 --- /dev/null +++ b/stage0/ulib/FStar.UInt32.fsti @@ -0,0 +1,362 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.UInt32 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 32 + +/// For FStar.UIntN.fstp: anything that you fix/update here should be +/// reflected in [FStar.IntN.fstp], which is mostly a copy-paste of +/// this module. +/// +/// Except, as compared to [FStar.IntN.fstp], here: +/// - every occurrence of [int_t] has been replaced with [uint_t] +/// - every occurrence of [@%] has been replaced with [%]. +/// - some functions (e.g., add_underspec, etc.) are only defined here, not on signed integers + +/// This module provides an abstract type for machine integers of a +/// given signedness and width. The interface is designed to be safe +/// with respect to arithmetic underflow and overflow. + +/// Note, we have attempted several times to re-design this module to +/// make it more amenable to normalization and to impose less overhead +/// on the SMT solver when reasoning about machine integer +/// arithmetic. The following github issue reports on the current +/// status of that work. +/// +/// https://github.com/FStarLang/FStar/issues/1757 + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(** Abstract type of machine integers, with an underlying + representation using a bounded mathematical integer *) +new val t : eqtype + +(** A coercion that projects a bounded mathematical integer from a + machine integer *) +val v (x:t) : Tot (uint_t n) + +(** A coercion that injects a bounded mathematical integers into a + machine integer *) +val uint_to_t (x:uint_t n) : Pure t + (requires True) + (ensures (fun y -> v y = x)) + +(** Injection/projection inverse *) +val uv_inv (x : t) : Lemma + (ensures (uint_to_t (v x) == x)) + [SMTPat (v x)] + +(** Projection/injection inverse *) +val vu_inv (x : uint_t n) : Lemma + (ensures (v (uint_to_t x) == x)) + [SMTPat (uint_to_t x)] + +(** An alternate form of the injectivity of the [v] projection *) +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +(** Constants 0 and 1 *) +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +(**** Addition primitives *) + +(** Bounds-respecting addition + + The precondition enforces that the sum does not overflow, + expressing the bound as an addition on mathematical integers *) +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(** Underspecified, possibly overflowing addition: + + The postcondition only enures that the result is the sum of the + arguments in case there is no overflow *) +val add_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a + v b) n ==> v a + v b = v c)) + +(** Addition modulo [2^n] + + Machine integers can always be added, but the postcondition is now + in terms of addition modulo [2^n] on mathematical integers *) +val add_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.add_mod (v a) (v b) = v c)) + +(**** Subtraction primitives *) + + +(** Bounds-respecting subtraction + + The precondition enforces that the difference does not underflow, + expressing the bound as a difference on mathematical integers *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(** Underspecified, possibly overflowing subtraction: + + The postcondition only enures that the result is the difference of + the arguments in case there is no underflow *) +val sub_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures 
(fun c -> + size (v a - v b) n ==> v a - v b = v c)) + +(** Subtraction modulo [2^n] + + Machine integers can always be subtractd, but the postcondition is + now in terms of subtraction modulo [2^n] on mathematical integers *) +val sub_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.sub_mod (v a) (v b) = v c)) + +(**** Multiplication primitives *) + + +(** Bounds-respecting multiplication + + The precondition enforces that the product does not overflow, + expressing the bound as a product on mathematical integers *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(** Underspecified, possibly overflowing product + + The postcondition only enures that the result is the product of + the arguments in case there is no overflow *) +val mul_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a * v b) n ==> v a * v b = v c)) + +(** Multiplication modulo [2^n] + + Machine integers can always be multiplied, but the postcondition + is now in terms of product modulo [2^n] on mathematical integers *) +val mul_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mul_mod (v a) (v b) = v c)) + +(**** Division primitives *) + +(** Euclidean division of [a] and [b], with [b] non-zero *) +val div (a:t) (b:t{v b <> 0}) : Pure t + (requires (True)) + (ensures (fun c -> v a / v b = v c)) + +(**** Modulo primitives *) + +(** Euclidean remainder + + The result is the modulus of [a] with respect to a non-zero [b] *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mod (v a) (v b) = v c)) + +(**** Bitwise operators *) + +/// Also see FStar.BV + +(** Bitwise logical conjunction *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +(** Bitwise logical exclusive-or *) +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +(** Bitwise logical disjunction *) +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +(** Bitwise logical negation *) +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(**** Shift operators *) + +(** Shift right with zero fill, shifting at most the integer width *) +val shift_right (a:t) (s:t) : Pure t + (requires (v s < n)) + (ensures (fun c -> FStar.UInt.shift_right (v a) (v s) = v c)) + +(** Shift left with zero fill, shifting at most the integer width *) +val shift_left (a:t) (s:t) : Pure t + (requires (v s < n)) + (ensures (fun c -> FStar.UInt.shift_left (v a) (v s) = v c)) + +(**** Comparison operators *) + +(** Equality + + Note, it is safe to also use the polymorphic decidable equality + operator [=] *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) + +(** Greater than *) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) + +(** Greater than or equal *) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) + +(** Less than *) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) + +(** Less than or equal *) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(** Unary negation *) +inline_for_extraction +let minus (a:t) = add_mod (lognot a) (uint_to_t 1) + +(** The maximum shift value for this type, i.e. its width minus one, + as an *) +inline_for_extraction +let n_minus_one = uint_to_t (n - 1) + +#set-options "--z3rlimit 80 --initial_fuel 1 --max_fuel 1" + +(** A constant-time way to compute the equality of + two machine integers. 
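+
+    A typical use of such an all-ones/all-zeros mask is a branchless select:
+    if c is the resulting mask, then logor (logand c x) (logand (lognot c) y)
+    evaluates to x when a = b and to y otherwise, with no data-dependent
+    branch.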
+ + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=2e60bb395c1f589a398ec606d611132ef9ef764b + + Note, the branching on [a=b] is just for proof-purposes. + *) +[@ CNoInline ] +let eq_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a = v b ==> v c = pow2 n - 1) /\ + (v a <> v b ==> v c = 0))) + = let x = logxor a b in + let minus_x = minus x in + let x_or_minus_x = logor x minus_x in + let xnx = shift_right x_or_minus_x n_minus_one in + let c = sub_mod xnx (uint_to_t 1) in + if a = b then + begin + logxor_self (v a); + lognot_lemma_1 #n; + logor_lemma_1 (v x); + assert (v x = 0 /\ v minus_x = 0 /\ + v x_or_minus_x = 0 /\ v xnx = 0); + assert (v c = ones n) + end + else + begin + logxor_neq_nonzero (v a) (v b); + lemma_msb_pow2 #n (v (lognot x)); + lemma_msb_pow2 #n (v minus_x); + lemma_minus_zero #n (v x); + assert (v c = FStar.UInt.zero n) + end; + c + +private +val lemma_sub_msbs (a:t) (b:t) + : Lemma ((msb (v a) = msb (v b)) ==> (v a < v b <==> msb (v (sub_mod a b)))) + +(** A constant-time way to compute the [>=] inequality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=0a483a9b431d87eca1b275463c632f8d5551978a + *) +[@ CNoInline ] +let gte_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a >= v b ==> v c = pow2 n - 1) /\ + (v a < v b ==> v c = 0))) + = let x = a in + let y = b in + let x_xor_y = logxor x y in + let x_sub_y = sub_mod x y in + let x_sub_y_xor_y = logxor x_sub_y y in + let q = logor x_xor_y x_sub_y_xor_y in + let x_xor_q = logxor x q in + let x_xor_q_ = shift_right x_xor_q n_minus_one in + let c = sub_mod x_xor_q_ (uint_to_t 1) in + lemma_sub_msbs x y; + lemma_msb_gte (v x) (v y); + lemma_msb_gte (v y) (v x); + c +#reset-options + +(*** Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Plus_Question_Hat = add_underspec +unfold let op_Plus_Percent_Hat = add_mod +unfold let op_Subtraction_Hat = sub +unfold let op_Subtraction_Question_Hat = sub_underspec +unfold let op_Subtraction_Percent_Hat = sub_mod +unfold let op_Star_Hat = mul +unfold let op_Star_Question_Hat = mul_underspec +unfold let op_Star_Percent_Hat = mul_mod +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +(**** To input / output constants *) +(** In decimal representation *) +val to_string: t -> Tot string + +(** In hex representation (with leading 0x) *) +val to_string_hex: t -> Tot string + +(** In fixed-width hex representation (left-padded with zeroes, no leading 0x) *) +val to_string_hex_pad: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. 
+//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __uint_to_t (x:int) : Tot t + = uint_to_t x +#reset-options diff --git a/stage0/ulib/FStar.UInt64.fst b/stage0/ulib/FStar.UInt64.fst new file mode 100644 index 00000000000..16333a891e9 --- /dev/null +++ b/stage0/ulib/FStar.UInt64.fst @@ -0,0 +1,92 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.UInt64 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +type t : eqtype = + | Mk: v:uint_t n -> t + +let v x = x.v + +irreducible +let uint_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = uint_to_t 0 + +let one = uint_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let add_underspec a b = Mk (add_underspec (v a) (v b)) + +let add_mod a b = Mk (add_mod (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let sub_underspec a b = Mk (sub_underspec (v a) (v b)) + +let sub_mod a b = Mk (sub_mod (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let mul_underspec a b = Mk (mul_underspec (v a) (v b)) + +let mul_mod a b = Mk (mul_mod (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +#push-options "--z3rlimit 80 --fuel 1" //AR: working around the interleaving semantics of pragmas + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let lemma_sub_msbs a b + = from_vec_propriety (to_vec (v a)) 1; + from_vec_propriety (to_vec (v b)) 1; + from_vec_propriety (to_vec (v (sub_mod a b))) 1 + +#pop-options + +let to_string _ = admit () + +let to_string_hex _ = admit () + +let to_string_hex_pad _ = admit () + +let of_string _ = admit () diff --git a/stage0/ulib/FStar.UInt64.fsti b/stage0/ulib/FStar.UInt64.fsti new file mode 100644 index 00000000000..c83d48a5cc0 --- /dev/null +++ b/stage0/ulib/FStar.UInt64.fsti @@ -0,0 +1,362 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.UInt64 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 64 + +/// For FStar.UIntN.fstp: anything that you fix/update here should be +/// reflected in [FStar.IntN.fstp], which is mostly a copy-paste of +/// this module. +/// +/// Except, as compared to [FStar.IntN.fstp], here: +/// - every occurrence of [int_t] has been replaced with [uint_t] +/// - every occurrence of [@%] has been replaced with [%]. +/// - some functions (e.g., add_underspec, etc.) are only defined here, not on signed integers + +/// This module provides an abstract type for machine integers of a +/// given signedness and width. The interface is designed to be safe +/// with respect to arithmetic underflow and overflow. + +/// Note, we have attempted several times to re-design this module to +/// make it more amenable to normalization and to impose less overhead +/// on the SMT solver when reasoning about machine integer +/// arithmetic. The following github issue reports on the current +/// status of that work. +/// +/// https://github.com/FStarLang/FStar/issues/1757 + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(** Abstract type of machine integers, with an underlying + representation using a bounded mathematical integer *) +new val t : eqtype + +(** A coercion that projects a bounded mathematical integer from a + machine integer *) +val v (x:t) : Tot (uint_t n) + +(** A coercion that injects a bounded mathematical integers into a + machine integer *) +val uint_to_t (x:uint_t n) : Pure t + (requires True) + (ensures (fun y -> v y = x)) + +(** Injection/projection inverse *) +val uv_inv (x : t) : Lemma + (ensures (uint_to_t (v x) == x)) + [SMTPat (v x)] + +(** Projection/injection inverse *) +val vu_inv (x : uint_t n) : Lemma + (ensures (v (uint_to_t x) == x)) + [SMTPat (uint_to_t x)] + +(** An alternate form of the injectivity of the [v] projection *) +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +(** Constants 0 and 1 *) +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +(**** Addition primitives *) + +(** Bounds-respecting addition + + The precondition enforces that the sum does not overflow, + expressing the bound as an addition on mathematical integers *) +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(** Underspecified, possibly overflowing addition: + + The postcondition only enures that the result is the sum of the + arguments in case there is no overflow *) +val add_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a + v b) n ==> v a + v b = v c)) + +(** Addition modulo [2^n] + + Machine integers can always be added, but the postcondition is now + in terms of addition modulo [2^n] on mathematical integers *) +val add_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.add_mod (v a) (v b) = v c)) + +(**** Subtraction primitives *) + + +(** Bounds-respecting subtraction + + The precondition enforces that the difference does not underflow, + expressing the bound as a difference on mathematical integers *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(** Underspecified, possibly overflowing subtraction: + + The postcondition only enures that the result is the difference of + the arguments in case there is no underflow *) +val sub_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures 
(fun c -> + size (v a - v b) n ==> v a - v b = v c)) + +(** Subtraction modulo [2^n] + + Machine integers can always be subtractd, but the postcondition is + now in terms of subtraction modulo [2^n] on mathematical integers *) +val sub_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.sub_mod (v a) (v b) = v c)) + +(**** Multiplication primitives *) + + +(** Bounds-respecting multiplication + + The precondition enforces that the product does not overflow, + expressing the bound as a product on mathematical integers *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(** Underspecified, possibly overflowing product + + The postcondition only enures that the result is the product of + the arguments in case there is no overflow *) +val mul_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a * v b) n ==> v a * v b = v c)) + +(** Multiplication modulo [2^n] + + Machine integers can always be multiplied, but the postcondition + is now in terms of product modulo [2^n] on mathematical integers *) +val mul_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mul_mod (v a) (v b) = v c)) + +(**** Division primitives *) + +(** Euclidean division of [a] and [b], with [b] non-zero *) +val div (a:t) (b:t{v b <> 0}) : Pure t + (requires (True)) + (ensures (fun c -> v a / v b = v c)) + +(**** Modulo primitives *) + +(** Euclidean remainder + + The result is the modulus of [a] with respect to a non-zero [b] *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mod (v a) (v b) = v c)) + +(**** Bitwise operators *) + +/// Also see FStar.BV + +(** Bitwise logical conjunction *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +(** Bitwise logical exclusive-or *) +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +(** Bitwise logical disjunction *) +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +(** Bitwise logical negation *) +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(**** Shift operators *) + +(** Shift right with zero fill, shifting at most the integer width *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_right (v a) (UInt32.v s) = v c)) + +(** Shift left with zero fill, shifting at most the integer width *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_left (v a) (UInt32.v s) = v c)) + +(**** Comparison operators *) + +(** Equality + + Note, it is safe to also use the polymorphic decidable equality + operator [=] *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) + +(** Greater than *) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) + +(** Greater than or equal *) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) + +(** Less than *) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) + +(** Less than or equal *) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(** Unary negation *) +inline_for_extraction +let minus (a:t) = add_mod (lognot a) (uint_to_t 1) + +(** The maximum shift value for this type, i.e. its width minus one, + as an UInt32. 
*) +inline_for_extraction +let n_minus_one = UInt32.uint_to_t (n - 1) + +#set-options "--z3rlimit 80 --initial_fuel 1 --max_fuel 1" + +(** A constant-time way to compute the equality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=2e60bb395c1f589a398ec606d611132ef9ef764b + + Note, the branching on [a=b] is just for proof-purposes. + *) +[@ CNoInline ] +let eq_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a = v b ==> v c = pow2 n - 1) /\ + (v a <> v b ==> v c = 0))) + = let x = logxor a b in + let minus_x = minus x in + let x_or_minus_x = logor x minus_x in + let xnx = shift_right x_or_minus_x n_minus_one in + let c = sub_mod xnx (uint_to_t 1) in + if a = b then + begin + logxor_self (v a); + lognot_lemma_1 #n; + logor_lemma_1 (v x); + assert (v x = 0 /\ v minus_x = 0 /\ + v x_or_minus_x = 0 /\ v xnx = 0); + assert (v c = ones n) + end + else + begin + logxor_neq_nonzero (v a) (v b); + lemma_msb_pow2 #n (v (lognot x)); + lemma_msb_pow2 #n (v minus_x); + lemma_minus_zero #n (v x); + assert (v c = FStar.UInt.zero n) + end; + c + +private +val lemma_sub_msbs (a:t) (b:t) + : Lemma ((msb (v a) = msb (v b)) ==> (v a < v b <==> msb (v (sub_mod a b)))) + +(** A constant-time way to compute the [>=] inequality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=0a483a9b431d87eca1b275463c632f8d5551978a + *) +[@ CNoInline ] +let gte_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a >= v b ==> v c = pow2 n - 1) /\ + (v a < v b ==> v c = 0))) + = let x = a in + let y = b in + let x_xor_y = logxor x y in + let x_sub_y = sub_mod x y in + let x_sub_y_xor_y = logxor x_sub_y y in + let q = logor x_xor_y x_sub_y_xor_y in + let x_xor_q = logxor x q in + let x_xor_q_ = shift_right x_xor_q n_minus_one in + let c = sub_mod x_xor_q_ (uint_to_t 1) in + lemma_sub_msbs x y; + lemma_msb_gte (v x) (v y); + lemma_msb_gte (v y) (v x); + c +#reset-options + +(*** Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Plus_Question_Hat = add_underspec +unfold let op_Plus_Percent_Hat = add_mod +unfold let op_Subtraction_Hat = sub +unfold let op_Subtraction_Question_Hat = sub_underspec +unfold let op_Subtraction_Percent_Hat = sub_mod +unfold let op_Star_Hat = mul +unfold let op_Star_Question_Hat = mul_underspec +unfold let op_Star_Percent_Hat = mul_mod +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +(**** To input / output constants *) +(** In decimal representation *) +val to_string: t -> Tot string + +(** In hex representation (with leading 0x) *) +val to_string_hex: t -> Tot string + +(** In fixed-width hex representation (left-padded with zeroes, no leading 0x) *) +val to_string_hex_pad: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. 
+//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __uint_to_t (x:int) : Tot t + = uint_to_t x +#reset-options diff --git a/stage0/ulib/FStar.UInt8.fst b/stage0/ulib/FStar.UInt8.fst new file mode 100644 index 00000000000..00b53f2d14e --- /dev/null +++ b/stage0/ulib/FStar.UInt8.fst @@ -0,0 +1,92 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.UInt8 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +type t : eqtype = + | Mk: v:uint_t n -> t + +let v x = x.v + +irreducible +let uint_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = uint_to_t 0 + +let one = uint_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let add_underspec a b = Mk (add_underspec (v a) (v b)) + +let add_mod a b = Mk (add_mod (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let sub_underspec a b = Mk (sub_underspec (v a) (v b)) + +let sub_mod a b = Mk (sub_mod (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let mul_underspec a b = Mk (mul_underspec (v a) (v b)) + +let mul_mod a b = Mk (mul_mod (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +#push-options "--z3rlimit 80 --fuel 1" //AR: working around the interleaving semantics of pragmas + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let lemma_sub_msbs a b + = from_vec_propriety (to_vec (v a)) 1; + from_vec_propriety (to_vec (v b)) 1; + from_vec_propriety (to_vec (v (sub_mod a b))) 1 + +#pop-options + +let to_string _ = admit () + +let to_string_hex _ = admit () + +let to_string_hex_pad _ = admit () + +let of_string _ = admit () diff --git a/stage0/ulib/FStar.UInt8.fsti b/stage0/ulib/FStar.UInt8.fsti new file mode 100644 index 00000000000..4f36d383f53 --- /dev/null +++ b/stage0/ulib/FStar.UInt8.fsti @@ -0,0 +1,363 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.UInt8 + +(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) + +unfold let n = 8 + +/// For FStar.UIntN.fstp: anything that you fix/update here should be +/// reflected in [FStar.IntN.fstp], which is mostly a copy-paste of +/// this module. +/// +/// Except, as compared to [FStar.IntN.fstp], here: +/// - every occurrence of [int_t] has been replaced with [uint_t] +/// - every occurrence of [@%] has been replaced with [%]. +/// - some functions (e.g., add_underspec, etc.) are only defined here, not on signed integers + +/// This module provides an abstract type for machine integers of a +/// given signedness and width. The interface is designed to be safe +/// with respect to arithmetic underflow and overflow. + +/// Note, we have attempted several times to re-design this module to +/// make it more amenable to normalization and to impose less overhead +/// on the SMT solver when reasoning about machine integer +/// arithmetic. The following github issue reports on the current +/// status of that work. +/// +/// https://github.com/FStarLang/FStar/issues/1757 + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(** Abstract type of machine integers, with an underlying + representation using a bounded mathematical integer *) +new val t : eqtype + +(** A coercion that projects a bounded mathematical integer from a + machine integer *) +val v (x:t) : Tot (uint_t n) + +(** A coercion that injects a bounded mathematical integers into a + machine integer *) +val uint_to_t (x:uint_t n) : Pure t + (requires True) + (ensures (fun y -> v y = x)) + +(** Injection/projection inverse *) +val uv_inv (x : t) : Lemma + (ensures (uint_to_t (v x) == x)) + [SMTPat (v x)] + +(** Projection/injection inverse *) +val vu_inv (x : uint_t n) : Lemma + (ensures (v (uint_to_t x) == x)) + [SMTPat (uint_to_t x)] + +(** An alternate form of the injectivity of the [v] projection *) +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +(** Constants 0 and 1 *) +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +(**** Addition primitives *) + +(** Bounds-respecting addition + + The precondition enforces that the sum does not overflow, + expressing the bound as an addition on mathematical integers *) +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(** Underspecified, possibly overflowing addition: + + The postcondition only enures that the result is the sum of the + arguments in case there is no overflow *) +val add_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a + v b) n ==> v a + v b = v c)) + +(** Addition modulo [2^n] + + Machine integers can always be added, but the postcondition is now + in terms of addition modulo [2^n] on mathematical integers *) +val add_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.add_mod (v a) (v b) = v c)) + +(**** Subtraction primitives *) + + +(** Bounds-respecting subtraction + + The precondition enforces that the difference does not underflow, + expressing the bound as a difference on mathematical integers *) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(** Underspecified, possibly overflowing subtraction: + + The postcondition only enures that the result is the difference of + the arguments in case there is no underflow *) +val sub_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun 
c -> + size (v a - v b) n ==> v a - v b = v c)) + +(** Subtraction modulo [2^n] + + Machine integers can always be subtractd, but the postcondition is + now in terms of subtraction modulo [2^n] on mathematical integers *) +val sub_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.sub_mod (v a) (v b) = v c)) + +(**** Multiplication primitives *) + + +(** Bounds-respecting multiplication + + The precondition enforces that the product does not overflow, + expressing the bound as a product on mathematical integers *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(** Underspecified, possibly overflowing product + + The postcondition only enures that the result is the product of + the arguments in case there is no overflow *) +val mul_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a * v b) n ==> v a * v b = v c)) + +(** Multiplication modulo [2^n] + + Machine integers can always be multiplied, but the postcondition + is now in terms of product modulo [2^n] on mathematical integers *) +val mul_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mul_mod (v a) (v b) = v c)) + +(**** Division primitives *) + +(** Euclidean division of [a] and [b], with [b] non-zero *) +val div (a:t) (b:t{v b <> 0}) : Pure t + (requires (True)) + (ensures (fun c -> v a / v b = v c)) + +(**** Modulo primitives *) + +(** Euclidean remainder + + The result is the modulus of [a] with respect to a non-zero [b] *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mod (v a) (v b) = v c)) + +(**** Bitwise operators *) + +/// Also see FStar.BV + +(** Bitwise logical conjunction *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +(** Bitwise logical exclusive-or *) +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +(** Bitwise logical disjunction *) +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +(** Bitwise logical negation *) +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(**** Shift operators *) + +(** Shift right with zero fill, shifting at most the integer width *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_right (v a) (UInt32.v s) = v c)) + +(** Shift left with zero fill, shifting at most the integer width *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_left (v a) (UInt32.v s) = v c)) + +(**** Comparison operators *) + +(** Equality + + Note, it is safe to also use the polymorphic decidable equality + operator [=] *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) + +(** Greater than *) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) + +(** Greater than or equal *) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) + +(** Less than *) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) + +(** Less than or equal *) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) + +(** Unary negation *) +inline_for_extraction +let minus (a:t) = add_mod (lognot a) (uint_to_t 1) + +(** The maximum shift value for this type, i.e. its width minus one, + as an UInt32. 
*) +inline_for_extraction +let n_minus_one = UInt32.uint_to_t (n - 1) + +#set-options "--z3rlimit 80 --initial_fuel 1 --max_fuel 1" + +(** A constant-time way to compute the equality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=2e60bb395c1f589a398ec606d611132ef9ef764b + + Note, the branching on [a=b] is just for proof-purposes. + *) +[@ CNoInline ] +let eq_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a = v b ==> v c = pow2 n - 1) /\ + (v a <> v b ==> v c = 0))) + = let x = logxor a b in + let minus_x = minus x in + let x_or_minus_x = logor x minus_x in + let xnx = shift_right x_or_minus_x n_minus_one in + let c = sub_mod xnx (uint_to_t 1) in + if a = b then + begin + logxor_self (v a); + lognot_lemma_1 #n; + logor_lemma_1 (v x); + assert (v x = 0 /\ v minus_x = 0 /\ + v x_or_minus_x = 0 /\ v xnx = 0); + assert (v c = ones n) + end + else + begin + logxor_neq_nonzero (v a) (v b); + lemma_msb_pow2 #n (v (lognot x)); + lemma_msb_pow2 #n (v minus_x); + lemma_minus_zero #n (v x); + assert (v c = FStar.UInt.zero n) + end; + c + +private +val lemma_sub_msbs (a:t) (b:t) + : Lemma ((msb (v a) = msb (v b)) ==> (v a < v b <==> msb (v (sub_mod a b)))) + +(** A constant-time way to compute the [>=] inequality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=0a483a9b431d87eca1b275463c632f8d5551978a + *) +[@ CNoInline ] +let gte_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a >= v b ==> v c = pow2 n - 1) /\ + (v a < v b ==> v c = 0))) + = let x = a in + let y = b in + let x_xor_y = logxor x y in + let x_sub_y = sub_mod x y in + let x_sub_y_xor_y = logxor x_sub_y y in + let q = logor x_xor_y x_sub_y_xor_y in + let x_xor_q = logxor x q in + let x_xor_q_ = shift_right x_xor_q n_minus_one in + let c = sub_mod x_xor_q_ (uint_to_t 1) in + lemma_sub_msbs x y; + lemma_msb_gte (v x) (v y); + lemma_msb_gte (v y) (v x); + c +#reset-options + +(*** Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Plus_Question_Hat = add_underspec +unfold let op_Plus_Percent_Hat = add_mod +unfold let op_Subtraction_Hat = sub +unfold let op_Subtraction_Question_Hat = sub_underspec +unfold let op_Subtraction_Percent_Hat = sub_mod +unfold let op_Star_Hat = mul +unfold let op_Star_Question_Hat = mul_underspec +unfold let op_Star_Percent_Hat = mul_mod +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +(**** To input / output constants *) +(** In decimal representation *) +val to_string: t -> Tot string + +(** In hex representation (with leading 0x) *) +val to_string_hex: t -> Tot string + +(** In fixed-width hex representation (left-padded with zeroes, no leading 0x) *) +val to_string_hex_pad: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. 
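+//(For example, a machine-integer literal such as 255uy is translated to
+//__uint_to_t 255, with the check that 255 fits in 8 bits done at desugaring time.)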
+//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __uint_to_t (x:int) : Tot t + = uint_to_t x +#reset-options +unfold inline_for_extraction type byte = t diff --git a/stage0/ulib/FStar.UIntN.fstip b/stage0/ulib/FStar.UIntN.fstip new file mode 100644 index 00000000000..8155ad7493c --- /dev/null +++ b/stage0/ulib/FStar.UIntN.fstip @@ -0,0 +1,341 @@ +/// For FStar.UIntN.fstp: anything that you fix/update here should be +/// reflected in [FStar.IntN.fstp], which is mostly a copy-paste of +/// this module. +/// +/// Except, as compared to [FStar.IntN.fstp], here: +/// - every occurrence of [int_t] has been replaced with [uint_t] +/// - every occurrence of [@%] has been replaced with [%]. +/// - some functions (e.g., add_underspec, etc.) are only defined here, not on signed integers + +/// This module provides an abstract type for machine integers of a +/// given signedness and width. The interface is designed to be safe +/// with respect to arithmetic underflow and overflow. + +/// Note, we have attempted several times to re-design this module to +/// make it more amenable to normalization and to impose less overhead +/// on the SMT solver when reasoning about machine integer +/// arithmetic. The following github issue reports on the current +/// status of that work. +/// +/// https://github.com/FStarLang/FStar/issues/1757 + +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +(** Abstract type of machine integers, with an underlying + representation using a bounded mathematical integer *) +new val t : eqtype + +(** A coercion that projects a bounded mathematical integer from a + machine integer *) +val v (x:t) : Tot (uint_t n) + +(** A coercion that injects a bounded mathematical integers into a + machine integer *) +val uint_to_t (x:uint_t n) : Pure t + (requires True) + (ensures (fun y -> v y = x)) + +(** Injection/projection inverse *) +val uv_inv (x : t) : Lemma + (ensures (uint_to_t (v x) == x)) + [SMTPat (v x)] + +(** Projection/injection inverse *) +val vu_inv (x : uint_t n) : Lemma + (ensures (v (uint_to_t x) == x)) + [SMTPat (uint_to_t x)] + +(** An alternate form of the injectivity of the [v] projection *) +val v_inj (x1 x2: t): Lemma + (requires (v x1 == v x2)) + (ensures (x1 == x2)) + +(** Constants 0 and 1 *) +val zero : x:t{v x = 0} + +val one : x:t{v x = 1} + +(**** Addition primitives *) + +(** Bounds-respecting addition + + The precondition enforces that the sum does not overflow, + expressing the bound as an addition on mathematical integers *) +val add (a:t) (b:t) : Pure t + (requires (size (v a + v b) n)) + (ensures (fun c -> v a + v b = v c)) + +(** Underspecified, possibly overflowing addition: + + The postcondition only enures that the result is the sum of the + arguments in case there is no overflow *) +val add_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a + v b) n ==> v a + v b = v c)) + +(** Addition modulo [2^n] + + Machine integers can always be added, but the postcondition is now + in terms of addition modulo [2^n] on mathematical integers *) +val add_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.add_mod (v a) (v b) = v c)) + +(**** Subtraction primitives *) + + +(** Bounds-respecting subtraction + + The precondition enforces that the difference does not underflow, + expressing the bound as a difference on mathematical integers 
*) +val sub (a:t) (b:t) : Pure t + (requires (size (v a - v b) n)) + (ensures (fun c -> v a - v b = v c)) + +(** Underspecified, possibly overflowing subtraction: + + The postcondition only enures that the result is the difference of + the arguments in case there is no underflow *) +val sub_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a - v b) n ==> v a - v b = v c)) + +(** Subtraction modulo [2^n] + + Machine integers can always be subtractd, but the postcondition is + now in terms of subtraction modulo [2^n] on mathematical integers *) +val sub_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.sub_mod (v a) (v b) = v c)) + +(**** Multiplication primitives *) + + +(** Bounds-respecting multiplication + + The precondition enforces that the product does not overflow, + expressing the bound as a product on mathematical integers *) +val mul (a:t) (b:t) : Pure t + (requires (size (v a * v b) n)) + (ensures (fun c -> v a * v b = v c)) + +(** Underspecified, possibly overflowing product + + The postcondition only enures that the result is the product of + the arguments in case there is no overflow *) +val mul_underspec (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> + size (v a * v b) n ==> v a * v b = v c)) + +(** Multiplication modulo [2^n] + + Machine integers can always be multiplied, but the postcondition + is now in terms of product modulo [2^n] on mathematical integers *) +val mul_mod (a:t) (b:t) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mul_mod (v a) (v b) = v c)) + +(**** Division primitives *) + +(** Euclidean division of [a] and [b], with [b] non-zero *) +val div (a:t) (b:t{v b <> 0}) : Pure t + (requires (True)) + (ensures (fun c -> v a / v b = v c)) + +(**** Modulo primitives *) + +(** Euclidean remainder + + The result is the modulus of [a] with respect to a non-zero [b] *) +val rem (a:t) (b:t{v b <> 0}) : Pure t + (requires True) + (ensures (fun c -> FStar.UInt.mod (v a) (v b) = v c)) + +(**** Bitwise operators *) + +/// Also see FStar.BV + +(** Bitwise logical conjunction *) +val logand (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logand` v y = v z)) + +(** Bitwise logical exclusive-or *) +val logxor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logxor` v y == v z)) + +(** Bitwise logical disjunction *) +val logor (x:t) (y:t) : Pure t + (requires True) + (ensures (fun z -> v x `logor` v y == v z)) + +(** Bitwise logical negation *) +val lognot (x:t) : Pure t + (requires True) + (ensures (fun z -> lognot (v x) == v z)) + +(**** Shift operators *) + +(** Shift right with zero fill, shifting at most the integer width *) +val shift_right (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_right (v a) (UInt32.v s) = v c)) + +(** Shift left with zero fill, shifting at most the integer width *) +val shift_left (a:t) (s:UInt32.t) : Pure t + (requires (UInt32.v s < n)) + (ensures (fun c -> FStar.UInt.shift_left (v a) (UInt32.v s) = v c)) + +(**** Comparison operators *) + +(** Equality + + Note, it is safe to also use the polymorphic decidable equality + operator [=] *) +let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) + +(** Greater than *) +let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) + +(** Greater than or equal *) +let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) + +(** Less than *) +let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) + +(** Less than or equal *) +let lte (a:t) (b:t) : Tot bool = lte #n (v a) 
(v b) + +(** Unary negation *) +inline_for_extraction +let minus (a:t) = add_mod (lognot a) (uint_to_t 1) + +(** The maximum shift value for this type, i.e. its width minus one, + as an UInt32. *) +inline_for_extraction +let n_minus_one = UInt32.uint_to_t (n - 1) + +#set-options "--z3rlimit 80 --initial_fuel 1 --max_fuel 1" + +(** A constant-time way to compute the equality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=2e60bb395c1f589a398ec606d611132ef9ef764b + + Note, the branching on [a=b] is just for proof-purposes. + *) +[@ CNoInline ] +let eq_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a = v b ==> v c = pow2 n - 1) /\ + (v a <> v b ==> v c = 0))) + = let x = logxor a b in + let minus_x = minus x in + let x_or_minus_x = logor x minus_x in + let xnx = shift_right x_or_minus_x n_minus_one in + let c = sub_mod xnx (uint_to_t 1) in + if a = b then + begin + logxor_self (v a); + lognot_lemma_1 #n; + logor_lemma_1 (v x); + assert (v x = 0 /\ v minus_x = 0 /\ + v x_or_minus_x = 0 /\ v xnx = 0); + assert (v c = ones n) + end + else + begin + logxor_neq_nonzero (v a) (v b); + lemma_msb_pow2 #n (v (lognot x)); + lemma_msb_pow2 #n (v minus_x); + lemma_minus_zero #n (v x); + assert (v c = FStar.UInt.zero n) + end; + c + +private +val lemma_sub_msbs (a:t) (b:t) + : Lemma ((msb (v a) = msb (v b)) ==> (v a < v b <==> msb (v (sub_mod a b)))) + +(** A constant-time way to compute the [>=] inequality of + two machine integers. + + With inspiration from https://git.zx2c4.com/WireGuard/commit/src/crypto/curve25519-hacl64.h?id=0a483a9b431d87eca1b275463c632f8d5551978a + *) +[@ CNoInline ] +let gte_mask (a:t) (b:t) + : Pure t + (requires True) + (ensures (fun c -> (v a >= v b ==> v c = pow2 n - 1) /\ + (v a < v b ==> v c = 0))) + = let x = a in + let y = b in + let x_xor_y = logxor x y in + let x_sub_y = sub_mod x y in + let x_sub_y_xor_y = logxor x_sub_y y in + let q = logor x_xor_y x_sub_y_xor_y in + let x_xor_q = logxor x q in + let x_xor_q_ = shift_right x_xor_q n_minus_one in + let c = sub_mod x_xor_q_ (uint_to_t 1) in + lemma_sub_msbs x y; + lemma_msb_gte (v x) (v y); + lemma_msb_gte (v y) (v x); + c +#reset-options + +(*** Infix notations *) +unfold let op_Plus_Hat = add +unfold let op_Plus_Question_Hat = add_underspec +unfold let op_Plus_Percent_Hat = add_mod +unfold let op_Subtraction_Hat = sub +unfold let op_Subtraction_Question_Hat = sub_underspec +unfold let op_Subtraction_Percent_Hat = sub_mod +unfold let op_Star_Hat = mul +unfold let op_Star_Question_Hat = mul_underspec +unfold let op_Star_Percent_Hat = mul_mod +unfold let op_Slash_Hat = div +unfold let op_Percent_Hat = rem +unfold let op_Hat_Hat = logxor +unfold let op_Amp_Hat = logand +unfold let op_Bar_Hat = logor +unfold let op_Less_Less_Hat = shift_left +unfold let op_Greater_Greater_Hat = shift_right +unfold let op_Equals_Hat = eq +unfold let op_Greater_Hat = gt +unfold let op_Greater_Equals_Hat = gte +unfold let op_Less_Hat = lt +unfold let op_Less_Equals_Hat = lte + +(**** To input / output constants *) +(** In decimal representation *) +val to_string: t -> Tot string + +(** In hex representation (with leading 0x) *) +val to_string_hex: t -> Tot string + +(** In fixed-width hex representation (left-padded with zeroes, no leading 0x) *) +val to_string_hex_pad: t -> Tot string + +val of_string: string -> Tot t + +#set-options "--admit_smt_queries true" +//This private primitive is used internally by the +//compiler to translate 
bounded integer constants +//with a desugaring-time check of the size of the number, +//rather than an expensive verification check. +//Since it is marked private, client programs cannot call it directly +//Since it is marked unfold, it eagerly reduces, +//eliminating the verification overhead of the wrapper +private +unfold +let __uint_to_t (x:int) : Tot t + = uint_to_t x +#reset-options diff --git a/stage0/ulib/FStar.UIntN.fstp b/stage0/ulib/FStar.UIntN.fstp new file mode 100644 index 00000000000..f62ba12c4f9 --- /dev/null +++ b/stage0/ulib/FStar.UIntN.fstp @@ -0,0 +1,73 @@ +open FStar.UInt +open FStar.Mul + +#set-options "--max_fuel 0 --max_ifuel 0" + +type t : eqtype = + | Mk: v:uint_t n -> t + +let v x = x.v + +irreducible +let uint_to_t x = Mk x + +let uv_inv _ = () + +let vu_inv _ = () + +let v_inj _ _ = () + +let zero = uint_to_t 0 + +let one = uint_to_t 1 + +let add a b = Mk (add (v a) (v b)) + +let add_underspec a b = Mk (add_underspec (v a) (v b)) + +let add_mod a b = Mk (add_mod (v a) (v b)) + +let sub a b = Mk (sub (v a) (v b)) + +let sub_underspec a b = Mk (sub_underspec (v a) (v b)) + +let sub_mod a b = Mk (sub_mod (v a) (v b)) + +let mul a b = Mk (mul (v a) (v b)) + +let mul_underspec a b = Mk (mul_underspec (v a) (v b)) + +let mul_mod a b = Mk (mul_mod (v a) (v b)) + +let div a b = Mk (div (v a) (v b)) + +let rem a b = Mk (mod (v a) (v b)) + +let logand x y = Mk (logand (v x) (v y)) + +let logxor x y = Mk (logxor (v x) (v y)) + +let logor x y = Mk (logor (v x) (v y)) + +let lognot x = Mk (lognot (v x)) + +let shift_right a s = Mk (shift_right (v a) (UInt32.v s)) + +#push-options "--z3rlimit 80 --fuel 1" //AR: working around the interleaving semantics of pragmas + +let shift_left a s = Mk (shift_left (v a) (UInt32.v s)) + +let lemma_sub_msbs a b + = from_vec_propriety (to_vec (v a)) 1; + from_vec_propriety (to_vec (v b)) 1; + from_vec_propriety (to_vec (v (sub_mod a b))) 1 + +#pop-options + +let to_string _ = admit () + +let to_string_hex _ = admit () + +let to_string_hex_pad _ = admit () + +let of_string _ = admit () diff --git a/stage0/ulib/FStar.Udp.fsti b/stage0/ulib/FStar.Udp.fsti new file mode 100644 index 00000000000..4fc562bc99d --- /dev/null +++ b/stage0/ulib/FStar.Udp.fsti @@ -0,0 +1,45 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Udp +open FStar.Bytes +open FStar.Error + +(* Type declarations *) +new val socket: eqtype +new val sock_in_channel: Type0 +new val sock_out_channel: Type0 +new val udpListener: Type0 + +(* Server side *) +val listen: string -> nat -> EXT udpListener +val accept: udpListener -> EXT socket +val stop: udpListener -> EXT unit + +(* Client side *) +val connect: string -> nat -> EXT socket + +(* Input/Output *) +val recv: socket -> nat -> EXT (optResult string bytes) +val send: socket -> bytes -> EXT (optResult string unit) +val close: socket -> EXT unit + +(* Helper functions *) +val socket_split: socket -> EXT (sock_in_channel & sock_out_channel) +val flush: sock_out_channel -> EXT unit + +(* Unimplemented *) +//assume val connectTimeout: nat -> string -> nat -> EXT socket +//assume val acceptTimeout: nat -> tcpListener -> EXT socket diff --git a/stage0/ulib/FStar.Universe.fst b/stage0/ulib/FStar.Universe.fst new file mode 100644 index 00000000000..be74a827047 --- /dev/null +++ b/stage0/ulib/FStar.Universe.fst @@ -0,0 +1,32 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Universe + +(** This module implements some basic facilities to raise the universe of a type * + * The type [raise_t a] is supposed to be isomorphic to [a] but in a higher * + * universe. The two functions [raise_val] and [downgrade_val] allow to coerce * + * from [a] to [raise_t a] and back. **) + +noeq type raise0 (a : Type u#a) : Type u#(max a b) = +| Ret : a -> raise0 a + +let raise_t a = raise0 a +let raise_val #a x = Ret x +let downgrade_val #a x = match x with Ret x0 -> x0 + +let downgrade_val_raise_val #a x = () + +let raise_val_downgrade_val #a x = () diff --git a/stage0/ulib/FStar.Universe.fsti b/stage0/ulib/FStar.Universe.fsti new file mode 100644 index 00000000000..bde3b9f050a --- /dev/null +++ b/stage0/ulib/FStar.Universe.fsti @@ -0,0 +1,51 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Universe + +(** This module implements some basic facilities to raise the universe of a type * + * The type [raise_t a] is supposed to be isomorphic to [a] but in a higher * + * universe. The two functions [raise_val] and [downgrade_val] allow to coerce * + * from [a] to [raise_t a] and back. 
**) + + +(** [raise_t a] is an isomorphic copy of [a] (living in universe 'ua) in universe [max 'ua 'ub] **) +val raise_t ([@@@ strictly_positive] _ : Type u#a) : Type u#(max a b) + +(** [raise_val x] injects a value [x] of type [a] to [raise_t a] **) +val raise_val : #a:Type u#a -> x:a -> raise_t u#a u#b a + +(** [downgrade_val x] projects a value [x] of type [raise_t a] to [a] **) +val downgrade_val : #a:Type u#a -> x:raise_t u#a u#b a -> a + +val downgrade_val_raise_val + (#a: Type u#a) + (x: a) +: Lemma + (downgrade_val u#a u#b (raise_val x) == x) + [SMTPat (downgrade_val u#a u#b (raise_val x))] + +val raise_val_downgrade_val + (#a: Type u#a) + (x: raise_t u#a u#b a) +: Lemma + (raise_val (downgrade_val x) == x) + [SMTPat (raise_val u#a u#b (downgrade_val x))] + +let lift_dom #a #b (q:a -> b) : raise_t a -> b = + fun v -> q (downgrade_val v) + +let lift_codom #a #b (q:a -> b) : a -> raise_t b = + fun v -> raise_val (q v) diff --git a/stage0/ulib/FStar.Util.fst b/stage0/ulib/FStar.Util.fst new file mode 100644 index 00000000000..eb1a5db6713 --- /dev/null +++ b/stage0/ulib/FStar.Util.fst @@ -0,0 +1,29 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Util + +open FStar.Heap +open FStar.HyperStack + +(* 2016-11-22: the following MUST be defined here AFTER the above `open', + since they are used in [op_At_Plus_At] below *) +let op_Plus_Plus x y = TSet.union x y +let op_Plus_Plus_Hat x y = x ++ (TSet.singleton y) +let op_Hat_Plus_Hat x y = (TSet.singleton x) ++ (TSet.singleton y) + +let op_At_Plus_At (#a:Type) (#b:Type) (x:reference a) (y:reference b) = + Set.union (Set.singleton (as_addr x)) (Set.singleton (as_addr y)) +let op_Plus_Plus_At (#a:Type) (x:Set.set nat) (y:reference a) = Set.union x (Set.singleton (as_addr y)) diff --git a/stage0/ulib/FStar.Vector.Base.fst b/stage0/ulib/FStar.Vector.Base.fst new file mode 100644 index 00000000000..55308351d51 --- /dev/null +++ b/stage0/ulib/FStar.Vector.Base.fst @@ -0,0 +1,91 @@ + (* + Copyright 2008-2017 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Vector.Base +module U32 = FStar.UInt32 +module S = FStar.Seq + +/// The raw vector type: the main type provided by this module +let raw a l = s:S.seq a{S.length s = U32.v l} + +/// Abstractly, a `raw a l` is just a sequence whose length is `U32.v l`. 
+/// `reveal` and `hide` build an isomorphism establishing this +let reveal #a #l v = v +let hide #a s = s +let hide_reveal #a #l v = () +let reveal_hide #a s = () + +/// Extensional equality can be used to prove syntactic equality +let extensionality #a #l v1 v2 = () + +//////////////////////////////////////////////////////////////////////////////// +/// A small set of basic operations on vectors, corresponding to the operations on +/// sequences. Other operations can be derived from these, as we do for seq. +/// -- init, index, update, append, slice +//////////////////////////////////////////////////////////////////////////////// + +/// `init l contents`: +/// initialize an `l`-sized vector using `contents i` for the `i`th element +let init #a l contents = Seq.init (U32.v l) contents + +/// `index v i`: get the `i`th element of `v` +let index #a #l v i = Seq.index v (U32.v i) + +/// `update v i x`: +/// a new vector that differs from `v` only at index `i`, where it contains `x`. +let update #a #l v i x = Seq.upd v (U32.v i) x + +/// `append v1 v2`: +/// requires proving that the sum of the lengths of v1 and v2 still fit in a u32 +let append #a #l1 #l2 v1 v2 = Seq.append v1 v2 + +/// `sub v i j`: +/// the sub-vector of `v` starting from index `i` up to, but not including, `j` +let sub #a #l v i j = Seq.slice v (U32.v i) (U32.v j) + +//////////////////////////////////////////////////////////////////////////////// +/// Lemmas about the basic operations, all rather boring +/// -- Each is just a lifting specifying the corresponding operation on seq +//////////////////////////////////////////////////////////////////////////////// +let reveal_init #a l contents = () +let reveal_index #a #l v i = () +let reveal_update #a #l v i x = () +let reveal_append #a #l1 #l2 v1 v2 = () +let reveal_sub #a #l v i j = () + +//////////////////////////////////////////////////////////////////////////////// +/// Dynamically sized vectors +//////////////////////////////////////////////////////////////////////////////// + +let t a = (l:len_t & raw a l) + + +/// Unlike raw vectors, t-vectors support decidable equality +let t_has_eq a = () + +/// The length of a t-vector is a dynamically computable u32 +let len #a (| l , _ |) = l + +/// Access the underlying raw vector +let as_raw #a (|_, v|) = v + +/// Promote a raw vector +let from_raw #a #l v = (| l, v |) + +/// as_raw and from_raw are mutual inverses +let as_raw_from_raw #a #l v = () +let from_raw_as_raw #a x = () + +let dummy = () diff --git a/stage0/ulib/FStar.Vector.Base.fsti b/stage0/ulib/FStar.Vector.Base.fsti new file mode 100644 index 00000000000..2d7646375fa --- /dev/null +++ b/stage0/ulib/FStar.Vector.Base.fsti @@ -0,0 +1,345 @@ +(* + Copyright 2008-2017 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(* + + A library for vectors, i.e., immutable arrays, whose length is + representable by a machine integer, FStar.UInt32.t. 
+ + This is closely related to FStar.Seq, with the following main + differences: + + The type `raw a l`: A raw vector + + 1. Raw vectors receive special treatment during extraction, + especially by KaRaMeL, which extracts a vector to a raw C + pointer. When extracting to OCaml, a `raw a l` is a + `Batteries.Vect t a` + + 2. The length of a vector is representable in a U32.t + + 3. The interface is designed around a length-indexed type: this + enables the compilation to raw pointers, since this ensures + that all functions that manipulate vectors always have a U32 + variable describing that vector's length in scope. + + A length-indexed interface is also suitable for clients for whom + proving properties about the length is a primary concern: the + signatures in this interface carry intrinsic proofs about length + properties, simplifying proof obligations in client code. + + 4. Raw vectors lack decidable equality (since that cannot be + implemented given the representation choice in KaRaMeL) + + The type `t a`: A dynamically sized vector + + 1. Conceptually, a `t a` is a pair of a `len:U32.t` and a `raw a + len`. They are implemented as such by KaRaMeL. When extracting + to OCaml, `t a` is identical to `raw a _`, i.e., it is still + extracted to a `Batteries.Vect.t a` + + 2. Unlike raw vectors, `t a` supports decidable equality when it + is supported by `a`. This is the main reason `t a` is provided + at an abstract type, rather than being exposed as a pair of a + U32 and a raw vector, since the latter does not support + decidable equality. + + @summary Immutable vectors whose length is less than `pow2 32` +*) + +module FStar.Vector.Base +module U32 = FStar.UInt32 +module S = FStar.Seq + +//////////////////////////////////////////////////////////////////////////////// +/// The basic model of raw vectors as u32-length sequences +//////////////////////////////////////////////////////////////////////////////// + +/// The length of a vector fits in 32 bits +let len_t = U32.t + +/// A raw vector. +/// - `vector a n` is extracted to an `a*` in C by KaRaMeL +/// - Does not support decidable equality +val raw ([@@@strictly_positive] a:Type u#a) + (l:len_t) + : Type u#a + +/// A convenience to use `nat` for the length of vector in specs and proofs +let raw_length (#a:Type) (#l:len_t) (v:raw a l) : GTot nat = U32.v l + +(** + Abstractly, a `vec a l` is just a sequence whose length is `U32.v l`. 
+ `reveal` and `hide` build an isomorphism establishing this +**) + +val reveal: + #a:Type + -> #l:len_t + -> v:raw a l + -> GTot (s:S.seq a{S.length s = raw_length v}) + +val hide: + #a:Type + -> s:S.seq a{S.length s < pow2 32} + -> GTot (raw a (U32.uint_to_t (S.length s))) + +val hide_reveal: + #a:Type + -> #l:len_t + -> v:raw a l + -> Lemma (ensures (hide (reveal v) == v)) + [SMTPat (reveal v)] + +val reveal_hide: + #a:Type + -> s:S.seq a{S.length s < pow2 32} + -> Lemma (ensures (reveal (hide s) == s)) + [SMTPat (hide s)] + +/// Extensional equality for vectors +let equal (#a:Type) (#l:len_t) (v1:raw a l) (v2:raw a l) = + Seq.equal (reveal v1) (reveal v2) + +/// Extensional equality can be used to prove syntactic equality +val extensionality: + #a:Type + -> #l:len_t + -> v1:raw a l + -> v2:raw a l + -> Lemma (requires (equal v1 v2)) + (ensures (v1 == v2)) + +//////////////////////////////////////////////////////////////////////////////// +/// end of the basic model +//////////////////////////////////////////////////////////////////////////////// + + +//////////////////////////////////////////////////////////////////////////////// +/// A small set of basic operations on raw vectors, corresponding to the operations +/// on sequences. Other operations can be derived from these, as we do for seq. +/// -- init, index, update, append, slice +//////////////////////////////////////////////////////////////////////////////// + +/// `index_t v`: is the type of a within-bounds index of `v` +let index_t (#a:Type) (#l:len_t) (v:raw a l) = + m:len_t{U32.v m < U32.v l} + +/// `init l contents`: +/// initialize an `l`-sized vector using `contents i` for the `i`th element +val init: + #a:Type + -> l:len_t + -> contents: (i:nat { i < U32.v l } -> Tot a) + -> Tot (raw a l) + +/// `index v i`: get the `i`th element of `v` +val index: + #a:Type + -> #l:len_t + -> v:raw a l + -> i:index_t v + -> Tot a + +/// `v.[i]` is shorthand for `index v i` +unfold let op_String_Access #a #l = index #a #l + +/// `update v i x`: +/// - a new vector that differs from `v` only at index `i`, where it contains `x`. 
+/// - Incurs a full copy in KaRaMeL +/// - In OCaml, the new vector shares as much as possible with `v` +val update: + #a:Type + -> #l:len_t + -> v:raw a l + -> i:index_t v + -> x:a + -> Tot (raw a l) + +/// `v.[i] <- x` is shorthand for `update v i x` +unfold let op_String_Assignment #a #l = update #a #l + +/// `append v1 v2`: +/// - requires proving that the sum of the lengths of v1 and v2 still fit in a u32 +/// - Incurs a full copy in KaRaMeL +/// - Amortized constant time in OCaml +val append: + #a:Type + -> #l1:len_t + -> #l2:len_t + -> v1:raw a l1 + -> v2:raw a l2{UInt.size U32.(v l1 + v l2) U32.n} + -> Tot (raw a U32.(l1 +^ l2)) + +/// `v1 @| v2`: shorthand for `append v1 v2` +unfold let (@|) #a #l1 #l2 = append #a #l1 #l2 + +/// `sub v i j`: +/// - the sub-vector of `v` starting from index `i` up to, but not including, `j` +/// - Constant time in KaRaMeL (just an addition on a pointer) +/// - Worst-case (log l) time in OCaml +val sub: + #a:Type + -> #l:len_t + -> v:raw a l + -> i:len_t + -> j:len_t{U32.(v i <= v j /\ v j <= v l)} + -> Tot (raw a U32.(j -^ i)) + +//////////////////////////////////////////////////////////////////////////////// +/// Lemmas about the basic operations, all rather boring +/// -- Each is just a lifting specifying the corresponding operation on seq +//////////////////////////////////////////////////////////////////////////////// + +val reveal_init: + #a:Type + -> l:len_t + -> contents: (i:nat { i < U32.v l } -> Tot a) + -> Lemma + (ensures (reveal (init l contents) == Seq.init (U32.v l) contents)) + [SMTPat (init l contents)] + +val reveal_index: + #a:Type + -> #l:len_t + -> v:raw a l + -> i:index_t v + -> Lemma + (ensures (v.[i] == Seq.index (reveal v) (U32.v i))) + [SMTPat (v.[i])] + +val reveal_update: + #a:Type + -> #l:len_t + -> v:raw a l + -> i:index_t v + -> x:a + -> Lemma + (ensures (reveal (v.[i] <- x) == Seq.upd (reveal v) (U32.v i) x)) + [SMTPat (v.[i] <- x)] + +val reveal_append: + #a:Type + -> #l1:len_t + -> #l2:len_t + -> v1:raw a l1 + -> v2:raw a l2{UInt.size U32.(v l1 + v l2) U32.n} + -> Lemma + (ensures (reveal (v1 @| v2) == Seq.append (reveal v1) (reveal v2))) + [SMTPat (v1 @| v2)] + +val reveal_sub: + #a:Type + -> #l:len_t + -> v:raw a l + -> i:len_t + -> j:len_t{U32.(v i <= v j /\ v j <= v l)} + -> Lemma + (ensures (reveal (sub v i j) == S.slice (reveal v) (U32.v i) (U32.v j))) + [SMTPat (sub v i j)] + +//////////////////////////////////////////////////////////////////////////////// +/// Now, we have `Vector.Base.t`, abstractly, a raw vector paired with its u32 length +//////////////////////////////////////////////////////////////////////////////// +val t: + a:Type u#a + -> Type u#a + +/// Unlike raw vectors, t-vectors support decidable equality +val t_has_eq: + a:Type u#a + -> Lemma + (requires (hasEq a)) + (ensures (hasEq (t a))) + [SMTPat (hasEq (t a))] + +/// The length of a t-vector is a dynamically computable u32 +val len: + #a:Type + -> t a + -> len_t + +/// A convenience to access the length of a t-vector as a nat +[@@"deprecated: this will be moved to the ghost effect"] +let length (#a:Type) (x:t a) : nat = U32.v (len x) + +/// Access the underlying raw vector +val as_raw: + #a:Type + -> x:t a + -> raw a (len x) + +/// Promote a raw vector +val from_raw: + #a:Type + -> #l:len_t + -> v:raw a l + -> x:t a{len x = l} + +/// as_raw and from_raw are mutual inverses +val as_raw_from_raw: + #a:Type + -> #l:len_t + -> v:raw a l + -> Lemma (ensures (as_raw (from_raw v) == v)) + [SMTPat (from_raw v)] + +val from_raw_as_raw: + #a:Type + 
-> x:t a + -> Lemma (ensures (from_raw (as_raw x) == x)) + [SMTPat (as_raw x)] + +/// `v.(i)` accesses the ith element of v +unfold +let op_Array_Access + (#a:Type) + (x:t a) + (i:index_t (as_raw x)) + : Tot a + = (as_raw x).[i] + +/// `v.(i) <- x` is a new t-vector that differs from v only at i +unfold +let op_Array_Assignment + (#a:Type) + (x:t a) + (i:index_t (as_raw x)) + (v:a) + : Tot (t a) + = from_raw ((as_raw x).[i] <- v) + +/// `v1 @@ v2`: appending t-vectors +unfold +let (@@) + (#a:Type) + (x1:t a) + (x2:t a{UInt.size (length x1 + length x2) U32.n}) + : Tot (t a) + = from_raw (as_raw x1 @| as_raw x2) + +/// `slice v i j`: +/// the sub-vector of `v` starting from index `i` up to, but not including, `j` +unfold +let slice + (#a:Type) + (x:t a) + (i:len_t) + (j:len_t{U32.(v i <= v j /\ v j <= length x)}) + : Tot (t a) + = from_raw (sub (as_raw x) i j) + +val dummy : unit diff --git a/stage0/ulib/FStar.Vector.Properties.fst b/stage0/ulib/FStar.Vector.Properties.fst new file mode 100644 index 00000000000..6c46296e9d7 --- /dev/null +++ b/stage0/ulib/FStar.Vector.Properties.fst @@ -0,0 +1,100 @@ + (* + Copyright 2008-2017 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Vector.Properties +open FStar.Vector.Base +module S = FStar.Seq +module U32 = FStar.UInt32 + +/// This coercion seems to be necessary in some places +/// +/// For example, when trying to treat a `raw a (l1 +^ l2)` +/// as a `raw a (m1 +^ m2)` +/// F* type inference tries matches on the head symbol of the index +/// and tries to prove `l1 = m1 /\ l2 = m2` +/// which is often too strong. 
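+/// (For instance, in `append_inj` below the hypothesis only states that the
+/// sums `l1 +^ l2` and `m1 +^ m2` are equal, not that the summands coincide,
+/// so the two appends carry syntactically different index expressions.)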
+/// This coercion is a workaround in such cases
+unfold
+let coerce
+    (#a:Type)
+    (#l:len_t)
+    (v:raw a l)
+    (m:len_t{l == m})
+  : Tot (raw a m)
+  = v
+
+/// An abbreviation that states that some binary arithmetic
+/// operation on len_t's respects bounds
+unfold
+let ok
+    (op:int -> int -> int)
+    (l1:len_t)
+    (l2:len_t)
+  : Type
+  = UInt.size U32.(op (v l1) (v l2)) U32.n
+
+/// Most lemmas from FStar.Seq.Properties can just be lifted
+/// to vectors, although the lengths have to be bounds checked
+let append_inj
+    (#a:Type)
+    (#l1:len_t)
+    (#l2:len_t)
+    (#m1:len_t)
+    (#m2:len_t)
+    (u1:raw a l1)
+    (u2:raw a l2{ok (+) l1 l2})
+    (v1:raw a m1)
+    (v2:raw a m2{ok (+) m1 m2})
+  : Lemma
+    (requires (let open U32 in
+               m1 +^ m2 = l1 +^ l2 /\
+               equal (u1@|u2) (coerce (v1@|v2) (l1 +^ l2)) /\
+               (l1 == m1 \/ l2 == m2)))
+    (ensures (l1 = m1 /\
+              l2 = m2 /\
+              equal u1 v1 /\
+              equal u2 v2))
+  = FStar.Seq.lemma_append_inj (reveal u1) (reveal u2) (reveal v1) (reveal v2)
+
+let head (#a:Type) (#l:len_t{l <> 0ul}) (v:raw a l)
+  : Tot a
+  = v.[0ul]
+
+let tail (#a:Type) (#l:len_t{l <> 0ul}) (v:raw a l)
+  : Tot (raw a U32.(l -^ 1ul))
+  = sub v 1ul l
+
+let head_append
+    (#a:Type)
+    (#l1:len_t)
+    (#l2:len_t)
+    (v1:raw a l1{l1 <> 0ul})
+    (v2:raw a l2{ok (+) l1 l2})
+  : Lemma
+    (ensures (head (v1@|v2) == head v1))
+  = ()
+
+let tail_append
+    (#a:Type)
+    (#l1:len_t)
+    (#l2:len_t)
+    (v1:raw a l1{l1 <> 0ul})
+    (v2:raw a l2{ok (+) l1 l2})
+  : Lemma
+    (ensures (tail (v1@|v2) == tail v1@|v2))
+  = Seq.lemma_tail_append (reveal v1) (reveal v2)
+
+/// and so on ...
diff --git a/stage0/ulib/FStar.Vector.fst b/stage0/ulib/FStar.Vector.fst
new file mode 100644
index 00000000000..30261080018
--- /dev/null
+++ b/stage0/ulib/FStar.Vector.fst
@@ -0,0 +1,18 @@
+(*
+   Copyright 2008-2018 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*) +module FStar.Vector +include FStar.Vector.Base +include FStar.Vector.Properties diff --git a/stage0/ulib/FStar.WellFounded.Util.fst b/stage0/ulib/FStar.WellFounded.Util.fst new file mode 100644 index 00000000000..98661a47d84 --- /dev/null +++ b/stage0/ulib/FStar.WellFounded.Util.fst @@ -0,0 +1,121 @@ +module FStar.WellFounded.Util +open FStar.WellFounded + +#set-options "--warn_error -242" //inner let recs not encoded to SMT; ok + +let intro_lift_binrel (#a:Type) (r:binrel a) (y:a) (x:a) + : Lemma + (requires r y x) + (ensures lift_binrel r (| a, y |) (| a, x |)) + = let t0 : top = (| a, y |) in + let t1 : top = (| a, x |) in + let pf1 : squash (dfst t0 == a /\ dfst t1 == a) = () in + let pf2 : squash (r (dsnd t0) (dsnd t1)) = () in + let pf : squash (lift_binrel r t0 t1) = + FStar.Squash.bind_squash pf1 (fun (pf1: (dfst t0 == a /\ dfst t1 == a)) -> + FStar.Squash.bind_squash pf2 (fun (pf2: (r (dsnd t0) (dsnd t1))) -> + let p : lift_binrel r t0 t1 = (| pf1, pf2 |) in + FStar.Squash.return_squash p)) + in + () + +let elim_lift_binrel (#a:Type) (r:binrel a) (y:top) (x:a) + : Lemma + (requires lift_binrel r y (| a, x |)) + (ensures dfst y == a /\ r (dsnd y) x) + = let s : squash (lift_binrel r y (| a, x |)) = FStar.Squash.get_proof (lift_binrel r y (| a, x |)) in + let s : squash (dfst y == a /\ r (dsnd y) x) = FStar.Squash.bind_squash s (fun (pf:lift_binrel r y (|a, x|)) -> + let p1 : (dfst y == a /\ a == a) = dfst pf in + let p2 : r (dsnd y) x = dsnd pf in + introduce dfst y == a /\ r (dsnd y) x + with eliminate dfst y == a /\ a == a + returns _ + with l r. l + and FStar.Squash.return_squash p2) + in + () + +let lower_binrel (#a:Type) + (#r:binrel a) + (x y:top) + (p:lift_binrel r x y) + : r (dsnd x) (dsnd y) + = dsnd p + + +let lift_binrel_well_founded (#a:Type u#a) + (#r:binrel u#a u#r a) + (wf_r:well_founded r) + : well_founded (lift_binrel r) + = let rec aux (y:top{dfst y == a}) + (pf:acc r (dsnd y)) + : Tot (acc (lift_binrel r) y) + (decreases pf) + = AccIntro (fun (z:top) (p:lift_binrel r z y) -> + aux z (pf.access_smaller (dsnd z) (lower_binrel z y p))) + in + let aux' (y:top{dfst y =!= a}) + : acc (lift_binrel r) y + = AccIntro (fun y p -> false_elim ()) + in + fun (x:top) -> + let b = FStar.StrongExcludedMiddle.strong_excluded_middle (dfst x == a) in + if b + then aux x (wf_r (dsnd x)) + else aux' x + +let lower_binrel_squashed (#a:Type u#a) + (#r:binrel u#a u#r a) + (x y:top u#a) + (p:lift_binrel_squashed r x y) + : squash (r (dsnd x) (dsnd y)) + = assert (dfst x==a /\ dfst y==a /\ squash (r (dsnd x) (dsnd y))) + by FStar.Tactics.(exact (quote (FStar.Squash.return_squash p))) + + +let lift_binrel_squashed_well_founded (#a:Type u#a) + (#r:binrel u#a u#r a) + (wf_r:well_founded (squash_binrel r)) + : well_founded (lift_binrel_squashed r) + = let rec aux (y:top{dfst y == a}) + (pf:acc (squash_binrel r) (dsnd y)) + : Tot (acc (lift_binrel_squashed r) y) + (decreases pf) + = AccIntro (fun (z:top) (p:lift_binrel_squashed r z y) -> + let p = lower_binrel_squashed z y p in + aux z (pf.access_smaller (dsnd z) (FStar.Squash.join_squash p))) + in + let aux' (y:top{dfst y =!= a}) + : acc (lift_binrel_squashed r) y + = AccIntro (fun y p -> false_elim ()) + in + fun (x:top) -> + let b = FStar.StrongExcludedMiddle.strong_excluded_middle (dfst x == a) in + if b + then aux x (wf_r (dsnd x)) + else aux' x + +let lift_binrel_squashed_intro (#a:Type) (#r:binrel a) + (wf_r:well_founded (squash_binrel r)) + (x y:a) + (sqr:squash (r x y)) + : Lemma + (ensures lift_binrel_squashed r (|a, x|) (|a, 
y|)) + = assert (lift_binrel_squashed r (| a, x |) (| a , y |)) + by FStar.Tactics.( + norm [delta_only [`%lift_binrel_squashed]]; + split(); split(); trefl(); trefl(); + mapply (`FStar.Squash.join_squash) + ) + +let unsquash_well_founded (#a:Type u#a) (r:binrel u#a u#r a) (wf_r:well_founded (squash_binrel r)) + : well_founded u#a u#r r + = let rec f (x:a) + : Tot (acc r x) + (decreases {:well-founded (lift_binrel_squashed_as_well_founded_relation wf_r) (| a, x |)}) + = AccIntro (let g_smaller (y: a) (u: r y x) : acc r y = + lift_binrel_squashed_intro wf_r y x (FStar.Squash.return_squash u); + f y + in g_smaller) + in + f diff --git a/stage0/ulib/FStar.WellFounded.Util.fsti b/stage0/ulib/FStar.WellFounded.Util.fsti new file mode 100644 index 00000000000..3706c626750 --- /dev/null +++ b/stage0/ulib/FStar.WellFounded.Util.fsti @@ -0,0 +1,118 @@ +(* + Copyright 2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Authors: N. Swamy +*) + +module FStar.WellFounded.Util +open FStar.WellFounded +(** Provides some utilities related to well-founded relations *) + +(* 1. Given a well-founded relation `r:binrel a` + turn it into a well-founded relation on `binrel top`, + by construction a relation that only relates `top` elements + in `a` by `r` + + This is useful when writing type-polymorphic recursive functions + whose termination depends on some custom well-founded order + + See tests/micro-benchmarks/TestWellFoundedRecursion.rel_poly2 +*) + +let top = (b:Type & b) + +let lift_binrel (#a:Type) + (r:binrel a) + : binrel top + = fun (t0 t1:top) -> + (_:(dfst t0==a /\ dfst t1==a) & r (dsnd t0) (dsnd t1)) + +val intro_lift_binrel (#a:Type) (r:binrel a) (y:a) (x:a) + : Lemma + (requires r y x) + (ensures lift_binrel r (| a, y |) (| a, x |)) + +val elim_lift_binrel (#a:Type) (r:binrel a) (y:top) (x:a) + : Lemma + (requires lift_binrel r y (| a, x |)) + (ensures dfst y == a /\ r (dsnd y) x) + +val lower_binrel (#a:Type) + (#r:binrel a) + (x y:top) + (p:lift_binrel r x y) + : r (dsnd x) (dsnd y) + + +val lift_binrel_well_founded (#a:Type u#a) + (#r:binrel u#a u#r a) + (wf_r:well_founded r) + : well_founded (lift_binrel r) + +let lift_binrel_as_well_founded_relation (#a:Type u#a) (#r:binrel u#a u#r a) (wf_r:well_founded r) + : well_founded_relation u#(a + 1) u#r (top u#a) + = as_well_founded #top #(lift_binrel r) (lift_binrel_well_founded wf_r) + + +(* 2. Given a well-founded relation `r:binrel a` + turn it into a *squashed* well-founded relation on `binrel top`, + by construction a relation that only relates `top` elements + in `a` by `r` + + This is very similar to 1, but uses squashed types, + which leads to slightly better SMT automation at use sites. 
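+   (Intuitively, `squash (r x y)` is a proof-irrelevant proposition, so the
+   SMT solver can establish it directly, whereas an unsquashed `r x y` has to
+   be produced as an explicit proof term.)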
+ + See tests/micro-benchmarks/TestWellFoundedRecursion.rel_poly + +*) +let lift_binrel_squashed (#a:Type u#a) + (r:binrel u#a u#r a) + : binrel top + = fun (t0 t1:top) -> + (dfst t0==a /\ dfst t1==a /\ squash (r (dsnd t0) (dsnd t1))) + +val lower_binrel_squashed (#a:Type u#a) + (#r:binrel u#a u#r a) + (x y:top u#a) + (p:lift_binrel_squashed r x y) + : squash (r (dsnd x) (dsnd y)) + + +let squash_binrel (#a:Type) (r:binrel u#a u#r a) (x y:a) = squash (r x y) + +val lift_binrel_squashed_well_founded (#a:Type u#a) + (#r:binrel u#a u#r a) + (wf_r:well_founded (squash_binrel r)) + : well_founded (lift_binrel_squashed r) + + +let lift_binrel_squashed_as_well_founded_relation (#a:Type u#a) + (#r:binrel u#a u#r a) + (wf_r:well_founded (squash_binrel r)) + : well_founded_relation u#(a + 1) u#0 top + = as_well_founded #top #(lift_binrel_squashed r) (lift_binrel_squashed_well_founded wf_r) + +val lift_binrel_squashed_intro (#a:Type) + (#r:binrel a) + (wf_r:well_founded (squash_binrel r)) + (x y:a) + (sqr:squash (r x y)) + : Lemma + (ensures lift_binrel_squashed r (|a, x|) (|a, y|)) + +(* If a squashed relation is well-founded, then so is its unsquashed counterpart. + The converse is not true, i.e., the well-founded proof is in contravariant position here *) +val unsquash_well_founded (#a:Type u#a) (r:binrel u#a u#r a) (wf_r:well_founded (squash_binrel r)) + : well_founded u#a u#r r diff --git a/stage0/ulib/FStar.WellFounded.fst b/stage0/ulib/FStar.WellFounded.fst new file mode 100644 index 00000000000..eab78d99e16 --- /dev/null +++ b/stage0/ulib/FStar.WellFounded.fst @@ -0,0 +1,131 @@ +(* + Copyright 2015 Chantal Keller and Catalin Hritcu, Microsoft Research and Inria + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + Authors: Chantal Keller, Catalin Hritcu, Aseem Rastogi, Nikhil Swamy +*) + +(* Defining accessibility predicates and well-founded recursion like in Coq + https://coq.inria.fr/library/Coq.Init.Wf.html +*) + +module FStar.WellFounded + +#set-options "--warn_error -242" //inner let recs not encoded to SMT; ok + +let binrel (a:Type) = a -> a -> Type + +(* + * The accessibility relation + * -- Marked erasable, since this is a singleton type anyway + * -- Erasability also simplifies proofs that use accessibility in + * with axioms like indefinitedescription + *) +[@@ erasable] +noeq +type acc (#a:Type u#a) (r:binrel u#a u#r a) (x:a) : Type u#(max a r) = + | AccIntro : access_smaller:(y:a -> r y x -> acc r y) -> acc r x + +(* + * A binrel r is well-founded if every element is accessible + *) +let well_founded (#a:Type u#a) (r:binrel u#a u#r a) = x:a -> acc r x + +(* + * Accessibility predicates can be used for implementing + * total fix points + *) +let rec fix_F (#aa:Type) (#r:binrel aa) (#p:(aa -> Type)) + (f: (x:aa -> (y:aa -> r y x -> p y) -> p x)) + (x:aa) (a:acc r x) + : Tot (p x) (decreases a) + = f x (fun y h -> fix_F f y (a.access_smaller y h)) + +let fix (#aa:Type) (#r:binrel aa) (rwf:well_founded r) + (p:aa -> Type) (f:(x:aa -> (y:aa -> r y x -> p y) -> p x)) + (x:aa) + : p x + = fix_F f x (rwf x) + +let is_well_founded (#a:Type) (rel:binrel a) = + forall (x:a). squash (acc rel x) + +let well_founded_relation (a:Type) = rel:binrel a{is_well_founded rel} + +unfold +let as_well_founded (#a:Type u#a) + (#rel:binrel u#a u#r a) + (f:well_founded rel) + : well_founded_relation a + = introduce forall (x:a). squash (acc rel x) + with FStar.Squash.return_squash (FStar.Squash.return_squash (f x)); + rel + +open FStar.IndefiniteDescription + +(* + * Proofs that subrelation and inverse image commute with well-foundedness + * + * Reference: Constructing Recursion Operators in Type Theory, L. Paulson JSC (1986) 2, 325-355 + *) +let subrelation_wf (#a:Type) (#r #sub_r:binrel a) + (sub_w:(x:a -> y:a -> sub_r x y -> r x y)) + (r_wf:well_founded r) + : well_founded sub_r + = let rec aux (x:a) (acc_r:acc r x) : Tot (acc sub_r x) (decreases acc_r) = + AccIntro (fun y sub_r_y_x -> + aux y + (match acc_r with + | AccIntro f -> f y (sub_w y x sub_r_y_x))) in + fun x -> aux x (r_wf x) + +let subrelation_squash_wf (#a:Type u#a) + (#r:binrel u#a u#r a) + (#sub_r:binrel u#a u#sr a) + (sub_w:(x:a -> y:a -> sub_r x y -> squash (r x y))) + (r_wf:well_founded r) + : Lemma (is_well_founded sub_r) + = introduce forall (x:a). 
squash (acc sub_r x) + with ( + let rec acc_y (x:a) (acc_r:acc r x) (y:a) (p:sub_r y x) + : Tot (acc sub_r y) + (decreases acc_r) + = AccIntro (acc_y y (acc_r.access_smaller + y + (elim_squash (sub_w y x p)))) + in + FStar.Squash.return_squash (FStar.Squash.return_squash (AccIntro (acc_y x (r_wf x)))) + ) + +unfold +let subrelation_as_wf (#a:Type u#a) (#r #sub_r:binrel u#a u#r a) + (sub_w:(x:a -> y:a -> sub_r x y -> squash (r x y))) + (r_wf:well_founded r) + : well_founded_relation a + = subrelation_squash_wf sub_w r_wf; + sub_r + +let inverse_image (#a:Type u#a) (#b:Type u#b) (r_b:binrel u#b u#r b) (f:a -> b) : binrel u#a u#r a = + fun x y -> r_b (f x) (f y) + +let inverse_image_wf (#a:Type u#a) (#b:Type u#b) (#r_b:binrel u#b u#r b) + (f:a -> b) + (r_b_wf:well_founded r_b) + : well_founded (inverse_image r_b f) + = let rec aux (x:a) (acc_r_b:acc r_b (f x)) + : Tot (acc (inverse_image r_b f) x) + (decreases acc_r_b) = + AccIntro (fun y p -> aux y (acc_r_b.access_smaller (f y) p)) + in + fun x -> aux x (r_b_wf (f x)) diff --git a/stage0/ulib/FStar.WellFoundedRelation.fst b/stage0/ulib/FStar.WellFoundedRelation.fst new file mode 100644 index 00000000000..8fc20a895a5 --- /dev/null +++ b/stage0/ulib/FStar.WellFoundedRelation.fst @@ -0,0 +1,213 @@ +(* + Copyright 2022 Jay Lorch and Nikhil Swamy, Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(* This library is intended to simplify using well-founded relations + in decreases clauses. +*) + +module FStar.WellFoundedRelation + +open FStar.Universe +module WF = FStar.WellFounded +module WFU = FStar.WellFounded.Util + +let rec default_decreaser (#a: Type u#a) (x: a) + : Tot (acc_classical (default_relation #a) x) (decreases x) = + let smaller (y: a{default_relation y x}) : acc_classical (default_relation #a) y = + default_decreaser y + in + AccClassicalIntro smaller + +let default_wfr (a: Type u#a) : (wfr: wfr_t a{wfr.relation == default_relation}) = + let proof (x1: a) (x2: a) + : Lemma (requires default_relation x1 x2) + (ensures default_decreaser x1 << default_decreaser x2) = + assert ((default_decreaser x2).access_smaller x1 == default_decreaser x1) + in + { relation = default_relation; decreaser = default_decreaser; proof = proof; } + +let rec empty_decreaser (#a: Type u#a) (x: a) + : Tot (acc_classical (empty_relation #a) x) (decreases x) = + let smaller (y: a{empty_relation y x}) : acc_classical (empty_relation #a) y = + empty_decreaser y + in + AccClassicalIntro smaller + +let empty_wfr (a: Type u#a) : (wfr: wfr_t a{wfr.relation == empty_relation}) = + let proof (x1: a) (x2: a) + : Lemma (requires empty_relation x1 x2) (ensures empty_decreaser x1 << empty_decreaser x2) = + assert ((empty_decreaser x2).access_smaller x1 == empty_decreaser x1) + in + { relation = empty_relation; decreaser = empty_decreaser; proof = proof; } + +let rec acc_decreaser + (#a: Type u#a) + (r: a -> a -> Type0) + (f: WF.well_founded r{forall x1 x2 (p: r x1 x2). 
(f x2).access_smaller x1 p == f x1}) + (x: a) + : Tot (acc_classical (acc_relation r) x) (decreases (f x)) = + let smaller (y: a{(acc_relation r) y x}) : (acc_classical (acc_relation r) y) = ( + eliminate exists (p: r y x). True + returns f y << f x + with _. assert ((f x).access_smaller y p == f y); + acc_decreaser r f y + ) in + AccClassicalIntro smaller + +let rec eta_expand_well_founded (#a: Type) (r: WF.binrel a) (wf_r: WF.well_founded r) (x: a) + : Tot (WF.acc r x) + (decreases {:well-founded (WFU.lift_binrel_as_well_founded_relation wf_r) (| a, x |)}) + = WF.AccIntro (let g_smaller (y: a) (u: r y x) : WF.acc r y = + WFU.intro_lift_binrel r y x; + eta_expand_well_founded r wf_r y + in g_smaller) + +let acc_to_wfr (#a: Type u#a) (r: WF.binrel u#a u#0 a) (f: WF.well_founded r) + : (wfr: wfr_t a{wfr.relation == acc_relation r}) = + let f = eta_expand_well_founded r f in + let proof (x1: a) (x2: a) + : Lemma (requires acc_relation r x1 x2) + (ensures acc_decreaser r f x1 << acc_decreaser r f x2) = + assert ((acc_decreaser r f x2).access_smaller x1 == acc_decreaser r f x1) + in + { relation = acc_relation r; decreaser = acc_decreaser r f; proof = proof; } + +let rec subrelation_decreaser (#a: Type u#a) (r: a -> a -> Type0) + (wfr: wfr_t a{forall x1 x2. r x1 x2 ==> wfr.relation x1 x2}) (x: a) + : Tot (acc_classical r x) (decreases wfr.decreaser x) = + let smaller (y: a{r y x}) : (acc_classical r y) = + subrelation_decreaser r wfr y + in + AccClassicalIntro smaller + +let subrelation_to_wfr (#a: Type u#a) (r: a -> a -> Type0) + (wfr: wfr_t a{forall x1 x2. r x1 x2 ==> wfr.relation x1 x2}) + : (wfr': wfr_t a{wfr'.relation == r}) = + let proof (x1: a) (x2: a) + : Lemma (requires r x1 x2) + (ensures subrelation_decreaser r wfr x1 << subrelation_decreaser r wfr x2) = + assert ((subrelation_decreaser r wfr x2).access_smaller x1 == subrelation_decreaser r wfr x1) + in + { relation = r; decreaser = subrelation_decreaser r wfr; proof = proof; } + +let rec inverse_image_decreaser (#a: Type u#a) (#b: Type u#b) (r: a -> a -> Type0) (f: a -> b) + (wfr: wfr_t b{forall x1 x2. r x1 x2 ==> wfr.relation (f x1) (f x2)}) + (x: a) + : Tot (acc_classical r x) (decreases wfr.decreaser (f x)) = + let smaller (y: a{r y x}) : (acc_classical r y) = + inverse_image_decreaser r f wfr y + in + AccClassicalIntro smaller + +let inverse_image_to_wfr (#a: Type u#a) (#b: Type u#b) (r: a -> a -> Type0) (f: a -> b) + (wfr: wfr_t b{forall x1 x2. 
r x1 x2 ==> wfr.relation (f x1) (f x2)}) + : (wfr': wfr_t a{wfr'.relation == r}) = + let proof (x1: a) (x2: a) + : Lemma (requires r x1 x2) + (ensures inverse_image_decreaser r f wfr x1 << inverse_image_decreaser r f wfr x2) = + assert ((inverse_image_decreaser r f wfr x2).access_smaller x1 == + inverse_image_decreaser r f wfr x1) + in + { relation = r; decreaser = inverse_image_decreaser r f wfr; proof = proof; } + +let rec lex_nondep_decreaser (#a: Type u#a) (#b: Type u#b) (wfr_a: wfr_t a) (wfr_b: wfr_t b) + (xy: a & b) + : Tot (acc_classical (lex_nondep_relation wfr_a wfr_b) xy) + (decreases %[wfr_a.decreaser (fst xy); wfr_b.decreaser (snd xy)]) = + let smaller (xy': a & b{lex_nondep_relation wfr_a wfr_b xy' xy}) + : (acc_classical (lex_nondep_relation wfr_a wfr_b) xy') = + lex_nondep_decreaser wfr_a wfr_b xy' + in + AccClassicalIntro smaller + +let lex_nondep_wfr (#a: Type u#a) (#b: Type u#b) (wfr_a: wfr_t a) (wfr_b: wfr_t b) + : wfr: wfr_t (a & b){wfr.relation == lex_nondep_relation wfr_a wfr_b} = + let proof (xy1: a & b) (xy2: a & b) + : Lemma (requires lex_nondep_relation wfr_a wfr_b xy1 xy2) + (ensures lex_nondep_decreaser wfr_a wfr_b xy1 << + lex_nondep_decreaser wfr_a wfr_b xy2) = + assert ((lex_nondep_decreaser wfr_a wfr_b xy2).access_smaller xy1 == + lex_nondep_decreaser wfr_a wfr_b xy1) + in + { relation = lex_nondep_relation wfr_a wfr_b; + decreaser = lex_nondep_decreaser wfr_a wfr_b; + proof = proof; } + +let rec lex_dep_decreaser (#a: Type u#a) (#b: a -> Type u#b) (wfr_a: wfr_t a) + (a_to_wfr_b: (x: a -> wfr_t (b x))) (xy: (x: a & b x)) + : Tot (acc_classical (lex_dep_relation wfr_a a_to_wfr_b) xy) + (decreases %[wfr_a.decreaser (dfst xy); (a_to_wfr_b (dfst xy)).decreaser (dsnd xy)]) = + let smaller (xy': (x: a & b x){lex_dep_relation wfr_a a_to_wfr_b xy' xy}) + : (acc_classical (lex_dep_relation wfr_a a_to_wfr_b) xy') = + lex_dep_decreaser wfr_a a_to_wfr_b xy' + in + AccClassicalIntro smaller + +let lex_dep_wfr (#a: Type u#a) (#b: a -> Type u#b) (wfr_a: wfr_t a) + (a_to_wfr_b: (x: a -> wfr_t (b x))) + : wfr: wfr_t (x: a & b x){wfr.relation == lex_dep_relation wfr_a a_to_wfr_b} = + let proof (xy1: (x: a & b x)) (xy2: (x: a & b x)) + : Lemma (requires lex_dep_relation wfr_a a_to_wfr_b xy1 xy2) + (ensures lex_dep_decreaser wfr_a a_to_wfr_b xy1 << + lex_dep_decreaser wfr_a a_to_wfr_b xy2) = + assert ((lex_dep_decreaser wfr_a a_to_wfr_b xy2).access_smaller xy1 == + lex_dep_decreaser wfr_a a_to_wfr_b xy1) + in + { relation = lex_dep_relation wfr_a a_to_wfr_b; + decreaser = lex_dep_decreaser wfr_a a_to_wfr_b; + proof = proof; } + +let bool_wfr: (wfr: wfr_t bool{wfr.relation == bool_relation}) = + inverse_image_to_wfr #bool #nat bool_relation (fun b -> if b then 1 else 0) (default_wfr nat) + +let option_wfr (#a: Type u#a) (wfr: wfr_t a) + : wfr': wfr_t (option a){wfr'.relation == option_relation wfr} = + // We'll need the unit type raised to universe u#a + + let unit_a: Type u#a = raise_t unit in + + // Step 1: Create a function f mapping an `option a` to a + // `(b: bool & (if b then a else unit_a))`. It should map + // `Some x` to `(| true, x |)` and `None` to `(| false, () |)`. + + let f: option a -> (b: bool & (if b then a else unit_a)) = + fun opt -> (match opt with | Some x -> (| true, x |) | None -> (| false, raise_val () |) ) + in + + // Step 2: Create a wfr for (b: bool & (if b then a else unit_a)) using lex_dep_wfr. + // The precedence for the bool field should be bool_wfr (i.e., false precedes true). 
+ // The precedence for the (if b then a else unit_a) field should be either wfr or + // empty_wfr, depending on whether b is true or false. + + let bool_to_wfr_a (b: bool) : wfr_t (if b then a else unit_a) = + if b then wfr else empty_wfr unit_a + in + let wfr_bool_a: wfr_t (b: bool & (if b then a else unit_a)) = + lex_dep_wfr bool_wfr bool_to_wfr_a + in + + assert (forall (bx1: (b: bool & (if b then a else unit_a))) + (bx2: (b: bool & (if b then a else unit_a))). + wfr_bool_a.relation bx1 bx2 <==> + (let (| b1, x1 |), (| b2, x2 |) = bx1, bx2 in + (not b1 && b2) \/ (b1 && b2 /\ wfr.relation x1 x2))); + + // Step 3: Create the final wfr using inverse_image_to_wfr, using `f` as the mapping + // function from `option a` to `(b: bool & (if b then a else unit_a))` and + // `wfr_bool_a` as the wfr_t for `(b: bool & (if b then a else unit_a))`. + + assert (forall opt1 opt2. (option_relation wfr) opt1 opt2 ==> wfr_bool_a.relation (f opt1) (f opt2)); + inverse_image_to_wfr (option_relation wfr) f wfr_bool_a diff --git a/stage0/ulib/FStar.WellFoundedRelation.fsti b/stage0/ulib/FStar.WellFoundedRelation.fsti new file mode 100644 index 00000000000..4d56cf786c0 --- /dev/null +++ b/stage0/ulib/FStar.WellFoundedRelation.fsti @@ -0,0 +1,230 @@ +(* + Copyright 2022 Jay Lorch and Nikhil Swamy, Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(* This library is intended to simplify using well-founded relations + in decreases clauses. + + The key data structure is `wfr_t a`, which encapsulates a + well-founded relation on `a`. Specifically, the predicate + `wfr.relation x1 x2` means that `x1` precedes `x2` in the + well-founded relation described by `wfr`. + + You can then use this relatedness to show that a function is + decreasing in a certain term. Whenever `wfr.relation x1 x2` holds, + `wfr.decreaser x1 << wfr.decreaser x2` also holds. The library has + an ambient lemma triggered by seeing two instances of + `wfr.decreaser`, so you can use `wfr.decreaser x` in your decreases + clause. For example: + + // Define `nat_nat_wfr` to represent the lexicographically-precedes + // relation between two elements of type `nat * nat`. That is, + // `(x1, y1)` is related to `(x2, y2)` if + // `x1 < x2 \/ (x1 == x2 /\ y1 < y2)`. + + let nat_nat_wfr = lex_nondep_wfr (default_wfr nat) (default_wfr nat) + + // To show that `f` is well-defined, we use the decreases clause + // `nat_nat_wfr.decreaser (x, y)`. We then need to show, on each + // recursive call, that the parameters x2 and y2 to the nested + // call satisfy `nat_nat_wfr.relation (x2, y2) (x, y)`. 
+ + let rec f (x: nat) (y: nat) + : Tot nat (decreases (nat_nat_wfr.decreaser (x, y))) = + if x = 0 then + 0 + else if y = 0 then ( + // This assertion isn't necessary; it's just for illustration + assert (nat_nat_wfr.relation (x - 1, 100) (x, y)); + f (x - 1) 100 + ) + else ( + // This assertion isn't necessary; it's just for illustration + assert (nat_nat_wfr.relation (x, y - 1) (x, y)); + f x (y - 1) + ) + + One way `wfr_t` can be useful is that it simplifies debugging when + the SMT solver can't verify termination. You can assert the + relation explicitly (as in the examples above), and if the assertion + doesn't hold you can try to prove it. If you instead use something + like `decreases %[x, y]`, it's harder to debug because you can't + `assert (%[x2, y2] << %[x, y])`. + + But where `wfr_t` is most useful is in writing a function that takes + a well-founded relation as input. Here's an illustrative example: + + let rec count_steps_to_none + (#a: Type) + (wfr: wfr_t a) + (stepper: (x: a) -> (y: option a{Some? y ==> wfr.relation (Some?.v y) x})) + (start: option a) + : Tot nat (decreases (option_wfr wfr).decreaser start) = + match start with + | None -> 0 + | Some x -> 1 + count_steps_to_none wfr stepper (stepper x) + + `wfr_t` is also useful when composing a well-founded relation + produced using `acc` (from the FStar.WellFounded library) with one + or more other well-founded relations. + + There are a few ways to build a `wfr_t`, described in more detail in + comments throughout this file. Those ways are: + + `default_wfr a` + `empty_wfr a` + `acc_to_wfr r f` + `subrelation_to_wfr r wfr` + `inverse_image_to_wfr r f wfr` + `lex_nondep_wfr wfr_a wfr_b` + `lex_dep_wfr wfr_a a_to_wfr_b` + `bool_wfr` + `option_wfr wfr` +*) + +module FStar.WellFoundedRelation + +noeq type acc_classical (#a: Type u#a) (r: a -> a -> Type0) (x: a) : Type u#a = + | AccClassicalIntro : access_smaller:(y: a{r y x} -> acc_classical r y) -> acc_classical r x + +noeq type wfr_t (a: Type u#a) : Type u#(a + 1) = + { + relation: a -> a -> Type0; + decreaser: (x: a -> acc_classical relation x); + proof: (x1: a) -> (x2: a) -> + Lemma (requires relation x1 x2) (ensures decreaser x1 << decreaser x2); + } + +let ambient_wfr_lemma (#a: Type u#a) (wfr: wfr_t a) (x1: a) (x2: a) + : Lemma (requires wfr.relation x1 x2) + (ensures wfr.decreaser x1 << wfr.decreaser x2) + [SMTPat (wfr.decreaser x1); SMTPat (wfr.decreaser x2)] = + wfr.proof x1 x2 + +/// `default_wfr a` is the well-founded relation built into F* for +/// type `a`. For instance, for `nat` it's the less-than relation. +/// For an inductive type it's the sub-term ordering. +/// +/// `(default_wfr a).relation` is `default_relation` as defined below. + +let default_relation (#a: Type u#a) (x1: a) (x2: a) : Type0 = x1 << x2 + +val default_wfr (a: Type u#a) : (wfr: wfr_t a{wfr.relation == default_relation}) + +/// `empty_wfr a` is the empty well-founded relation, which doesn't relate any +/// pair of values. +/// +/// `(empty_wfr a).relation` is `empty_relation` as defined below. + +let empty_relation (#a: Type u#a) (x1: a) (x2: a) : Type0 = False + +val empty_wfr (a: Type u#a) : (wfr: wfr_t a{wfr.relation == empty_relation}) + +/// `acc_to_wfr r f` is a `wfr_t` built from a relation `r` and a +/// function `f: well-founded r`. Such a function demonstrates that +/// `r` is well-founded using the accessibility type `acc` described +/// in FStar.WellFounded.fst. +/// +/// `(acc_to_wfr r f).relation` is `acc_relation r` as defined below. 
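+///
+/// For example, the following sketch (illustrative only, not part of
+/// this interface) packages a classical well-foundedness proof as a
+/// `wfr_t`, whose `decreaser` can then appear in decreases clauses:
+///
+///   let wfr_of_acc (#a: Type0) (r: a -> a -> Type0)
+///                  (wf: FStar.WellFounded.well_founded r)
+///     : wfr_t a
+///     = acc_to_wfr r wf
+///
+/// Note that the resulting relation is `acc_relation r` (an existential
+/// wrapper around `r`) rather than `r` itself; `subrelation_to_wfr` can
+/// be used to recover `r`, provided `r x1 x2` implies
+/// `acc_relation r x1 x2`.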
+ +let acc_relation (#a: Type u#a) (r: a -> a -> Type0) (x1: a) (x2: a) : Type0 = exists (p: r x1 x2). True + +val acc_to_wfr (#a: Type u#a) (r: a -> a -> Type0) (f: FStar.WellFounded.well_founded r) + : (wfr: wfr_t a{wfr.relation == acc_relation r}) + +/// `subrelation_to_wfr r wfr` is a `wfr_t` built from a relation `r` +/// that's a subrelation of an existing well-founded relation `wfr`. +/// By "subrelation" we mean that any pair related by `r` is also +/// related by `wfr`. +/// +/// `(subrelation_to_wfr r wfr).relation` is the parameter `r`. + +val subrelation_to_wfr (#a: Type u#a) (r: a -> a -> Type0) + (wfr: wfr_t a{forall x1 x2. r x1 x2 ==> wfr.relation x1 x2}) + : (wfr': wfr_t a{wfr'.relation == r}) + +/// `inverse_image_to_wfr r f wfr` is a `wfr_t` built from a relation +/// `r: a -> a -> Type0`, a function `f: a -> b`, and an existing +/// well-founded relation `wfr` on `b`. The relation `r` must be an +/// "inverse image" of `wfr` using the mapping function `f`, meaning +/// that `forall x1 x2. r x1 x2 ==> wfr.relation (f x1) (f x2)`. +/// +/// `(inverse_image_to_wfr r f wfr).relation` is the parameter `r`. + +val inverse_image_to_wfr + (#a: Type u#a) + (#b: Type u#b) + (r: a -> a -> Type0) + (f: a -> b) + (wfr: wfr_t b{forall x1 x2. r x1 x2 ==> wfr.relation (f x1) (f x2)}) + : (wfr': wfr_t a{wfr'.relation == r}) + +/// `lex_nondep_wfr wfr_a wfr_b` is a `wfr_t` describing lexicographic +/// precedence for non-dependent tuples of some type `a * b`. It's +/// built from two well-founded relations: a `wfr_t a` and a `wfr_t +/// b`. +/// +/// `(lex_nondep_wfr wfr_a wfr_b).relation` is `lex_nondep_relation +/// wfr_a wfr_b` as defined below. + +let lex_nondep_relation (#a: Type u#a) (#b: Type u#b) (wfr_a: wfr_t a) (wfr_b: wfr_t b) + (xy1: a & b) (xy2: a & b) + : Type0 = + let (x1, y1), (x2, y2) = xy1, xy2 in + wfr_a.relation x1 x2 \/ (x1 == x2 /\ wfr_b.relation y1 y2) + +val lex_nondep_wfr (#a: Type u#a) (#b: Type u#b) (wfr_a: wfr_t a) (wfr_b: wfr_t b) + : wfr: wfr_t (a & b){wfr.relation == lex_nondep_relation wfr_a wfr_b} + +/// `lex_dep_wfr wfr_a a_to_wfr_b` is a `wfr_t` describing +/// lexicographic precedence for dependent tuples of type `(x: a & b +/// x)`. It's built from a well-founded relation of type `wfr_t a` +/// and a function `a_to_wfr_b` that maps each `x: a` to a `wfr_t (b +/// x)`. +/// +/// `(lex_dep_wfr wfr_a a_to_wfr_b).relation` is `lex_dep_relation +/// wfr_a a_to_wfr_b` as defined below. + +let lex_dep_relation (#a: Type u#a) (#b: a -> Type u#b) (wfr_a: wfr_t a) + (a_to_wfr_b: (x: a -> wfr_t (b x))) (xy1: (x: a & b x)) (xy2: (x: a & b x)) + : Type0 = + let (| x1, y1 |), (| x2, y2 |) = xy1, xy2 in + wfr_a.relation x1 x2 \/ (x1 == x2 /\ (a_to_wfr_b x1).relation y1 y2) + +val lex_dep_wfr (#a: Type u#a) (#b: a -> Type u#b) (wfr_a: wfr_t a) + (a_to_wfr_b: (x: a -> wfr_t (b x))) + : wfr: wfr_t (x: a & b x){wfr.relation == lex_dep_relation wfr_a a_to_wfr_b} + +/// `bool_wfr` is the well-founded relation on booleans that has false +/// preceding true. +/// +/// `bool_wfr.relation` is `bool_relation`, as defined below. + +let bool_relation (x1: bool) (x2: bool) : Type0 = x1 == false /\ x2 == true + +val bool_wfr: (wfr: wfr_t bool{wfr.relation == bool_relation}) + +/// `option_wfr wfr` is a `wfr_t` describing precedence for an `option +/// a`. It's built from a well-founded relation `wfr` on `a`. It has +/// `None` precede any `Some x`, and has `Some x1` precede `Some x2` +/// if `x1` precedes `x2` according to `wfr`. 
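+///
+/// For instance (an illustrative sketch), with `wfr = default_wfr nat`
+/// one would expect:
+///
+///   assert ((option_wfr (default_wfr nat)).relation None (Some 5));
+///   assert ((option_wfr (default_wfr nat)).relation (Some 3) (Some 5));
+///   assert (~ ((option_wfr (default_wfr nat)).relation (Some 5) None))
+///
+/// since `None` precedes every `Some x`, `3 << 5` holds on `nat`, and
+/// nothing precedes `None`.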
+/// +/// `(option_wfr wfr).relation` is `option_relation wfr` as defined below. + +let option_relation (#a: Type u#a) (wfr: wfr_t a) (opt1: option a) (opt2: option a) : Type0 = + Some? opt2 /\ (None? opt1 \/ wfr.relation (Some?.v opt1) (Some?.v opt2)) + +val option_wfr (#a: Type u#a) (wfr: wfr_t a) + : wfr': wfr_t (option a){wfr'.relation == option_relation wfr} diff --git a/stage0/ulib/LowStar.Buffer.fst b/stage0/ulib/LowStar.Buffer.fst new file mode 100644 index 00000000000..75d9b69432d --- /dev/null +++ b/stage0/ulib/LowStar.Buffer.fst @@ -0,0 +1,100 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.Buffer + +include LowStar.Monotonic.Buffer + +module P = FStar.Preorder +module G = FStar.Ghost +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(* + * Wrapper over LowStar.Monotonic.Buffer, with trivial preorders + * -- functions that take explicit preorder as arguments (e.g. sub etc.) + * -- these include allocation functions also + *) +let trivial_preorder (a:Type0) :srel a = fun _ _ -> True + +type buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a) + +unfold let null (#a:Type0) :buffer a = mnull #a #(trivial_preorder a) #(trivial_preorder a) + +unfold let gsub (#a:Type0) = mgsub #a #(trivial_preorder a) #(trivial_preorder a) (trivial_preorder a) + +unfold let gsub_inj (#a:Type0) = mgsub_inj #a #(trivial_preorder a) #(trivial_preorder a) (trivial_preorder a) (trivial_preorder a) + +[@@unifier_hint_injective] +inline_for_extraction +type pointer (a:Type0) = b:buffer a{length b == 1} + +inline_for_extraction +type pointer_or_null (a:Type0) = b:buffer a{if g_is_null b then True else length b == 1} + +inline_for_extraction let sub (#a:Type0) = msub #a #(trivial_preorder a) #(trivial_preorder a) (trivial_preorder a) + +inline_for_extraction let offset (#a:Type0) = moffset #a #(trivial_preorder a) #(trivial_preorder a) (trivial_preorder a) + +unfold let lbuffer (a:Type0) (len:nat) = lmbuffer a (trivial_preorder a) (trivial_preorder a) len + +inline_for_extraction let gcmalloc (#a:Type0) = mgcmalloc #a #(trivial_preorder a) + +inline_for_extraction let malloc (#a:Type0) = mmalloc #a #(trivial_preorder a) + +inline_for_extraction let alloca (#a:Type0) = malloca #a #(trivial_preorder a) + +inline_for_extraction let alloca_of_list (#a:Type0) = malloca_of_list #a #(trivial_preorder a) + +inline_for_extraction let gcmalloc_of_list (#a:Type0) = mgcmalloc_of_list #a #(trivial_preorder a) + +module L = FStar.List.Tot + +unfold +let assign_list_t #a (l: list a) = (b: buffer a) -> HST.Stack unit + (requires (fun h0 -> + live h0 b /\ + length b = L.length l)) + (ensures (fun h0 _ h1 -> + live h1 b /\ + (modifies (loc_buffer b) h0 h1) /\ + as_seq h1 b == Seq.seq_of_list l)) + +let rec assign_list #a (l: list a): assign_list_t l += fun b -> + Seq.lemma_seq_of_list_induction l; + match l with + | [] -> + let h = HST.get () in + assert (length b = 0); + assert 
(Seq.length (as_seq h b) = 0); + assert (Seq.equal (as_seq h b) (Seq.empty #a)); + assert (Seq.seq_of_list [] `Seq.equal` Seq.empty #a) + | hd :: tl -> + let b_hd = sub b 0ul 1ul in + let b_tl = offset b 1ul in + let h = HST.get () in + upd b_hd 0ul hd; + let h0 = HST.get () in + assign_list tl b_tl; + let h1 = HST.get () in + assert (as_seq h1 b_hd == as_seq h0 b_hd); + assert (get h1 b_hd 0 == hd); + assert (as_seq h1 b_tl == Seq.seq_of_list tl); + assert (Seq.equal (as_seq h1 b) (Seq.append (as_seq h1 b_hd) (as_seq h1 b_tl))); + assert ((Seq.seq_of_list l) == (Seq.cons hd (Seq.seq_of_list tl))) diff --git a/stage0/ulib/LowStar.BufferOps.fst b/stage0/ulib/LowStar.BufferOps.fst new file mode 100644 index 00000000000..a0f478e58a2 --- /dev/null +++ b/stage0/ulib/LowStar.BufferOps.fst @@ -0,0 +1,63 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.BufferOps + +(* Handy notations for LowStar.Buffer, so users can open this module + instead of the whole LowStar.Buffer, to just bring these operators + and notations into the scope without bringing any definition from + LowStar.Buffer into the scope. *) + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module U32 = FStar.UInt32 +module G = FStar.Ghost +module Seq = FStar.Seq +module B = LowStar.Buffer +module L = FStar.List.Tot + +inline_for_extraction +unfold +let op_Array_Access (#a:Type0) (#rrel #rel:B.srel a) = B.index #a #rrel #rel + +inline_for_extraction +unfold +let op_Array_Assignment (#a:Type0) (#rrel #rel:B.srel a) = B.upd #a #rrel #rel + +(* NOTE: DO NOT mark ( !* ) as inline_for_extraction, + because it is specially treated by KaRaMeL to extract as *p instead + of p[0] *) +let ( !* ) (#a:Type0) (#rrel #rel:B.srel a) (p:B.mpointer a rrel rel): + HST.Stack a + (requires (fun h -> B.live h p)) + (ensures (fun h0 x h1 -> B.live h1 p /\ x == B.get h0 p 0 /\ h1 == h0)) = + B.index p 0ul + +(* NOTE: DO NOT mark ( *= ) as inline_for_extraction, + because it is specially treated by KaRaMeL to extract as *p = v instead + of p[0] = v *) +let ( *= ) (#a:Type0) (#rrel #rel:B.srel a) (p:B.mpointer a rrel rel) (v:a) : HST.Stack unit + (requires (fun h -> B.live h p /\ rel (B.as_seq h p) (Seq.upd (B.as_seq h p) 0 v))) + (ensures (fun h0 _ h1 -> + B.live h1 p /\ + B.as_seq h1 p `Seq.equal` Seq.create 1 v /\ + B.modifies (B.loc_buffer p) h0 h1 + )) += B.upd p 0ul v + +// TODO: remove + +inline_for_extraction +let blit (#a:Type0) (#rrel1 #rel1 #rrel2 #rel2:B.srel a) = B.blit #a #rrel1 #rel1 #rrel2 #rel2 diff --git a/stage0/ulib/LowStar.BufferView.Down.fst b/stage0/ulib/LowStar.BufferView.Down.fst new file mode 100644 index 00000000000..b5ecee6fc50 --- /dev/null +++ b/stage0/ulib/LowStar.BufferView.Down.fst @@ -0,0 +1,363 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.BufferView.Down +open LowStar.Monotonic.Buffer +open FStar.Mul +module HS=FStar.HyperStack +module B=LowStar.Monotonic.Buffer +module Math=FStar.Math.Lemmas + +#set-options "--smtencoding.elim_box true" +#set-options "--smtencoding.l_arith_repr native" +#set-options "--smtencoding.nl_arith_repr wrapped" +#set-options "--z3rlimit_factor 4" //just being conservative +#set-options "--initial_fuel 1 --max_fuel 1" + +noeq +type buffer_view (src:Type0) (rrel rel:B.srel src) (dest:Type u#b) : Type u#b = + | BufferView: buf:B.mbuffer src rrel rel + -> v:view src dest + -> buffer_view src rrel rel dest + +let mk_buffer_view (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest) + : GTot (buffer dest) + = (| src, rrel, rel, BufferView b v |) + +let as_buffer (#b:Type) (v:buffer b) = + let (| _, _, _, BufferView b _ |) = v in + b + +let as_buffer_mk_buffer_view + (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest) = + () + +let get_view (#b : Type) (bv:buffer b) = + let (| _, _, _, BufferView _ v |) = bv in + v + +let get_view_mk_buffer_view + (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest) + = () + +let length (#b: _) (vb:buffer b) = + let b = as_buffer vb in + let v = get_view vb in + B.length b * View?.n v + +let length_eq (#b: _) (vb:buffer b) = () + +let indexing' (#a #b: _) (v:view a b) (len_as:nat) (i:nat{i < len_as * View?.n v}) + : Lemma (let n = View?.n v in + let vlen = len_as * n in + n * (i / n) < vlen /\ + n * (i / n) + n <= vlen) + = let n = View?.n v in + let vlen = len_as * n in + assert (n * (i / n) < vlen); + assert (i / n <= len_as - 1) + +let indexing #b vb i = indexing' (get_view vb) (B.length (as_buffer vb)) i + +let sel' (#a #b: _) (v:view a b) + (es:Seq.seq a) + (i:nat{i / View?.n v < Seq.length es}) + : GTot b + = let n = View?.n v in + let a_i = i / n in + let bs = View?.get v (Seq.index es a_i) in + Seq.index bs (i % n) + +let sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : GTot b + = indexing vb i; + let es = B.as_seq h (as_buffer vb) in + let v = get_view vb in + sel' v es i + +let lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem{B.live h b}) (s:_) + : Lemma (Seq.equal s (B.as_seq h b) ==> + B.g_upd_seq b s h == h) + = B.lemma_g_upd_with_same_seq b h + +let mods (#b: _) + (vb:buffer b) + (h h':HS.mem) + = B.modifies (B.loc_buffer (as_buffer vb)) h h' + +val upd' (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : GTot (h':HS.mem{ + (indexing vb i; + let b = as_buffer vb in + let v = get_view vb in + let n = View?.n v in + let a_i = i / n in + B.as_seq h' b == + Seq.upd (B.as_seq h b) a_i (Seq.index (B.as_seq h' b) a_i)) /\ + sel h' vb i == x /\ + (forall (j:nat{j< length vb}). 
i<>j ==> sel h' vb j == sel h vb j) /\ + (x == sel h vb i ==> h == h') /\ + mods vb h h' /\ + live h' vb /\ + FStar.HyperStack.ST.equal_domains h h' + }) +#push-options "--z3rlimit_factor 8" +let upd' #b h vb i x = + indexing vb i; + let es = B.as_seq h (as_buffer vb) in + let v = get_view vb in + let n = View?.n v in + let a_i = i / n in + let bs = View?.get v (Seq.index es a_i) in + let bs' = Seq.upd bs (i % n) x in + assert (x == sel h vb i ==> Seq.equal bs bs'); + let a' = View?.put v bs' in + let mem = B.g_upd (as_buffer vb) a_i a' h in + B.g_upd_seq_as_seq (as_buffer vb) (Seq.upd es a_i a') h; + lemma_g_upd_with_same_seq (as_buffer vb) h (Seq.upd es a_i a'); + mem +#pop-options + +let upd = upd' +let sel_upd #b vb i j x h = () +let lemma_upd_with_sel #b vb i h = () +let upd_modifies #b h vb i x = () +let upd_equal_domains #b h vb i x = () + +let rec seq_fold_right_gtot #a #b (s:Seq.seq a) (f:a -> b -> GTot b) (acc:b) + : GTot b (decreases (Seq.length s)) + = if Seq.length s = 0 then acc + else f (Seq.head s) (seq_fold_right_gtot (Seq.tail s) f acc) + +let cons_view #a #b (v:view a b) (x:a) (tl:Seq.seq b) : GTot (Seq.seq b) = + Seq.append (View?.get v x) tl + +let as_seq' (#a #b:_) (es:Seq.seq a) (v:view a b) : GTot (Seq.seq b) = + seq_fold_right_gtot #a #(Seq.seq b) es (cons_view #a #b v) Seq.empty + +let rec as_seq'_len (#a #b:_) (es:Seq.seq a) (v:view a b) + : Lemma (ensures (Seq.length (as_seq' es v) == View?.n v * Seq.length es)) + (decreases (Seq.length es)) + = if Seq.length es = 0 then () + else as_seq'_len (Seq.tail es) v + +let rec as_seq'_injective #a #b (v:view a b) (as1 as2:Seq.seq a) + : Lemma + (requires as_seq' as1 v `Seq.equal` as_seq' as2 v) + (ensures as1 `Seq.equal` as2) + (decreases (Seq.length as1)) + = as_seq'_len as1 v; + as_seq'_len as2 v; + assert (Seq.length as1 == Seq.length as2); + if Seq.length as1 = 0 then () + else let n = View?.n v in + as_seq'_len (Seq.tail as1) v; + as_seq'_len (Seq.tail as2) v; + Seq.lemma_append_inj + (View?.get v (Seq.head as1)) + (as_seq' (Seq.tail as1) v) + (View?.get v (Seq.head as2)) + (as_seq' (Seq.tail as2) v); + as_seq'_injective v (Seq.tail as1) (Seq.tail as2); + assert (as1 `Seq.equal` (Seq.head as1 `Seq.cons` Seq.tail as1)); + assert (as2 `Seq.equal` (Seq.head as2 `Seq.cons` Seq.tail as2)) + +let as_seq #b h vb = + let (| a, _, _, BufferView buf v |) = vb in + let es = B.as_seq h buf in + let bs = as_seq' #a #b es v in + as_seq'_len es v; + bs + +#push-options "--max_ifuel 0" +val sel'_tail (#a #b:_) (v:view a b) (es:Seq.seq a{Seq.length es > 0}) + (i:nat{View?.n v <= i /\ i < Seq.length es * View?.n v}) + : Lemma (let j = i - View?.n v in + sel' v es i == sel' v (Seq.tail es) j) +let sel'_tail #a #b v es i = + let len_as = Seq.length es in + indexing' v len_as i; + let n = View?.n v in + let j = i - n in + let a_i = i / n in + assert (sel' v es i == Seq.index (View?.get v (Seq.index es a_i)) (i % n)); + FStar.Math.Lemmas.lemma_mod_sub i n 1; + FStar.Math.Lemmas.add_div_mod_1 j n; + assert (j / n == (i / n) - 1) +#pop-options + +val as_seq'_sel' (#a #b: _) + (v:view a b) + (es:Seq.seq a) + (i:nat{i < Seq.length es * View?.n v}) + : Lemma + (ensures ( + as_seq'_len es v; + sel' v es i == Seq.index (as_seq' es v) i)) + (decreases (Seq.length es)) + +let rec as_seq'_sel' #a #b v es i = + as_seq'_len es v; + let n : pos = View?.n v in + assert (i / n < Seq.length es); + if Seq.length es = 0 then () + else let bs = as_seq' es v in + assert (Seq.length bs = n + Seq.length (as_seq' (Seq.tail es) v)); + if (i < n) then + begin + 
assert (Seq.index bs i == Seq.index (View?.get v (Seq.head es)) i) + end + else + begin + let as' = Seq.tail es in + as_seq'_len as' v; + let j = i - n in + assert (j / n < Seq.length as'); + assert (j < Seq.length (as_seq' as' v)); + as_seq'_sel' v as' j; + assert (sel' v as' j == Seq.index (as_seq' as' v) j); + assert (Seq.index (as_seq' es v) i == + Seq.index (as_seq' as' v) j); + sel'_tail v es i + end + +let as_seq_sel #b h vb i = + indexing vb i; + let (| a, _, _, BufferView buf v |) = vb in + let es = B.as_seq h buf in + as_seq'_len es v; + as_seq'_sel' v es i + +let get_sel #b h vb i = as_seq_sel h vb i + +val as_seq'_slice (#a #b: _) + (v:view a b) + (es:Seq.seq a) + (i:nat{i < Seq.length es * View?.n v}) + : Lemma + (ensures ( + as_seq'_len es v; + indexing' v (Seq.length es) i; + let n = View?.n v in + View?.get v (Seq.index es (i / n)) == + Seq.slice (as_seq' es v) (n * (i /n)) (n * (i / n) + n))) + (decreases (Seq.length es)) + +#push-options "--z3rlimit 100" +let rec as_seq'_slice #a #b v es i = + let n = View?.n v in + if Seq.length es = 0 then () + else let bs = as_seq' es v in + if i < n then + begin + assert (View?.get v (Seq.index es (i / n)) `Seq.equal` + Seq.slice (as_seq' es v) (n * (i /n)) (n * (i / n) + n)) + end + else let as' = Seq.tail es in + let j = i - n in + as_seq'_slice v (Seq.tail es) (i - n); + as_seq'_len as' v; + indexing' v (Seq.length as') j; + FStar.Math.Lemmas.add_div_mod_1 j n; + assert (View?.get v (Seq.index as' (j / n)) `Seq.equal` + Seq.slice (as_seq' as' v) (n * (j / n)) (n * (j / n) + n)); + assert (Seq.slice (as_seq' as' v) (n * (j / n)) (n * (j / n) + n) `Seq.equal` + Seq.slice (as_seq' es v) (n * (j / n) + n) (n * (j / n) + n + n)); + FStar.Math.Lemmas.add_div_mod_1 j n; + assert (j / n == i / n - 1) +#pop-options + +let put_sel #b h vb i = + indexing vb i; + let v = get_view vb in + let n = View?.n v in + let es = (B.as_seq h (as_buffer vb)) in + as_seq'_slice v es i; + as_seq'_len es v; + assert (View?.put v (View?.get v (Seq.index es (i / n))) == + View?.put v (Seq.slice (as_seq' es v) (n * (i /n)) (n * (i / n) + n))) + +let rec upd_seq' (#a #b: _) (v:view a b) (s:Seq.seq b{Seq.length s % View?.n v = 0}) (acc:Seq.seq a) + : GTot (Seq.lseq a (Seq.length acc + Seq.length s / View?.n v)) + (decreases (Seq.length s)) = + let n = View?.n v in + if Seq.length s = 0 then acc + else let pfx, suffix = Seq.split s n in + Math.lemma_mod_sub (Seq.length s) n 1; + let es = upd_seq' v suffix acc in + Seq.cons (View?.put v pfx) es + +let upd_seq #b h vb s = + let (| a, _, _, BufferView b v |) = vb in + Math.cancel_mul_mod (B.length b) (View?.n v); + let es : Seq.seq a = upd_seq' v s Seq.empty in + B.g_upd_seq b es h + +let as_seq'_cons (#a #b:_) (v:view a b) (hd:a) (tl:Seq.seq a) + : Lemma (as_seq' (Seq.cons hd tl) v == View?.get v hd `Seq.append` as_seq' tl v) + = let s = Seq.cons hd tl in + assert (Seq.head s == hd); + assert (Seq.tail s `Seq.equal` tl) + +let rec upd_seq'_spec (#a #b: _) (v:view a b) (s:Seq.seq b{Seq.length s % View?.n v = 0}) (acc:Seq.seq a) + : Lemma + (ensures ( + let es = upd_seq' v s acc in + as_seq' es v `Seq.equal` Seq.append s (as_seq' acc v))) + (decreases (Seq.length s)) + = if Seq.length s = 0 then () + else let n = View?.n v in + let pfx, suffix = Seq.split s n in + Math.lemma_mod_sub (Seq.length s) n 1; + upd_seq'_spec v suffix acc; + as_seq'_slice v (upd_seq' v s acc) 0; + let as' = upd_seq' v suffix acc in + assert (as_seq' as' v `Seq.equal` Seq.append suffix (as_seq' acc v)); + let es = upd_seq' v s acc in + assert 
(es `Seq.equal` Seq.cons (View?.put v pfx) as'); + as_seq'_cons v (View?.put v pfx) as' + +#push-options "--z3rlimit 20" +let upd_seq_spec (#b: _) (h:HS.mem) (vb:buffer b{live h vb}) (s:Seq.seq b{Seq.length s = length vb}) + = let h' = upd_seq h vb s in + Math.cancel_mul_mod (B.length (as_buffer vb)) (View?.n (get_view vb)); + let es = upd_seq' (get_view vb) s Seq.empty in + B.g_upd_seq_as_seq (as_buffer vb) es h; + lemma_g_upd_with_same_seq (as_buffer vb) h es; + assert (FStar.HyperStack.ST.equal_domains h h'); + assert (modifies vb h h'); + upd_seq'_spec (get_view vb) s Seq.empty; + assert (as_seq h' vb `Seq.equal` s); + assert (as_seq h vb == as_seq' (B.as_seq h (as_buffer vb)) (get_view vb)); + assert (as_seq h' vb == s); + assert (es == B.as_seq h' (as_buffer vb)); + let v= get_view vb in + FStar.Classical.forall_intro_2 (fun as1 as2 -> + Classical.move_requires (as_seq'_injective v as1) as2 + <: Lemma ((as_seq' as1 v `Seq.equal` as_seq' as2 v) ==> (as1 `Seq.equal` as2))) +#pop-options diff --git a/stage0/ulib/LowStar.BufferView.Down.fsti b/stage0/ulib/LowStar.BufferView.Down.fsti new file mode 100644 index 00000000000..7862da54982 --- /dev/null +++ b/stage0/ulib/LowStar.BufferView.Down.fsti @@ -0,0 +1,272 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.BufferView.Down + +(** + * A "down view" on a buffer allows treating a + * `Buffer.buffer a` as a + * `BufferView.Down.buffer b` + * + * A "view" on a buffer is intended for specification purposes only + * It does not correspond to a pointer cast in C. + * + * Building a down view requires providing a pair of mutually inverse functions + * from `a` to sequences of `b`. (e.g., from a `lbuffer u32 n` to a `lbuffer u8 (4*n)`) + * + * I.e., a down view allows "exploding" an `a` into its component `b`'s. + + * In contrast, an "up view" (see LowStar.BufferView.Up) allows + * "compacting" a sequences of `a`'s into a `b`. + * (e.g., from an `lbuffer u8 (4*n)` to an `lbuffer u32 n`) + **) + +open LowStar.Monotonic.Buffer +open FStar.Mul +module HS=FStar.HyperStack +module B=LowStar.Monotonic.Buffer + +(** Definition of a view **) + +/// `f` and `g` are mutual inverses +let inverses #a #b + (f: (a -> GTot b)) + (g: (b -> GTot a)) = + (forall x. g (f x) == x) /\ + (forall y. f (g y) == y) + +/// `view a b` maps single `a`'s to an `n`-lengthed sequence of `b`s +noeq +type view (a:Type) (b:Type) = + | View : n:pos -> + get:(a -> GTot (Seq.lseq b n)) -> + put:(Seq.lseq b n -> GTot a) { + inverses get put + } -> + view a b + +/// `buffer_views src dest`: +/// +/// The main abstract type provided by this module. This type is +/// indexed by both the `src` and `dest` types. The former (`src`) is +/// the type of the underlying B.buffer's contents: as such, it is +/// forced to be in universe 0. +/// +/// The destination type `dest` is for specification only and is not +/// subject to the same universe constraints by the memory model. 
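+///
+/// As an aside, the simplest possible instance of the `view` type
+/// defined above maps each source element to a singleton sequence (a
+/// sketch only, not part of this interface; the name `singleton_view`
+/// is hypothetical):
+///
+/// ```
+/// let singleton_view (a:Type0) : view a a =
+///   View 1 (fun x -> Seq.create 1 x) (fun s -> Seq.index s 0)
+/// ```
+///
+/// Discharging the `inverses` refinement may require sequence
+/// extensionality (e.g., `Seq.lemma_eq_elim`); a richer view, say from
+/// 32-bit words down to bytes, follows the same pattern with `n = 4` and
+/// an explicit encoding/decoding pair.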
+/// + +val buffer_view (src:Type0) (rrel rel:B.srel src) (dest:Type u#b) : Type u#b + +/// `buffer b`: In contrast to `buffer_view`, `buffer b` hides the +/// source type of the view. As such, it is likely more convenient to +/// use in specifications and the rest of this interface is designed +/// around this type. +/// +/// However, the type lives in a higher universe, +/// this means, for instance, that values of `buffer b` cannot be +/// stored in the heap. +/// +/// We leave its definition transparent in case clients wish to +/// manipulate both the `src` and `dest` types explicitly (e.g., to +/// stay in a lower universe) + +let buffer (dest:Type u#a) : Type u#(max 1 a) = (src:Type0 & rrel:B.srel src & rel:B.srel src & buffer_view src rrel rel dest) + +let as_buffer_t (#dest:Type) (b:buffer dest) = B.mbuffer (Mkdtuple4?._1 b) (Mkdtuple4?._2 b) (Mkdtuple4?._3 b) + +/// `mk_buffer_view`: The main constructor +val mk_buffer_view (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest) + : GTot (buffer dest) + + +/// `as_buffer`: Projecting the underlying B.buffer from its view +val as_buffer (#b:Type) (v:buffer b) : as_buffer_t v + +/// A lemma-relating projector to constructor +val as_buffer_mk_buffer_view (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest) + : Lemma (let bv = mk_buffer_view b v in + Mkdtuple4?._1 bv == src /\ + Mkdtuple4?._2 bv == rrel /\ + Mkdtuple4?._3 bv == rel /\ + as_buffer bv == b) + [SMTPat (as_buffer (mk_buffer_view b v))] + +/// `get_view`: Projecting the view functions itself +val get_view (#b : Type) (v:buffer b) : view (Mkdtuple4?._1 v) b + +/// A lemma-relating projector to constructor +val get_view_mk_buffer_view (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest) + : Lemma (let bv = mk_buffer_view b v in + Mkdtuple4?._1 bv == src /\ + get_view bv == v) + [SMTPat (get_view (mk_buffer_view b v))] + +/// `live h vb`: liveness of a buffer view corresponds to liveness of +/// the underlying buffer +unfold +let live #b h (vb:buffer b) = live h (as_buffer vb) + + +/// `length vb`: is defined in terms of the underlying buffer +/// +/// Internally, it is defined as +/// +/// ``` +/// length (as_buffer vb) * View?.n (get_view vb) +/// ``` +/// +/// However, rather than expose this definition to callers, we treat +/// length abstractly. +/// +/// To reveal its definition explicitly, use the `length_eq` lemma below. 
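+///
+/// For instance (an illustrative sketch, not part of this interface), a
+/// client whose view explodes each source element into 4 destination
+/// elements can expose the arithmetic relationship by calling `length_eq`:
+///
+/// ```
+/// let length_times_four (#b:_) (vb:buffer b{View?.n (get_view vb) == 4})
+///   : Lemma (length vb == 4 * B.length (as_buffer vb))
+///   = length_eq vb
+/// ```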
+val length (#b: _) (vb:buffer b) + : GTot nat + +/// `length_eq`: Reveals the definition of the `length` function +val length_eq (#b: _) (vb:buffer b) + : Lemma (length vb = B.length (as_buffer vb) * View?.n (get_view vb)) + +/// `indexing` +val indexing (#b: _) (vb:buffer b) (i:nat{i < length vb}) + : Lemma (let n = View?.n (get_view vb) in + let vlen = length vb in + n * (i / n) < vlen /\ + n * (i / n) + n <= vlen) + +/// `sel h vb i` : selects element at index `i` from the buffer `vb` in heap `h` +val sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : GTot b + +/// `upd h vb i x`: stores `x` at index `i` in the buffer `vb` in heap `h` +val upd (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : GTot HS.mem + +/// `sel_upd`: A classic select/update lemma for reasoning about maps +val sel_upd (#b:_) + (vb:buffer b) + (i:nat{i < length vb}) + (j:nat{j < length vb}) + (x:b) + (h:HS.mem{live h vb}) + : Lemma (if i = j + then sel (upd h vb i x) vb j == x + else sel (upd h vb i x) vb j == sel h vb j) + [SMTPat (sel (upd h vb i x) vb j)] + +val lemma_upd_with_sel (#b:_) + (vb:buffer b) + (i:nat{i < length vb}) + (h:HS.mem{live h vb}) + :Lemma (upd h vb i (sel h vb i) == h) + +/// `modifies` on views is just defined in terms of the underlying buffer +unfold +let modifies (#b: _) + (vb:buffer b) + (h h':HS.mem) + = B.modifies (B.loc_buffer (as_buffer vb)) h h' + +/// `upd_modifies`: Footprint of `upd` +val upd_modifies (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : Lemma (ensures (modifies vb h (upd h vb i x) /\ + live (upd h vb i x) vb)) + [SMTPat (upd h vb i x)] + +/// `upd_equal_domains`: `upd` does not modify the memory domains +val upd_equal_domains (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : Lemma (FStar.HyperStack.ST.equal_domains h (upd h vb i x)) + +/// `as_seq h vb`: Viewing the entire buffer as a sequence of `b` +val as_seq (#b: _) (h:HS.mem) (vb:buffer b) + : GTot (Seq.lseq b (length vb)) + +/// `as_seq_sel`: +/// +/// Relates selecting an element in the heap to selecting an element +/// from the sequence +val as_seq_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (sel h vb i == Seq.index (as_seq h vb) i) + +/// `get_sel`: +/// +/// Relates selecting an element from the view to translating an +/// element of the underlying buffer to a sequence and selecting +/// the corresponding element there +val get_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (let v = get_view vb in + let n = View?.n v in + length_eq vb; + sel h vb i == + Seq.index + (View?.get v (Seq.index (B.as_seq h (as_buffer vb)) + (i / n))) + (i % n)) + +/// `put_sel`: +/// +/// Relates selecting a subsequence of the underlying buffer +/// to selecting and translating an element from the view. 
+val put_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (let v = get_view vb in + let n = View?.n v in + length_eq vb; + indexing vb i; + View?.put v (Seq.slice (as_seq h vb) + (n * (i / n)) + (n * (i / n) + n)) == + Seq.index (B.as_seq h (as_buffer vb)) + (i / n)) + +/// `upd_seq h vb s`: Updating the entire sequence in one go +val upd_seq (#b: _) (h:HS.mem) (vb:buffer b{live h vb}) (s:Seq.seq b{Seq.length s = length vb}) + : GTot HS.mem + +val upd_seq_spec (#b: _) (h:HS.mem) (vb:buffer b{live h vb}) (s:Seq.seq b{Seq.length s = length vb}) + : Lemma (let h' = upd_seq h vb s in + as_seq h' vb == s /\ + FStar.HyperStack.ST.equal_domains h h' /\ + modifies vb h h' /\ + (as_seq h vb == s ==> h==h')) diff --git a/stage0/ulib/LowStar.BufferView.Up.fst b/stage0/ulib/LowStar.BufferView.Up.fst new file mode 100644 index 00000000000..fe270504b7f --- /dev/null +++ b/stage0/ulib/LowStar.BufferView.Up.fst @@ -0,0 +1,205 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.BufferView.Up +module Down = LowStar.BufferView.Down + +noeq +type buffer (dest:Type0) : Type u#1 = + | Buffer: src:Type0 + -> down_buf:Down.buffer src + -> v:view src dest{Down.length down_buf % View?.n v == 0} + -> buffer dest + +let mk_buffer #src #dest down_buf v = Buffer src down_buf v + +let buffer_src #b bv = Buffer?.src bv +let as_down_buffer #b bv = Buffer?.down_buf bv +let get_view #b v = Buffer?.v v +let as_buffer_mk_buffer #_ #_ _ _ = () +let length #b vb = + Down.length (as_down_buffer vb) / View?.n (get_view vb) + +let length_eq #_ _ = () + +//#reset-options "--max_fuel 0 --max_ifuel 1" +let view_indexing #b vb i + = let n = View?.n (get_view vb) in + length_eq vb; + FStar.Math.Lemmas.distributivity_add_left (length vb) (-i) n; + let open FStar.Mul in + assert ((length vb + (-i)) * n = length vb * n + (-i) * n); + assert (length vb > i); + assert (length vb + (-i) > 0); + assert (n <= (length vb + (-i)) * n) + +let split_at_i (#b: _) (vb:buffer b) (i:nat{i < length vb}) (h:HS.mem) + : GTot (frags: + (let src_t = buffer_src vb in + Seq.seq src_t & + Seq.lseq src_t (View?.n (get_view vb)) & + Seq.seq src_t){ + let prefix, es, suffix = frags in + Down.as_seq h (as_down_buffer vb) == + (prefix `Seq.append` (es `Seq.append` suffix)) + }) + = let open FStar.Mul in + let s0 = Down.as_seq h (as_down_buffer vb) in + let v = get_view vb in + let n = View?.n v in + let start = i * n in + view_indexing vb i; + length_eq vb; + let prefix, suffix = Seq.split s0 start in + Seq.lemma_split s0 start; + let es, tail = Seq.split suffix n in + Seq.lemma_split suffix n; + prefix, es, tail + +let sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) + : GTot b + = let v = get_view vb in + let _, es, _ = split_at_i vb i h in + View?.get v es + +let upd' (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : GTot (h':HS.mem{sel h' vb i == x}) + = let open FStar.Mul in + let v = get_view vb in + let prefix, _, suffix = split_at_i vb i 
h in + let s1 = prefix `Seq.append` (View?.put v x `Seq.append` suffix) in + let h' = Down.upd_seq h (as_down_buffer vb) s1 in + Down.upd_seq_spec h (as_down_buffer vb) s1; + assert (Down.as_seq h' (as_down_buffer vb) == s1); + let n = View?.n v in + assert (sel h' vb i == View?.get v (Seq.slice s1 (i * n) (i * n + n))); + assert (Seq.slice s1 (i * n) (i * n + n) `Seq.equal` View?.put v x); + h' + + +let upd #b h vb i x + : GTot HS.mem + = upd' #b h vb i x + +let sel_upd1 (#b:_) (vb:buffer b) (i:nat{i < length vb}) (x:b) (h:HS.mem{live h vb}) + : Lemma (sel (upd h vb i x) vb i == x) + = () + +let lt_leq_mul (min:nat) (max:nat{min < max}) (n:nat) + : Lemma (FStar.Mul.(min * n + n <= max * n)) + = let open FStar.Mul in + assert ((min * n) + n = (min + 1) * n); + assert ((min * n) + n <= max * n) + +#set-options "--z3rlimit 20" +let sel_upd2 (#b:_) (vb:buffer b) + (i:nat{i < length vb}) + (j:nat{j < length vb /\ i<>j}) + (x:b) + (h:HS.mem{live h vb}) + : Lemma (sel (upd h vb i x) vb j == sel h vb j) + = let open FStar.Mul in + let v = get_view vb in + view_indexing vb i; + view_indexing vb j; + let h' = upd h vb i x in + let s0 = Down.as_seq h (as_down_buffer vb) in + let s1 = Down.as_seq h' (as_down_buffer vb) in + let min = if i < j then i else j in + let max = if i < j then j else i in + let n = View?.n v in + lt_leq_mul min max n; + let min0, max0 = + Seq.slice s0 (min * n) ((min * n) + n), + Seq.slice s0 (max * n) ((max * n) + n) + in + let _, s_j, _ = split_at_i vb j h in + let min1, max1 = + Seq.slice s1 (min * n) ((min * n) + n), + Seq.slice s1 (max * n) ((max * n) + n) + in + let _, s_j', _ = split_at_i vb j h' in + let prefix, s_i, suffix = split_at_i vb i h in + Down.upd_seq_spec h (as_down_buffer vb) (prefix `Seq.append` (View?.put v x `Seq.append` suffix)); + if i < j + then begin + assert (Seq.equal max0 s_j); + assert (Seq.equal max1 s_j'); + assert (Seq.equal s_j s_j') + end + else begin + assert (Seq.equal min0 s_j); + assert (Seq.equal min1 s_j'); + assert (Seq.equal s_j s_j') + end + +let sel_upd #b vb i j x h = + if i=j then sel_upd1 vb i x h + else sel_upd2 vb i j x h + +let lemma_upd_with_sel #b vb i h = + let v = get_view vb in + let prefix, es, suffix = split_at_i vb i h in + let s0 = Down.as_seq h (as_down_buffer vb) in + let s1 = prefix `Seq.append` (View?.put v (View?.get v es) `Seq.append` suffix) in + assert (Seq.equal s0 s1); + Down.upd_seq_spec h (as_down_buffer vb) s0 + +let upd_modifies #b h vb i x + = let open FStar.Mul in + let v = get_view vb in + let prefix, _, suffix = split_at_i vb i h in + let s1 = prefix `Seq.append` (View?.put v x `Seq.append` suffix) in + Down.upd_seq_spec h (as_down_buffer vb) s1 + +let upd_equal_domains #b h vb i x + = let open FStar.Mul in + let v = get_view vb in + let prefix, _, suffix = split_at_i vb i h in + let s1 = prefix `Seq.append` (View?.put v x `Seq.append` suffix) in + upd_modifies h vb i x; + Down.upd_seq_spec h (as_down_buffer vb) s1 + +let rec as_seq' (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i <= length vb}) + : GTot (Seq.lseq b (length vb - i)) + (decreases (length vb - i)) + = let v = get_view vb in + if i = length vb + then Seq.empty + else let _ = view_indexing vb i in + let _, s_i, suffix = split_at_i vb i h in + View?.get v s_i `Seq.cons` as_seq' h vb (i + 1) + +let as_seq (#b: _) (h:HS.mem) (vb:buffer b) = as_seq' h vb 0 + +#set-options "--max_fuel 1 --max_ifuel 1" +let as_seq_sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) + : Lemma (ensures (sel h vb i == Seq.index (as_seq h vb) i)) + = + let rec 
as_seq'_as_seq' (j:nat) + (i:nat{j + i < length vb}) + : Lemma (ensures (Seq.index (as_seq' h vb j) i == Seq.index (as_seq' h vb (j + i)) 0)) + (decreases i) + = if i = 0 then () else as_seq'_as_seq' (j + 1) (i - 1) + in + as_seq'_as_seq' 0 i + +#set-options "--max_fuel 0 --max_ifuel 1" +let get_sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) = () +let put_sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) = () diff --git a/stage0/ulib/LowStar.BufferView.Up.fsti b/stage0/ulib/LowStar.BufferView.Up.fsti new file mode 100644 index 00000000000..113983a3614 --- /dev/null +++ b/stage0/ulib/LowStar.BufferView.Up.fsti @@ -0,0 +1,206 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.BufferView.Up + +(** + * A "view" on a buffer allows treating a + * `Buffer.buffer a` as a + * `BufferView.buffer b` + * + * A "view" on a buffer is intended for specification purposes only + * It does not correspond to a pointer cast in C. + * + * Building a view requires providing a pair of mutually inverse functions + * from sequences of `a` (sub-sequences of the source buffer) + * to elements of `b`. + * + **) +open LowStar.Monotonic.Buffer + +module HS=FStar.HyperStack +module B=LowStar.Monotonic.Buffer +module Down=LowStar.BufferView.Down + +(** Definition of a view **) + +/// `f` and `g` are mutual inverses +let inverses = Down.inverses + +/// `view a b` maps `n`-lengthed sequences of `a` to a single `b` +noeq +type view (a:Type) (b:Type) = + | View : n:pos -> + get:(Seq.lseq a n -> GTot b) -> + put:(b -> GTot (Seq.lseq a n)) { + inverses get put + } -> + view a b + +val buffer (dest:Type0) : Type u#1 + +/// `mk_buffer`: The main constructor +val mk_buffer (#src:Type0) + (#dest:Type0) + (b:Down.buffer src) + (v:view src dest{ + Down.length b % View?.n v == 0 + }) + : GTot (buffer dest) + +val buffer_src (#dest:Type) (b:buffer dest) : Type0 + +/// `as_down_buffer`: Projecting the underlying Down.buffer from its view +val as_down_buffer (#b:Type) (v:buffer b) : Down.buffer (buffer_src v) + +/// `get_view`: Projecting the view functions itself +val get_view (#b : Type) (v:buffer b) : view (buffer_src v) b + +/// A lemma-relating projector to constructor +val as_buffer_mk_buffer (#src #dest:Type) + (d:Down.buffer src) + (v:view src dest{ + Down.length d % View?.n v == 0 + }) + : Lemma (let bv = mk_buffer d v in + buffer_src bv == src /\ + as_down_buffer bv == d /\ + get_view bv == v) + [SMTPatOr [[SMTPat (buffer_src (mk_buffer d v))]; + [SMTPat (as_down_buffer (mk_buffer d v))]; + [SMTPat (get_view (mk_buffer d v))]]] + +/// `live h vb`: liveness of a buffer view corresponds to liveness of +/// the underlying buffer +unfold +let live #b h (vb:buffer b) = Down.live h (as_down_buffer vb) +val length (#b: _) (vb:buffer b) + : GTot nat + +/// `length_eq`: Reveals the definition of the `length` function +val length_eq (#b: _) (vb:buffer b) + : Lemma (length vb = Down.length (as_down_buffer vb) / View?.n (get_view vb)) + +/// `view_indexing`: A lemma 
that requires a bit of non-linear +/// arithmetic, necessary for some of the specs below and convenient +/// when relating the underlying buffer to its view. +val view_indexing (#b: _) (vb:buffer b) (i:nat{i < length vb}) + : Lemma (let open FStar.Mul in + let n = View?.n (get_view vb) in + n <= length vb * n - i * n) + +/// `sel h vb i` : selects element at index `i` from the buffer `vb` in heap `h` +val sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : GTot b + +/// `upd h vb i x`: stores `x` at index `i` in the buffer `vb` in heap `h` +val upd (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : GTot HS.mem + +/// `sel_upd`: A classic select/update lemma for reasoning about maps +val sel_upd (#b:_) + (vb:buffer b) + (i:nat{i < length vb}) + (j:nat{j < length vb}) + (x:b) + (h:HS.mem{live h vb}) + : Lemma (if i = j + then sel (upd h vb i x) vb j == x + else sel (upd h vb i x) vb j == sel h vb j) + [SMTPat (sel (upd h vb i x) vb j)] + +val lemma_upd_with_sel (#b:_) + (vb:buffer b) + (i:nat{i < length vb}) + (h:HS.mem{live h vb}) + :Lemma (upd h vb i (sel h vb i) == h) + +/// `modifies` on views is just defined in terms of the underlying buffer +unfold +let modifies (#b: _) + (vb:buffer b) + (h h':HS.mem) + = Down.modifies (as_down_buffer vb) h h' + +/// `upd_modifies`: Footprint of `upd` +val upd_modifies (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : Lemma (ensures (modifies vb h (upd h vb i x) /\ + live (upd h vb i x) vb)) + [SMTPat (upd h vb i x)] + +/// `upd_equal_domains`: `upd` does not modify the memory domains +val upd_equal_domains (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : Lemma (FStar.HyperStack.ST.equal_domains h (upd h vb i x)) + +/// `as_seq h vb`: Viewing the entire buffer as a sequence of `b` +val as_seq (#b: _) (h:HS.mem) (vb:buffer b) + : GTot (Seq.lseq b (length vb)) + +/// `as_seq_sel`: +/// +/// Relates selecting an element in the heap to selecting an element +/// from the sequence +val as_seq_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (sel h vb i == Seq.index (as_seq h vb) i) + +/// `get_sel`: +/// +/// Relates selecting an element from the view to translating a +/// subsequence of the underlying buffer through the view +val get_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (let open FStar.Mul in + let v = get_view vb in + let n = View?.n v in + length_eq vb; + view_indexing vb i; + sel h vb i == + View?.get v (Seq.slice (Down.as_seq h (as_down_buffer vb)) (i * n) (i * n + n))) + +/// `put_sel`: +/// +/// Relates selecting a subsequence of the underlying buffer +/// to selecting and translating an element from the view. +val put_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (let open FStar.Mul in + let v = get_view vb in + let n = View?.n v in + length_eq vb; + view_indexing vb i; + View?.put v (sel h vb i) == + Seq.slice (Down.as_seq h (as_down_buffer vb)) (i * n) (i * n + n)) diff --git a/stage0/ulib/LowStar.BufferView.fst b/stage0/ulib/LowStar.BufferView.fst new file mode 100644 index 00000000000..f33679d3f96 --- /dev/null +++ b/stage0/ulib/LowStar.BufferView.fst @@ -0,0 +1,207 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.BufferView + +noeq +type buffer_view (a:Type0) (rrel rel:B.srel a) (b:Type u#b) : Type u#b = + | BufferView: buf:B.mbuffer a rrel rel + -> v:view a b{B.length buf % View?.n v == 0} + -> buffer_view a rrel rel b + +let mk_buffer_view #src #rrel #rel #dest b v = (| src, rrel, rel, BufferView b v |) + +let as_buffer #b v = BufferView?.buf (Mkdtuple4?._4 v) + +let as_buffer_mk_buffer_view #_ #_ #_ #_ _ _ = () + +let get_view #b v = BufferView?.v (Mkdtuple4?._4 v) + +let get_view_mk_buffer_view #_ #_ #_ #_ _ _ = () + +let length #b vb = B.length (as_buffer vb) / View?.n (get_view vb) + +let length_eq #_ _ = () + +#reset-options "--max_fuel 0 --max_ifuel 1" +let view_indexing #b vb i + = let n = View?.n (get_view vb) in + length_eq vb; + FStar.Math.Lemmas.distributivity_add_left (length vb) (-i) n; + let open FStar.Mul in + assert ((length vb + (-i)) * n = length vb * n + (-i) * n); + assert (length vb > i); + assert (length vb + (-i) > 0); + assert (n <= (length vb + (-i)) * n) + + +let split_at_i (#b: _) (vb:buffer b) (i:nat{i < length vb}) (h:HS.mem) + : GTot (frags: + (let src_t = Mkdtuple4?._1 vb in + Seq.seq src_t & + Seq.lseq src_t (View?.n (get_view vb)) & + Seq.seq src_t){ + let prefix, es, suffix = frags in + B.as_seq h (as_buffer vb) == + (prefix `Seq.append` (es `Seq.append` suffix)) + }) + = let open FStar.Mul in + let s0 = B.as_seq h (as_buffer vb) in + let v = get_view vb in + let n = View?.n v in + let start = i * n in + view_indexing vb i; + length_eq vb; + let prefix, suffix = Seq.split s0 start in + Seq.lemma_split s0 start; + let es, tail = Seq.split suffix n in + Seq.lemma_split suffix n; + prefix, es, tail + +let sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) + : GTot b + = let v = get_view vb in + let _, es, _ = split_at_i vb i h in + View?.get v es + +let upd #b h vb i x + : GTot HS.mem + = let open FStar.Mul in + let v = get_view vb in + let prefix, _, suffix = split_at_i vb i h in + let s1 = prefix `Seq.append` (View?.put v x `Seq.append` suffix) in + B.g_upd_seq (as_buffer vb) s1 h + +let sel_upd1 (#b:_) (vb:buffer b) (i:nat{i < length vb}) (x:b) (h:HS.mem{live h vb}) + : Lemma (sel (upd h vb i x) vb i == x) + = + let v = get_view vb in + view_indexing vb i; + let h' = upd h vb i x in + let prefix, es, suffix = split_at_i vb i h in + let es' = View?.put v x in + let s' = B.as_seq h' (as_buffer vb) in + B.g_upd_seq_as_seq (as_buffer vb) (prefix `Seq.append` (es' `Seq.append` suffix)) h; + assert (s' == prefix `Seq.append` (es' `Seq.append` suffix)); + let prefix', es'', suffix' = split_at_i vb i h' in + assert (s' == prefix' `Seq.append` (es'' `Seq.append` suffix')); + Seq.lemma_append_inj prefix (es' `Seq.append` suffix) + prefix' (es'' `Seq.append` suffix'); + Seq.lemma_append_inj es' suffix + es'' suffix'; + assert (es' == es'') + +let lt_leq_mul (min:nat) (max:nat{min < max}) (n:nat) + : Lemma (FStar.Mul.(min * n + n <= max * n)) + = let open FStar.Mul in + assert ((min * n) + n = (min + 1) * n); + assert ((min * n) + n <= max * n) + +#set-options "--z3rlimit 20" +let sel_upd2 (#b:_) (vb:buffer b) + (i:nat{i < length vb}) + (j:nat{j < 
length vb /\ i<>j}) + (x:b) + (h:HS.mem{live h vb}) + : Lemma (sel (upd h vb i x) vb j == sel h vb j) + = let open FStar.Mul in + let v = get_view vb in + view_indexing vb i; + view_indexing vb j; + let h' = upd h vb i x in + let s0 = B.as_seq h (as_buffer vb) in + let s1 = B.as_seq h' (as_buffer vb) in + let min = if i < j then i else j in + let max = if i < j then j else i in + let n = View?.n v in + lt_leq_mul min max n; + let min0, max0 = + Seq.slice s0 (min * n) ((min * n) + n), + Seq.slice s0 (max * n) ((max * n) + n) + in + let _, s_j, _ = split_at_i vb j h in + let min1, max1 = + Seq.slice s1 (min * n) ((min * n) + n), + Seq.slice s1 (max * n) ((max * n) + n) + in + let _, s_j', _ = split_at_i vb j h' in + let prefix, s_i, suffix = split_at_i vb i h in + B.g_upd_seq_as_seq (as_buffer vb) (prefix `Seq.append` (View?.put v x `Seq.append` suffix)) h; + if i < j + then begin + assert (Seq.equal max0 s_j); + assert (Seq.equal max1 s_j'); + assert (Seq.equal s_j s_j') + end + else begin + assert (Seq.equal min0 s_j); + assert (Seq.equal min1 s_j'); + assert (Seq.equal s_j s_j') + end + +let sel_upd #b vb i j x h = + if i=j then sel_upd1 vb i x h + else sel_upd2 vb i j x h + +let lemma_upd_with_sel #b vb i h = + let v = get_view vb in + let prefix, es, suffix = split_at_i vb i h in + let s0 = B.as_seq h (as_buffer vb) in + let s1 = prefix `Seq.append` (View?.put v (View?.get v es) `Seq.append` suffix) in + assert (Seq.equal s0 s1); + B.lemma_g_upd_with_same_seq (as_buffer vb) h + +let upd_modifies #b h vb i x + = let open FStar.Mul in + let v = get_view vb in + let prefix, _, suffix = split_at_i vb i h in + let s1 = prefix `Seq.append` (View?.put v x `Seq.append` suffix) in + B.g_upd_seq_as_seq (as_buffer vb) s1 h + +let upd_equal_domains #b h vb i x + = let open FStar.Mul in + let v = get_view vb in + let prefix, _, suffix = split_at_i vb i h in + let s1 = prefix `Seq.append` (View?.put v x `Seq.append` suffix) in + upd_modifies h vb i x; + B.g_upd_seq_as_seq (as_buffer vb) s1 h + +let rec as_seq' (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i <= length vb}) + : GTot (Seq.lseq b (length vb - i)) + (decreases (length vb - i)) + = let v = get_view vb in + if i = length vb + then Seq.empty + else let _ = view_indexing vb i in + let _, s_i, suffix = split_at_i vb i h in + View?.get v s_i `Seq.cons` as_seq' h vb (i + 1) + +let as_seq (#b: _) (h:HS.mem) (vb:buffer b) = as_seq' h vb 0 + +#set-options "--max_fuel 1 --max_ifuel 1" +let as_seq_sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) + : Lemma (ensures (sel h vb i == Seq.index (as_seq h vb) i)) + = + let rec as_seq'_as_seq' (j:nat) + (i:nat{j + i < length vb}) + : Lemma (ensures (Seq.index (as_seq' h vb j) i == Seq.index (as_seq' h vb (j + i)) 0)) + (decreases i) + = if i = 0 then () else as_seq'_as_seq' (j + 1) (i - 1) + in + as_seq'_as_seq' 0 i + +#set-options "--max_fuel 0 --max_ifuel 1" +let get_sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) = () +let put_sel (#b: _) (h:HS.mem) (vb:buffer b) (i:nat{i < length vb}) = () diff --git a/stage0/ulib/LowStar.BufferView.fsti b/stage0/ulib/LowStar.BufferView.fsti new file mode 100644 index 00000000000..e076f63502e --- /dev/null +++ b/stage0/ulib/LowStar.BufferView.fsti @@ -0,0 +1,257 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.BufferView + +(** + * A "view" on a buffer allows treating a + * `Buffer.buffer a` as a + * `BufferView.buffer b` + * + * A "view" on a buffer is intended for specification purposes only + * It does not correspond to a pointer cast in C. + * + * Building a view requires providing a pair of mutually inverse functions + * from sequences of `a` (sub-sequences of the source buffer) + * to elements of `b`. + * + **) +open LowStar.Monotonic.Buffer + +module HS=FStar.HyperStack +module B=LowStar.Monotonic.Buffer + +(** Definition of a view **) + +/// `f` and `g` are mutual inverses +let inverses #a #b + (f: (a -> GTot b)) + (g: (b -> GTot a)) = + (forall x. g (f x) == x) /\ + (forall y. f (g y) == y) + +/// `view a b` maps `n`-lengthed sequences of `a` to a single `b` +noeq +type view (a:Type) (b:Type) = + | View : n:pos -> + get:(Seq.lseq a n -> GTot b) -> + put:(b -> GTot (Seq.lseq a n)) { + inverses get put + } -> + view a b + +/// `buffer_views src dest`: +/// +/// The main abstract type provided by this module. This type is +/// indexed by both the `src` and `dest` types. The former (`src`) is +/// the type of the underlying B.buffer's contents: as such, it is +/// forced to be in universe 0. +/// +/// The destination type `dest` is for specification only and is not +/// subject to the same universe constraints by the memory model. + +val buffer_view (src:Type0) (rrel rel:B.srel src) (dest:Type u#b) : Type u#b + +/// `buffer b`: In contrast to `buffer_view`, `buffer b` hides the +/// source type of the view. As such, it is likely more convenient to +/// use in specifications and the rest of this interface is designed +/// around this type. +/// +/// However, the type has a higher universe, and +/// this means, for instance, that values of `buffer b` cannot be +/// stored in the heap. 
+/// +/// We leave its definition transparent in case clients wish to +/// manipulate both the `src` and `dest` types explicitly (e.g., to +/// stay in a lower universe) + +let buffer (dest:Type u#a) : Type u#(max a 1) = (src:Type0 & rrel:B.srel src & rel:B.srel src & buffer_view src rrel rel dest) + +let as_buffer_t (#dest:Type) (b:buffer dest) = B.mbuffer (Mkdtuple4?._1 b) (Mkdtuple4?._2 b) (Mkdtuple4?._3 b) + +/// `mk_buffer_view`: The main constructor +val mk_buffer_view (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest{ + length b % View?.n v == 0 + }) + : GTot (buffer dest) + + +/// `as_buffer`: Projecting the underlying B.buffer from its view +val as_buffer (#b:Type) (v:buffer b) : as_buffer_t v + +/// A lemma-relating projector to constructor +val as_buffer_mk_buffer_view (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest{ + length b % View?.n v == 0 + }) + : Lemma (let bv = mk_buffer_view b v in + Mkdtuple4?._1 bv == src /\ + Mkdtuple4?._2 bv == rrel /\ + Mkdtuple4?._3 bv == rel /\ + as_buffer bv == b) + [SMTPat (as_buffer (mk_buffer_view b v))] + +/// `get_view`: Projecting the view functions itself +val get_view (#b : Type) (v:buffer b) : view (Mkdtuple4?._1 v) b + +/// A lemma-relating projector to constructor +val get_view_mk_buffer_view (#src:Type0) (#rrel #rel:B.srel src) (#dest:Type) + (b:B.mbuffer src rrel rel) + (v:view src dest{ + length b % View?.n v == 0 + }) + : Lemma (let bv = mk_buffer_view b v in + Mkdtuple4?._1 bv == src /\ + get_view bv == v) + [SMTPat (get_view (mk_buffer_view b v))] + +/// `live h vb`: liveness of a buffer view corresponds to liveness of +/// the underlying buffer +unfold +let live #b h (vb:buffer b) = live h (as_buffer vb) + +/// `length vb`: is defined in terms of the underlying buffer +/// +/// Internally, it is defined as +/// +/// ``` +/// length (as_buffer vb) / View?.n (get_view vb) +/// ``` +/// +/// However, rather than expose this definition (which uses non-linear +/// arithmetic) to callers, we treat length abstractly. +/// +/// To reveal its definition explicitly, use the `length_eq` lemma below. +val length (#b: _) (vb:buffer b) + : GTot nat + +/// `length_eq`: Reveals the definition of the `length` function +val length_eq (#b: _) (vb:buffer b) + : Lemma (length vb = B.length (as_buffer vb) / View?.n (get_view vb)) + +/// `view_indexing`: A lemma that requires a bit of non-linear +/// arithmetic, necessary for some of the specs below and convenient +/// when relating the underlying buffer to its view. 
+val view_indexing (#b: _) (vb:buffer b) (i:nat{i < length vb}) + : Lemma (let open FStar.Mul in + let n = View?.n (get_view vb) in + n <= length vb * n - i * n) + +/// `sel h vb i` : selects element at index `i` from the buffer `vb` in heap `h` +val sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : GTot b + +/// `upd h vb i x`: stores `x` at index `i` in the buffer `vb` in heap `h` +val upd (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : GTot HS.mem + +/// `sel_upd`: A classic select/update lemma for reasoning about maps +val sel_upd (#b:_) + (vb:buffer b) + (i:nat{i < length vb}) + (j:nat{j < length vb}) + (x:b) + (h:HS.mem{live h vb}) + : Lemma (if i = j + then sel (upd h vb i x) vb j == x + else sel (upd h vb i x) vb j == sel h vb j) + [SMTPat (sel (upd h vb i x) vb j)] + +val lemma_upd_with_sel (#b:_) + (vb:buffer b) + (i:nat{i < length vb}) + (h:HS.mem{live h vb}) + :Lemma (upd h vb i (sel h vb i) == h) + +/// `modifies` on views is just defined in terms of the underlying buffer +unfold +let modifies (#b: _) + (vb:buffer b) + (h h':HS.mem) + = B.modifies (B.loc_buffer (as_buffer vb)) h h' + +/// `upd_modifies`: Footprint of `upd` +val upd_modifies (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : Lemma (ensures (modifies vb h (upd h vb i x) /\ + live (upd h vb i x) vb)) + [SMTPat (upd h vb i x)] + +/// `upd_equal_domains`: `upd` does not modify the memory domains +val upd_equal_domains (#b: _) + (h:HS.mem) + (vb:buffer b{live h vb}) + (i:nat{i < length vb}) + (x:b) + : Lemma (FStar.HyperStack.ST.equal_domains h (upd h vb i x)) + +/// `as_seq h vb`: Viewing the entire buffer as a sequence of `b` +val as_seq (#b: _) (h:HS.mem) (vb:buffer b) + : GTot (Seq.lseq b (length vb)) + +/// `as_seq_sel`: +/// +/// Relates selecting an element in the heap to selecting an element +/// from the sequence +val as_seq_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (sel h vb i == Seq.index (as_seq h vb) i) + +/// `get_sel`: +/// +/// Relates selecting an element from the view to translating a +/// subsequence of the underlying buffer through the view +val get_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (let open FStar.Mul in + let v = get_view vb in + let n = View?.n v in + length_eq vb; + view_indexing vb i; + sel h vb i == + View?.get v (Seq.slice (B.as_seq h (as_buffer vb)) (i * n) (i * n + n))) + +/// `put_sel`: +/// +/// Relates selecting a subsequence of the underlying buffer +/// to selecting and translating an element from the view. 
+val put_sel (#b: _) + (h:HS.mem) + (vb:buffer b) + (i:nat{i < length vb}) + : Lemma (let open FStar.Mul in + let v = get_view vb in + let n = View?.n v in + length_eq vb; + view_indexing vb i; + View?.put v (sel h vb i) == + Seq.slice (B.as_seq h (as_buffer vb)) (i * n) (i * n + n)) diff --git a/stage0/ulib/LowStar.Comment.fst b/stage0/ulib/LowStar.Comment.fst new file mode 100644 index 00000000000..6777af9bf52 --- /dev/null +++ b/stage0/ulib/LowStar.Comment.fst @@ -0,0 +1,9 @@ +module LowStar.Comment + +let comment_gen + #t before body after += body + +let comment + s += () diff --git a/stage0/ulib/LowStar.Comment.fsti b/stage0/ulib/LowStar.Comment.fsti new file mode 100644 index 00000000000..a3fb0cd3d83 --- /dev/null +++ b/stage0/ulib/LowStar.Comment.fsti @@ -0,0 +1,28 @@ +module LowStar.Comment +open FStar.HyperStack.ST + +/// `comment_gen before body after` extracts to KaRaMeL AST +/// `EComment (before, body', after)` (where `body` extracts +/// to `body'`), and so ultimately extracts to the +/// corresponding C implementation of `body` enclosed with +/// two comments `/* before */` and `/* after */`. +/// `before` and `after` *must be* string literals. +/// However, `comment_gen` is not enough to produce +/// standalone comments, because if `body` is a pure unit +/// expression, then F\*, not KaRaMeL, will erase it at +/// extraction. + +val comment_gen: #t: Type -> before: string -> body: t -> after: string -> Pure t + (requires (True)) + (ensures (fun res -> res == body)) + +/// `comment s` extracts to KaRaMeL AST +/// `EStandaloneComment s`, and so ultimately extracts to +/// the standalone C comment `/* s */`. `s` *must be* +/// a string literal. + +val comment + (s: string) +: Stack unit + (requires (fun _ -> True)) + (ensures (fun h _ h' -> h == h')) diff --git a/stage0/ulib/LowStar.ConstBuffer.fst b/stage0/ulib/LowStar.ConstBuffer.fst new file mode 100644 index 00000000000..1359d632b67 --- /dev/null +++ b/stage0/ulib/LowStar.ConstBuffer.fst @@ -0,0 +1,53 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module LowStar.ConstBuffer + +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +open FStar.HyperStack.ST + +module I = LowStar.ImmutableBuffer +module B = LowStar.Buffer + +let const_buffer a = qbuf a + +let as_qbuf c = c + +let of_buffer b = (| MUTABLE, b |) + +let of_ibuffer b = (| IMMUTABLE, b |) + +let of_qbuf #_ #q b = (| q, b |) + +let is_null #a c = + let x = qbuf_mbuf c in + B.is_null x + +let index c i = + let x = qbuf_mbuf c in + B.index x i + +let sub #a c i len = + let (| q, x |) = c in + let x : B.mbuffer a (q_preorder q a) (q_preorder q a) = x in + let y = B.msub (q_preorder q a) x i len in + (| q, y |) + +let cast c = qbuf_mbuf c +let to_buffer c = qbuf_mbuf c +let to_ibuffer c = qbuf_mbuf c diff --git a/stage0/ulib/LowStar.ConstBuffer.fsti b/stage0/ulib/LowStar.ConstBuffer.fsti new file mode 100644 index 00000000000..3a85785ce3f --- /dev/null +++ b/stage0/ulib/LowStar.ConstBuffer.fsti @@ -0,0 +1,298 @@ +(* + Copyright 2008-2019 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.ConstBuffer + +(* This module provides a model of const pointers in C. + + A well-typed client guarantees that it will not mutate memory + through a const pointer. But it cannot rely on the context not + mutating the same memory. + + As such, we model const pointers as a finite disjunction of + {mutable, immutable}-pointers, forcing code to guarantee the + strongest condition of the two (immutability) and to rely only on + the weakest (i.e., mutability). + + The main type of this module is `const_buffer t`. It is extracted + by KaRaMeL to `const t*`. +*) + +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +open FStar.HyperStack.ST + +module I = LowStar.ImmutableBuffer +module B = LowStar.Buffer + +(*** A model for const pointers **) + +/// We start by defining the finite disjunction of mutable and +/// immutable buffers. +/// +/// NB: THIS IS FOR SPECIFICATIONAL PURPOSES ONLY +/// The concrete type `const_buffer` is defined later + +/// `qual`: mutability qualifier +[@@erasable] +noeq +type qual = + | MUTABLE + | IMMUTABLE + +/// `qbuf_cases q a`: disjunction of mutable and immutable pointers +let qbuf_cases (q:qual) (a:Type) = + match q with + | MUTABLE -> B.buffer a + | IMMUTABLE -> I.ibuffer a + +/// `q_preorder q a`: As we'll see shortly, it is convenient to also +/// define a disjunction of the preorder indices on a qualified +/// buffer +inline_for_extraction +let q_preorder (q:qual) (a:Type) = + match q with + | MUTABLE -> B.trivial_preorder a + | IMMUTABLE -> I.immutable_preorder a + +/// `qbuf a`: This type is used for specificational purposes only. 
It +/// is buffer whose mutability qualifier is existentially bound, via +/// a dependent pair +let qbuf a = (q:qual & qbuf_cases q a) + +/// `qbuf_qual c`: projecting the mutability qualifier of a `qbuf` +let qbuf_qual (c:qbuf 'a) : qual = dfst c + +/// `qbuf_pre c`: case-dependent preorders for a qbuf +let qbuf_pre (c:qbuf 'a) = q_preorder (qbuf_qual c) 'a + +/// `qbuf_mbuf c`: turning a qbuf into a regular monotonic buffer +let qbuf_mbuf (c:qbuf 'a) : B.mbuffer 'a (qbuf_pre c) (qbuf_pre c) = dsnd c + +(*** CONCRETE CONST POINTERS **) + +/// `const_buffer`: +/// An abstract type of a read-only pointer to an array of `a` +val const_buffer (a:Type u#0) : Type u#0 + +/// `as_qbuf`: For specificational purposes, a const_buffer can be +/// seen as an existentially quantified qbuf +val as_qbuf (c:const_buffer 'a) : Tot (qbuf 'a) + +/// `qual_of`: +let qual_of (#a:_) (c:const_buffer a) + : Tot qual + = dfst (as_qbuf c) + +/// `as_mbuf`: A convenience function to turn a const_buffer into a +/// regular mbuffer, for spec purposes +let as_mbuf (c:const_buffer 'a) : GTot _ = qbuf_mbuf (as_qbuf c) + +/// We now give several convenience functions that lift common +/// notions on buffers to const_buffer, via the `as_mbuf` coercion +let live (h:HS.mem) (c:const_buffer 'a) = B.live h (as_mbuf c) +let length (c:const_buffer 'a) = B.length (as_mbuf c) +let loc_buffer (c:const_buffer 'a) = B.loc_buffer (as_mbuf c) +let loc_addr_of_buffer (c:const_buffer 'a) = B.loc_addr_of_buffer (as_mbuf c) +let as_seq (h:HS.mem) (c:const_buffer 'a) = B.as_seq h (as_mbuf c) +let g_is_null (c:const_buffer 'a) = B.g_is_null (as_mbuf c) + +(*** CONSTRUCTORS **) + +/// `of_buffer`: A constructors for const buffers from mutable and +/// immutable buffers. It is fully specified in terms of the +/// `qbuf/mbuf` model +val of_buffer (b:B.buffer 'a) + : Pure (const_buffer 'a) + (requires True) + (ensures fun c -> + let c = as_qbuf c in + qbuf_qual c == MUTABLE /\ + qbuf_mbuf c == b) + +/// `of_ibuffer`: A constructors for const buffers from mutable and +/// immutable buffers. It is fully specified in terms of the +/// `qbuf/mbuf` model +val of_ibuffer (b:I.ibuffer 'a) + : Pure (const_buffer 'a) + (requires True) + (ensures fun c -> + let c = as_qbuf c in + qbuf_qual c == IMMUTABLE /\ + qbuf_mbuf c == b) + + +/// `of_qbuf`: A constructors for const buffers from either mutable and +/// immutable buffers. It is fully specified in terms of the +/// `qbuf/mbuf` model +val of_qbuf (#q:qual) (b:B.mbuffer 'a (q_preorder q 'a) (q_preorder q 'a)) + : Pure (const_buffer 'a) + (requires True) + (ensures fun c -> + let c = as_qbuf c in + qbuf_qual c == q /\ + qbuf_mbuf c == b) + +/// null constant buffer +let null 'a : const_buffer 'a = of_buffer B.null + +(*** OPERATIONS ON CONST POINTERS **) + +/// Is the buffer the null pointer? 
+val is_null (c:const_buffer 'a) + : Stack bool (requires (fun h -> live h c)) + (ensures (fun h y h' -> h == h' /\ y == g_is_null c)) + + +/// `index c i`: Very similar to the spec for `Buffer.index` +val index (c:const_buffer 'a) (i:U32.t) + : Stack 'a + (requires fun h -> + live h c /\ + U32.v i < length c) + (ensures fun h0 y h1 -> + h0 == h1 /\ + y == Seq.index (as_seq h0 c) (U32.v i)) + + +/// Specification of sub +let gsub (c:const_buffer 'a) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length c}) + : GTot (const_buffer 'a) + = let qc = as_qbuf c in + of_qbuf (B.mgsub (qbuf_pre qc) (qbuf_mbuf qc) i len) + +/// Relational specification of sub +let const_sub_buffer (i:U32.t) (len:U32.t) (csub c:const_buffer 'a) = + let qc = as_qbuf c in + let qcsub = as_qbuf csub in + U32.v i + U32.v len <= length c /\ + csub == gsub c i len + +/// `sub`: A sub-buffer of a const buffer points to a given +/// within-bounds offset of its head +val sub (#a:_) (c:const_buffer a) (i:U32.t) (len:Ghost.erased (U32.t)) + : Stack (const_buffer a) + (requires fun h -> + live h c /\ + U32.v i + U32.v len <= length c) + (ensures fun h0 c' h1 -> + let qc = as_qbuf c in + let qc' = as_qbuf c' in + h0 == h1 /\ + c' `const_sub_buffer i len` c) + +/// Discussion between NS and JP (20191119) +/// +/// Why is it safe to generate C code that casts away the const qualifier with the +/// cast operations below? Looking at the C11 standard, 6.7.3 alinea 6: +/// +/// > If an attempt is made to modify an object defined with a const-qualified type +/// > through useof an lvalue with non-const-qualified type, the behavior is +/// > undefined. +/// +/// So, dangerous things happen in situations where the original object is *created* +/// with a const qualifier (the object's _identity_ is const). +/// +/// ``` +/// #include +/// #include +/// +/// extern void f(const int *x); +/// +/// int main() { +/// const int x = 0; +/// f(&x); // f promises not to modify x +/// printf("%d\n", x); // prints 0 at -O3 but 1 at -O0 +/// return 0; +/// } +/// ``` +/// +/// with: +/// +/// ``` +/// void f(const int *x) { +/// int *y = (int *)x; +/// *y = 1; +/// } +/// ``` +/// +/// In Low*, however, we never create objects that are marked const from the start. +/// This is for historical reasons; in particular, immutable buffers are not marked +/// const (they certainly could be). +/// +/// So, the casts seem to be safe? Also, the difference in behavior noted above +/// does not happen if x is defined as +/// +/// ``` +/// const int *x = calloc(1, sizeof *x); +/// ``` +/// +/// Finally, the compiler, if the const qualifier is stripped from x, could still +/// potentially rely on an argument of freshness (pointer provenance?) to deduce +/// that &x is the sole pointer to x and that therefore the value of x should remain +/// the same. This does not seem to be happening. 
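To make the discussion concrete, here is a minimal sketch of a client of the casts specified just below: a `const_buffer` built with `of_buffer` is known to be `MUTABLE`, so `to_buffer` recovers the original mutable pointer, and writing through it is just writing through the original buffer. The function name `write_through` and its module context (opening `FStar.HyperStack.ST`, with `B` abbreviating `LowStar.Buffer` as in this interface) are assumptions for illustration only, not part of this interface.

```
(* Sketch only, not part of this interface. *)
let write_through (x:B.buffer UInt32.t)
  : Stack unit
    (requires fun h -> B.live h x /\ B.length x > 0)
    (ensures fun h0 _ h1 -> B.modifies (B.loc_buffer x) h0 h1)
  = let c = of_buffer x in   (* spec: qbuf_qual (as_qbuf c) == MUTABLE and as_mbuf c == x *)
    let m = to_buffer c in   (* spec: m == as_mbuf c, i.e. m is x itself; the cast is free in C *)
    B.upd m 0ul 42ul         (* hence this updates x, matching the modifies clause above *)
```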
+ +/// `cast`: It is possible to cast away the const qualifier recovering +/// a mutable or immutable pointer, in case the context can prove +/// that `qbuf_qual c` is MUTABLE or IMMUTABLE, respectively +val cast (c:const_buffer 'a) + : Pure (B.mbuffer 'a (qbuf_pre (as_qbuf c)) (qbuf_pre (as_qbuf c))) + (requires True) + (ensures fun x -> + x == as_mbuf c) + +val to_buffer (c:const_buffer 'a) + : Pure (B.buffer 'a) + (requires ( + let c = as_qbuf c in + qbuf_qual c == MUTABLE)) + (ensures fun x -> + x == as_mbuf c) + +val to_ibuffer (c:const_buffer 'a) + : Pure (I.ibuffer 'a) + (requires ( + let c = as_qbuf c in + qbuf_qual c == IMMUTABLE)) + (ensures fun x -> + x == as_mbuf c) + +//////////////////////////////////////////////////////////////////////////////// +let test (x:B.buffer U32.t) (y:I.ibuffer U32.t) + : Stack U32.t + (requires fun h -> + B.live h x /\ + B.live h y /\ + B.length x > 0 /\ + B.length y > 2 /\ + B.get h y 0 == 1ul /\ + B.get h y 1 == 2ul /\ + B.disjoint x y) + (ensures fun h0 a h1 -> + B.modifies (B.loc_buffer x) h0 h1 /\ + a == 4ul) + = let c1 = of_buffer x in + let c2 = of_ibuffer y in + B.upd x 0ul 1ul; + let a = index c1 0ul in + assert (a == 1ul); + let a' = index c2 0ul in + assert (a' == 1ul); + let c3 = sub c2 1ul 1ul in + let a'' = index c3 0ul in + assert (a'' == 2ul); + U32.(a +^ a' +^ a'') diff --git a/stage0/ulib/LowStar.Endianness.fst b/stage0/ulib/LowStar.Endianness.fst new file mode 100644 index 00000000000..9b0af2d6caa --- /dev/null +++ b/stage0/ulib/LowStar.Endianness.fst @@ -0,0 +1,551 @@ +module LowStar.Endianness + +/// Stateful operations between machine integers and buffers of uint8s. Most of +/// these operations are implemented natively using the target's system endianness +/// headers, relying on macros or static inline declarations. +/// +/// .. note:: +/// +/// This module supersedes ``C.Endianness``. + +module MB = LowStar.Monotonic.Buffer +module B = LowStar.Buffer + +open FStar.HyperStack.ST +open FStar.Endianness +open LowStar.BufferOps + +module U8 = FStar.UInt8 +module U16 = FStar.UInt16 +module U32 = FStar.UInt32 +module U64 = FStar.UInt64 +module U128 = FStar.UInt128 + +module HS = FStar.HyperStack + +inline_for_extraction +type u8 = U8.t +inline_for_extraction +type u16 = U16.t +inline_for_extraction +type u32 = U32.t +inline_for_extraction +type u64 = U64.t +inline_for_extraction +type u128 = U128.t + +/// Byte-swapping operations +/// ------------------------ +/// +/// TODO these are totally unspecified + +assume val htole16: u16 -> u16 +assume val le16toh: u16 -> u16 + +assume val htole32: u32 -> u32 +assume val le32toh: u32 -> u32 + +assume val htole64: u64 -> u64 +assume val le64toh: u64 -> u64 + +assume val htobe16: u16 -> u16 +assume val be16toh: u16 -> u16 + +assume val htobe32: u32 -> u32 +assume val be32toh: u32 -> u32 + +assume val htobe64: u64 -> u64 +assume val be64toh: u64 -> u64 + +/// Precondition for store functions +/// +/// Parametrized by the predicate that different store functions can pass accordingly +/// +/// Typically saying sequence contents are le_to_n or be_to_n etc. + +unfold let store_pre + (#a:Type) (#rrel #rel:MB.srel a) (b:MB.mbuffer a rrel rel) + (i:nat) (j:nat{i + j <= MB.length b}) (predicate:Seq.seq a -> Type0) + = fun (h:HS.mem) -> + let sb = MB.as_seq h b in + let len = MB.length b in + + MB.live h b /\ + + (forall (s:Seq.seq a). 
//monotonicity precondition that once the contents of the buffer + //between [i, j) are replaced as per the predicate, the + //preorder rel is satisfied + + (Seq.length s == len /\ + Seq.equal (Seq.slice s 0 i) (Seq.slice sb 0 i) /\ + Seq.equal (Seq.slice s (i + j) len) (Seq.slice sb (i + j) len) /\ + predicate (Seq.slice s i (i + j))) + + ==> rel sb s) + + +/// Common postcondition + +unfold let store_post + (#a:Type) (#rrel #rel:MB.srel a) (b:MB.mbuffer a rrel rel) + (i:nat) (j:nat{i + j <= MB.length b}) (predicate:Seq.seq a -> Type0) + = fun (h0:HS.mem) (_:unit) (h1:HS.mem) -> + MB.live h1 b /\ + MB.(modifies (loc_buffer b) h0 h1) /\ + (let s1 = MB.as_seq h0 b in + let s2 = MB.as_seq h1 b in + let len = MB.length b in + + //the buffer only changes in the interval [i, j) as per the predicate + Seq.equal (Seq.slice s2 0 i) (Seq.slice s1 0 i) /\ + Seq.equal (Seq.slice s2 (i + j) len) (Seq.slice s1 (i + j) len) /\ + predicate (Seq.slice s2 i (i + j))) + + +/// Loads and stores +/// ---------------- +/// +/// These are primitive + +assume val store16_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 2 <= MB.length b}) + (z:u16) + : Stack unit + (requires (store_pre b (U32.v i) 2 (fun s -> le_to_n s == U16.v z))) + (ensures (store_post b (U32.v i) 2 (fun s -> le_to_n s == U16.v z))) + +assume val load16_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 2 <= MB.length b}) + : Stack u16 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + le_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 2)) == U16.v z) + +assume val store16_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 2 <= MB.length b}) + (z:u16) + : Stack unit + (requires (store_pre b (U32.v i) 2 (fun s -> be_to_n s == U16.v z))) + (ensures (store_post b (U32.v i) 2 (fun s -> be_to_n s == U16.v z))) + +assume val load16_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 2 <= MB.length b}) + : Stack u16 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + be_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 2)) == U16.v z) + +assume val store32_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 4 <= MB.length b}) + (z:u32) + : Stack unit + (requires (store_pre b (U32.v i) 4 (fun s -> le_to_n s == U32.v z))) + (ensures (store_post b (U32.v i) 4 (fun s -> le_to_n s == U32.v z))) + +assume val load32_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 4 <= MB.length b}) + : Stack u32 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + le_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 4)) == U32.v z) + +assume val store32_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 4 <= MB.length b}) + (z:u32) + : Stack unit + (requires (store_pre b (U32.v i) 4 (fun s -> be_to_n s == U32.v z))) + (ensures (store_post b (U32.v i) 4 (fun s -> be_to_n s == U32.v z))) + +assume val load32_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 4 <= MB.length b}) + : Stack u32 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + be_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 4)) == U32.v z) + +assume val store64_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 8 <= MB.length b}) + (z:u64) + : Stack unit + (requires (store_pre b (U32.v i) 8 
(fun s -> le_to_n s == U64.v z))) + (ensures (store_post b (U32.v i) 8 (fun s -> le_to_n s == U64.v z))) + +assume val load64_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 8 <= MB.length b}) + : Stack u64 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + le_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 8)) == U64.v z) + +assume val store64_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 8 <= MB.length b}) + (z:u64) + : Stack unit + (requires (store_pre b (U32.v i) 8 (fun s -> be_to_n s == U64.v z))) + (ensures (store_post b (U32.v i) 8 (fun s -> be_to_n s == U64.v z))) + +assume val load64_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 8 <= MB.length b}) + : Stack u64 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + be_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 8)) == U64.v z) + +assume val store128_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 16 <= MB.length b}) + (z:u128) + : Stack unit + (requires (store_pre b (U32.v i) 16 (fun s -> le_to_n s == U128.v z))) + (ensures (store_post b (U32.v i) 16 (fun s -> le_to_n s == U128.v z))) + +assume val load128_le_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 16 <= MB.length b}) + : Stack u128 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + le_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 16)) == U128.v z) + +assume val store128_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 16 <= MB.length b}) + (z:u128) + : Stack unit + (requires (store_pre b (U32.v i) 16 (fun s -> be_to_n s == U128.v z))) + (ensures (store_post b (U32.v i) 16 (fun s -> be_to_n s == U128.v z))) + + +assume val load128_be_i + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32{U32.v i + 16 <= MB.length b}) + : Stack u128 + (requires fun h -> MB.live h b) + (ensures fun h0 z h1 -> + h0 == h1 /\ + MB.live h1 b /\ + be_to_n (Seq.slice (MB.as_seq h1 b) (U32.v i) (U32.v i + 16)) == U128.v z) + +/// Loads and stores, on buffers of the right size. +/// ----------------------------------------------- +/// +/// There is bunch of legacy code that wants these operators that operate on buffers of exactly the right size. This is actually more restrictive than the version above, which operates on monotonic buffers, so we offer specialized operators. 
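As a quick, hedged illustration of the specialized wrappers defined below, the following sketch round-trips a 32-bit value through `store32_le`/`load32_le` on a stack-allocated 4-byte buffer. The function name `roundtrip32` and the client-module context (opening `FStar.HyperStack.ST`, with `B` abbreviating `LowStar.Buffer` as in this module) are assumptions for illustration.

```
(* Sketch only, not part of this module. *)
let roundtrip32 () : Stack UInt32.t (requires fun _ -> True) (ensures fun _ _ _ -> True) =
  push_frame ();
  let b = B.alloca 0uy 4ul in   // exactly 4 bytes, as store32_le/load32_le expect
  store32_le b 0xdeadbeeful;    // spec below: le_to_n (as_seq b) == v 0xdeadbeeful
  let r = load32_le b in        // spec below: le_to_n (as_seq b) == v r, hence v r == 0xdeadbeef
  pop_frame ();
  r
```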
+ +let store16_le + (b:B.buffer UInt8.t{B.length b == 2}) + (z:UInt16.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt16.v z)) += + store16_le_i b 0ul z + +let load16_le + (b:B.buffer UInt8.t{B.length b == 2}): + Stack UInt16.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt16.v z)) += + load16_le_i b 0ul + + +let store16_be + (b:B.buffer UInt8.t{B.length b == 2}) + (z:UInt16.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt16.v z)) += + store16_be_i b 0ul z + +let load16_be + (b:B.buffer UInt8.t{B.length b == 2}): + Stack UInt16.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt16.v z)) += + load16_be_i b 0ul + + +let store32_le + (b:B.buffer UInt8.t{B.length b == 4}) + (z:UInt32.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt32.v z)) += + store32_le_i b 0ul z + +let load32_le + (b:B.buffer UInt8.t{B.length b == 4}): + Stack UInt32.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt32.v z)) += + load32_le_i b 0ul + + +let store32_be + (b:B.buffer UInt8.t{B.length b == 4}) + (z:UInt32.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt32.v z)) += + store32_be_i b 0ul z + +let load32_be + (b:B.buffer UInt8.t{B.length b == 4}): + Stack UInt32.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt32.v z)) += + load32_be_i b 0ul + + +let store64_le + (b:B.buffer UInt8.t{B.length b == 8}) + (z:UInt64.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt64.v z)) += + store64_le_i b 0ul z + +let load64_le + (b:B.buffer UInt8.t{B.length b == 8}): + Stack UInt64.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt64.v z)) += + load64_le_i b 0ul + + +let load64_be + (b:B.buffer UInt8.t{B.length b == 8}): + Stack UInt64.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt64.v z)) += + load64_be_i b 0ul + +let store64_be + (b:B.buffer UInt8.t{B.length b == 8}) + (z:UInt64.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt64.v z)) += + store64_be_i b 0ul z + + +let load128_le + (b:B.buffer UInt8.t{B.length b == 16}): + Stack UInt128.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt128.v z)) += + load128_le_i b 0ul + +let store128_le + (b:B.buffer UInt8.t{B.length b == 16}) + (z:UInt128.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + le_to_n (B.as_seq h1 b) == UInt128.v z)) += + store128_le_i b 0ul z + + +let load128_be + (b:B.buffer 
UInt8.t{B.length b == 16}): + Stack UInt128.t + (requires (fun h -> B.live h b)) + (ensures (fun h0 z h1 -> h0 == h1 /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt128.v z)) += + load128_be_i b 0ul + +let store128_be + (b:B.buffer UInt8.t{B.length b = 16}) + (z:UInt128.t): + Stack unit + (requires (fun h -> B.live h b)) + (ensures (fun h0 _ h1 -> B.(modifies (loc_buffer b) h0 h1) /\ B.live h1 b /\ + be_to_n (B.as_seq h1 b) == UInt128.v z)) += + store128_be_i b 0ul z + +/// Index and update +/// ---------------- +/// +/// These are more sophisticated than load/store above, because they reason +/// over the underlying sequence of bytes interpreted as a sequence of (little|big)-endian +/// integers. + +#set-options "--z3rlimit 20 --max_fuel 0 --max_ifuel 0" + +inline_for_extraction +let index_32_be + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32) + : Stack u32 + (requires fun h -> + MB.live h b /\ MB.length b % 4 = 0 /\ + U32.v i < MB.length b / 4) + (ensures fun h0 r h1 -> + h0 == h1 /\ + r = Seq.index (seq_uint32_of_be (MB.length b / 4) (MB.as_seq h0 b)) (U32.v i)) + = load32_be_i b FStar.UInt32.(4ul *^ i) + +inline_for_extraction +let index_32_le + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32) + : Stack u32 + (requires fun h -> + MB.live h b /\ MB.length b % 4 = 0 /\ + U32.v i < MB.length b / 4) + (ensures fun h0 r h1 -> + h0 == h1 /\ + r = Seq.index (seq_uint32_of_le (MB.length b / 4) (MB.as_seq h0 b)) (U32.v i)) + = load32_le_i b FStar.UInt32.(4ul *^ i) + +inline_for_extraction +let index_64_be + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32) + : Stack u64 + (requires fun h -> + MB.live h b /\ MB.length b % 8 = 0 /\ + U32.v i < MB.length b / 8) + (ensures fun h0 r h1 -> + h0 == h1 /\ + r = Seq.index (seq_uint64_of_be (MB.length b / 8) (MB.as_seq h0 b)) (UInt32.v i)) + = load64_be_i b FStar.UInt32.(8ul *^ i) + +inline_for_extraction +let index_64_le + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32) + : Stack u64 + (requires fun h -> + MB.live h b /\ MB.length b % 8 = 0 /\ + U32.v i < MB.length b / 8) + (ensures fun h0 r h1 -> + h0 == h1 /\ + r = Seq.index (seq_uint64_of_le (MB.length b / 8) (MB.as_seq h0 b)) (UInt32.v i)) + = load64_le_i b FStar.UInt32.(8ul *^ i) + +#reset-options "--using_facts_from 'Prims'" + +let interval_4_disjoint (i j: nat) + : Lemma + (requires (i <> j)) + (ensures (let open FStar.Mul in 4 * i + 4 <= 4 * j \/ 4 * j + 4 <= 4 * i)) + = () + +#reset-options "--z3rlimit 16 --max_fuel 0 --max_ifuel 0" + +open FStar.Mul + +inline_for_extraction +let upd_32_be + (#rrel #rel:MB.srel u8) (b:MB.mbuffer u8 rrel rel) + (i:u32) (v:u32) + : Stack unit + (requires fun h -> + MB.length b % 4 = 0 /\ + U32.v i < MB.length b / 4 /\ + store_pre b (U32.(v (4ul *^ i))) 4 (fun s -> be_to_n s == U32.v v) h) + (ensures fun h0 _ h1 -> + MB.(modifies (loc_buffer b) h0 h1) /\ + seq_uint32_of_be (MB.length b / 4) (MB.as_seq h1 b) `Seq.equal` Seq.upd (seq_uint32_of_be (MB.length b / 4) (MB.as_seq h0 b)) (U32.v i) v) + = let h0 = get () in + store32_be_i b U32.(4ul *^ i) v; + let h1 = get () in + //AR: 03/01: the following 3 assertions say how the buffer changed + assert (be_to_n (Seq.slice (MB.as_seq h1 b) (U32.(v (4ul *^ i))) (U32.(v (4ul *^ i) + 4))) == U32.v v); + assert (Seq.equal (Seq.slice (MB.as_seq h0 b) 0 (U32.(v (4ul *^ i)))) + (Seq.slice (MB.as_seq h1 b) 0 (U32.(v (4ul *^ i))))); + assert (Seq.equal (Seq.slice (MB.as_seq h0 b) (U32.(v (4ul *^ i) + 4)) (MB.length b)) + (Seq.slice (MB.as_seq h1 b) (U32.(v (4ul *^ i) + 4)) 
(MB.length b))); + let f () : Lemma + (seq_uint32_of_be (MB.length b / 4) (MB.as_seq h1 b) `Seq.equal` Seq.upd (seq_uint32_of_be (MB.length b / 4) (MB.as_seq h0 b)) + (UInt32.v i) v) + = let s0 = MB.as_seq h0 b in + let s1 = MB.as_seq h1 b in + let n = MB.length b / 4 in + assert (4 `Prims.op_Multiply` n == MB.length b); + let s0' = seq_uint32_of_be n s0 in + let s1' = seq_uint32_of_be n s1 in + let lo = UInt32.v i in + let hi = lo + 1 in + let s2' = Seq.upd s0' lo v in + assert (Seq.length s1' == Seq.length s2'); + let i' = UInt32.v i in + let g + (j' : nat) + : Lemma + (requires (j' < n)) + (ensures (j' < n /\ Seq.index s1' j' == Seq.index s2' j')) + = if j' = UInt32.v i + then () + else begin + let u () : Lemma + (Seq.slice s0 (4 * j') (4 * j' + 4) == Seq.slice s1 (4 * j') (4 * j' + 4)) + = if j' < UInt32.v i + then begin + Seq.slice_slice s0 0 (4 * i') (4 * j') (4 * j' + 4); + Seq.slice_slice s1 0 (4 * i') (4 * j') (4 * j' + 4) + end else begin + Seq.slice_slice s0 (4 * i' + 4) (MB.length b) (4 * (j' - i' - 1)) (4 * (j' - i')); + Seq.slice_slice s1 (4 * i' + 4) (MB.length b) (4 * (j' - i' - 1)) (4 * (j' - i')) + end + in + u () + end + in + Classical.forall_intro (Classical.move_requires g) + in + f () diff --git a/stage0/ulib/LowStar.Failure.fsti b/stage0/ulib/LowStar.Failure.fsti new file mode 100644 index 00000000000..124d3ef05da --- /dev/null +++ b/stage0/ulib/LowStar.Failure.fsti @@ -0,0 +1,12 @@ +module LowStar.Failure + +/// This module exposes a single function for failure, which gets distinguished +/// treatment in KaRaMeL, is implemented in a header, correctly redirects to a +/// user-overridable KRML_HOST_EXIT macro (for situations where libc's exit(3) +/// does not apply), and does not require disabling C compiler warnings about +/// infinite recursion like C.Failure does. + +val failwith: #a:Type -> Prims.string -> + FStar.HyperStack.ST.Stack a + (requires (fun _ -> True)) + (ensures (fun _ _ _ -> False)) diff --git a/stage0/ulib/LowStar.Ignore.fsti b/stage0/ulib/LowStar.Ignore.fsti new file mode 100644 index 00000000000..31c213900c2 --- /dev/null +++ b/stage0/ulib/LowStar.Ignore.fsti @@ -0,0 +1,12 @@ +module LowStar.Ignore + +open FStar.HyperStack.ST + +(** This module provides a distinguished `ignore` function; the function gets + special treatment in KaRaMeL, which ensures that a call to `ignore e` + results in KRML_IGNORE(e). + + By default, KRML_IGNORE(e) expands to (void)(e), but this behavior can be + overridden (see include/krml/internal/target.h). *) + +val ignore: #a:Type -> x:a -> Stack unit (fun _ -> True) (fun h0 _ h1 -> h0 == h1) diff --git a/stage0/ulib/LowStar.ImmutableBuffer.fst b/stage0/ulib/LowStar.ImmutableBuffer.fst new file mode 100644 index 00000000000..564c00f2506 --- /dev/null +++ b/stage0/ulib/LowStar.ImmutableBuffer.fst @@ -0,0 +1,265 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module LowStar.ImmutableBuffer + +include LowStar.Monotonic.Buffer + +module P = FStar.Preorder +module G = FStar.Ghost +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +let immutable_preorder (a:Type0) :srel a = fun s1 s2 -> Seq.equal s1 s2 + +type ibuffer (a:Type0) = mbuffer a (immutable_preorder a) (immutable_preorder a) + +unfold let inull (#a:Type0) :ibuffer a = mnull #a #(immutable_preorder a) #(immutable_preorder a) + +unfold let igsub (#a:Type0) = mgsub #a #(immutable_preorder a) #(immutable_preorder a) (immutable_preorder a) + +unfold let igsub_inj (#a:Type0) = mgsub_inj #a #(immutable_preorder a) #(immutable_preorder a) (immutable_preorder a) (immutable_preorder a) + +inline_for_extraction +type ipointer (a:Type0) = b:ibuffer a{length b == 1} + +inline_for_extraction +type ipointer_or_null (a:Type0) = b:ibuffer a{if g_is_null b then True else length b == 1} + +inline_for_extraction let isub (#a:Type0) = msub #a #(immutable_preorder a) #(immutable_preorder a) (immutable_preorder a) + +inline_for_extraction let ioffset (#a:Type0) = moffset #a #(immutable_preorder a) #(immutable_preorder a) (immutable_preorder a) + +(* + * It's a bit sub-optimal that we have both cpred and seq_eq + * Ideally it should only be the erased version seq_eq + * + * However, Lib.Buffer in hacl is already using cpred, so keeping it for now + * But it should be cleaned up when that dependency is gone + *) +let cpred (#a:Type0) (s:Seq.seq a) :spred a = fun s1 -> Seq.equal s s1 + +let seq_eq (s:Ghost.erased (Seq.seq 'a)) : spred 'a = + fun s' -> s' `Seq.equal` Ghost.reveal s + +let value_is #a (b:ibuffer a) (s:Ghost.erased (Seq.seq a)) = + witnessed b (seq_eq s) + +let sub_ptr_value_is (#a:_) (b0 b1:ibuffer a) (h:HS.mem) (i len:U32.t) (v:Seq.seq a) + : Lemma + (requires + U32.v i + U32.v len <= length b1 /\ + b0 == mgsub (immutable_preorder a) b1 i len /\ + value_is b1 v /\ + Seq.length v == length b1) + (ensures + value_is b0 (Seq.slice v (U32.v i) (U32.v i + U32.v len))) + = let sub_v = Seq.slice v (U32.v i) (U32.v i + U32.v len) in + witnessed_functorial b1 b0 i len (seq_eq (Ghost.hide v)) (seq_eq (Ghost.hide sub_v)) + +unfold let libuffer (a:Type0) (len:nat) (s:Seq.seq a) = + b:lmbuffer a (immutable_preorder a) (immutable_preorder a) len{witnessed b (cpred s)} + +unfold let libuffer_or_null (a:Type0) (len:nat) (r:HS.rid) (s:Seq.seq a) = + b:lmbuffer_or_null a (immutable_preorder a) (immutable_preorder a) len r{(not (g_is_null b)) ==> + witnessed b (cpred s)} + +let igcmalloc (#a:Type0) (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:libuffer a (U32.v len) (Seq.create (U32.v len) init){frameOf b == r /\ recallable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + = let b = mgcmalloc r init len in + witness_p b (cpred (Seq.create (U32.v len) init)); + b + +(* + * Unlike other allocation functions in this module, + * this function (and other flavors of alloc_and_blit) don't provide the witnessed contents + * as the refinement of the return type + * This is because the contents depend on the input memory (== the contents of src) + *) +let igcmalloc_and_blit (#a:Type0) (r:HS.rid) + (#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t) + : HST.ST (b:lmbuffer a (immutable_preorder a) (immutable_preorder a) (U32.v len){frameOf b == r}) + (requires fun h0 -> + malloc_pre r len /\ + live h0 src /\ U32.v id_src + U32.v len <= length src) + 
(ensures fun h0 b h1 -> + let s = Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len) in + alloc_post_mem_common b h0 h1 s /\ + b `value_is` G.hide s) + = let b = mgcmalloc_and_blit r src id_src len in + let h0 = HST.get () in + witness_p b (seq_eq (G.hide (Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))); + b + +inline_for_extraction +let igcmalloc_partial (#a:Type0) (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:libuffer_or_null a (U32.v len) r (Seq.create (U32.v len) init){recallable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + = igcmalloc r init len + +let imalloc (#a:Type0) (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:libuffer a (U32.v len) (Seq.create (U32.v len) init){frameOf b == r /\ freeable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + = let b = mmalloc r init len in + witness_p b (cpred (Seq.create (U32.v len) init)); + b + +let imalloc_and_blit (#a:Type0) (r:HS.rid) + (#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t) + : HST.ST (b:lmbuffer a (immutable_preorder a) (immutable_preorder a) (U32.v len){frameOf b == r /\ freeable b}) + (requires fun h0 -> + malloc_pre r len /\ + live h0 src /\ U32.v id_src + U32.v len <= length src) + (ensures fun h0 b h1 -> + let s = Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len) in + alloc_post_mem_common b h0 h1 s /\ + b `value_is` G.hide s) + = let b = mmalloc_and_blit r src id_src len in + let h0 = HST.get () in + witness_p b (seq_eq (G.hide (Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))); + b + +inline_for_extraction +let imalloc_partial (#a:Type0) (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:libuffer_or_null a (U32.v len) r (Seq.create (U32.v len) init){(not (g_is_null b)) ==> freeable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + = imalloc r init len + +let ialloca (#a:Type0) (init:a) (len:U32.t) + :HST.StackInline (libuffer a (U32.v len) (Seq.create (U32.v len) init)) + (requires (fun _ -> alloca_pre len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\ + frameOf b == HS.get_tip h0)) + = let b = malloca init len in + witness_p b (cpred (Seq.create (U32.v len) init)); + b + +let ialloca_and_blit (#a:Type0) + (#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t) + : HST.StackInline (lmbuffer a (immutable_preorder a) (immutable_preorder a) (U32.v len)) + (requires fun h0 -> + alloca_pre len /\ + live h0 src /\ U32.v id_src + U32.v len <= length src) + (ensures fun h0 b h1 -> + let s = Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len) in + alloc_post_mem_common b h0 h1 s /\ + frameOf b == HS.get_tip h0 /\ + b `value_is` G.hide s) + = let b = malloca_and_blit src id_src len in + let h0 = HST.get () in + witness_p b (seq_eq (G.hide (Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))); + b + +let ialloca_of_list (#a:Type0) (init: list a) + :HST.StackInline (libuffer a (normalize_term (List.Tot.length init)) (Seq.seq_of_list init)) + (requires (fun _ -> alloca_of_list_pre init)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\ + frameOf b == HS.get_tip h0)) + = let b = malloca_of_list init in + witness_p b (cpred 
(Seq.seq_of_list init)); + b + +let igcmalloc_of_list (#a:Type0) (r:HS.rid) (init:list a) + :HST.ST (b:libuffer a (normalize_term (List.Tot.length init)) (Seq.seq_of_list init){frameOf b == r /\ recallable b}) + (requires (fun _ -> gcmalloc_of_list_pre r init)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init))) + = let b = mgcmalloc_of_list r init in + witness_p b (cpred (Seq.seq_of_list init)); + b + +inline_for_extraction +let igcmalloc_of_list_partial (#a:Type0) (r:HS.rid) (init:list a) + :HST.ST (b:libuffer_or_null a (normalize_term (List.Tot.length init)) r (Seq.seq_of_list init){recallable b}) + (requires (fun _ -> gcmalloc_of_list_pre r init)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) + = igcmalloc_of_list r init + +let witness_contents (#a:Type0) (b:ibuffer a) (s:Seq.seq a) + :HST.ST unit (requires (fun h0 -> Seq.equal (as_seq h0 b) s)) + (ensures (fun h0 _ h1 -> h0 == h1 /\ witnessed b (cpred s))) + = witness_p b (cpred s) + +let recall_contents (#a:Type0) (b:ibuffer a) (s:Seq.seq a) + :HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ witnessed b (cpred s))) + (ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ as_seq h0 b == s)) + = recall_p b (cpred s) + +let witness_value (#a:Type0) (b:ibuffer a) + :HST.ST unit (requires (fun h0 -> True)) + (ensures (fun h0 _ h1 -> h0 == h1 /\ b `value_is` (Ghost.hide (as_seq h1 b)))) + = let h = HST.get () in + let s = Ghost.hide (as_seq h b) in + witness_p b (seq_eq s) + +let recall_value (#a:Type0) (b:ibuffer a) (s:Ghost.erased (Seq.seq a)) + :HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `value_is` s)) + (ensures (fun h0 _ h1 -> h0 == h1 /\ live h1 b /\ as_seq h1 b == Ghost.reveal s)) + = recall_p b (seq_eq s) + +(* + * Immutable buffers are distinct from (trivial) buffers + * + * The proof basically proves a contradiction assuming that the buffers are not distinct + * Using injectivity of the base preorders, we get that trivial preorder is same as immutable preorder + * After which it is easy to derive the contradiction, provided client has provided a witness for inhabitance + *) +let inhabited_immutable_buffer_is_distinct_from_buffer (#a:Type0) (x:a) (ib:ibuffer a) (b:LowStar.Buffer.buffer a) + : Lemma (~ (ib === b)) + = let aux () : Lemma (requires (ib === b)) (ensures False) + = //use injectivity to prove that all sequences of type a are equal + mbuffer_injectivity_in_first_preorder (); + assert (immutable_preorder a == LowStar.Buffer.trivial_preorder a); + assert (forall (s1 s2:Seq.seq a). (immutable_preorder a) s1 s2 == (LowStar.Buffer.trivial_preorder a) s1 s2); + assert (forall (s1 s2:Seq.seq a). (immutable_preorder a) s1 s2 == Seq.equal s1 s2); + assert (forall (s1 s2:Seq.seq a). (LowStar.Buffer.trivial_preorder a) s1 s2 == True); + assert (forall (s1 s2:Seq.seq a). 
Seq.equal s1 s2); + + //now derive the contradiction + let s1 = Seq.create 0 x in + let s2 = Seq.create 1 x in + Seq.lemma_eq_elim s1 s2; + assert (s1 == s2); assert (Seq.length s1 == Seq.length s2) + in + (Classical.move_requires aux) () + +let buffer_immutable_buffer_disjoint + (#t: Type) (#ti: Type) + (b: LowStar.Buffer.buffer t) + (bi: ibuffer ti) + (h: HS.mem) +: Lemma + (requires ( + live h b /\ live h bi + )) + (ensures ( + disjoint b bi + )) += if length b = 0 + then empty_disjoint b bi + else if length bi = 0 + then empty_disjoint bi b + else begin + let s = as_seq h b in + assert (~ (LowStar.Buffer.trivial_preorder _ Seq.empty s <==> immutable_preorder _ Seq.empty s)); + live_same_addresses_equal_types_and_preorders b bi h + end diff --git a/stage0/ulib/LowStar.Literal.fsti b/stage0/ulib/LowStar.Literal.fsti new file mode 100644 index 00000000000..183e49ef519 --- /dev/null +++ b/stage0/ulib/LowStar.Literal.fsti @@ -0,0 +1,120 @@ +module LowStar.Literal + +module B = LowStar.Buffer +module IB = LowStar.ImmutableBuffer +module HS = FStar.HyperStack +module ST = FStar.HyperStack.ST + +open FStar.Mul + +/// This module enables clients to make use of string literals in Low* as +/// shorthand syntax for immutable, always-live uint8 buffers. See +/// LowStar.printf for writing and printing string literals. + +/// .. note:: +/// +/// This module supersedes ``C.String``. + +/// As a reminder, the F* compiler enforces that string literals are UTF-8 +/// encoded, and list_of_string returns the corresponding sequence of Unicode +/// scalar values (see https://erratique.ch/software/uucp/doc/Uucp.html#uminimal) for an excellent +/// crash course on Unicode. + +/// When compiling with KaRaMeL, string literals are printed as series of bytes, +/// where non-alphanumeric characters are hex-encoded. For instance, if after reading +/// the C standard, the user writes ``let x = "🤮"``, then KaRaMeL will generate +/// ``const char *x = "\xf0\x9f\xa4\xae"``. + +/// String literals as buffers +/// -------------------------- + +/// Therefore, in order to talk about the interpretation of a string literal as +/// a series of bytes, we need to define the serialization of Unicode scalar values +/// (as returned by ``String.list_of_string``) into bytes. This is a commendable and +/// noble goal, but instead, we choose to restrict ourselves to the ASCII subset of +/// UTF-8, where the byte encoding of a scalar value is the identity. +let is_ascii_char (c: Char.char) = UInt32.lt (Char.u32_of_char c) 0x80ul + +let ascii_char = c:Char.char{is_ascii_char c} + +let is_ascii_string (s: string) = + List.Tot.for_all is_ascii_char (String.list_of_string s) + +let ascii_string = s:string{is_ascii_string s} + +let for_all_tail #a p (l: list a { Cons? l }): Lemma + (requires (List.Tot.for_all p l)) + (ensures (List.Tot.for_all p (List.Tot.tl l))) += + () + +let ascii_chars_of_ascii_string (s: ascii_string): + l:list ascii_char { List.Tot.length l = String.length s } += List.Tot.list_refb #(Char.char) #_ (String.list_of_string s) + +let u8_of_ascii_char (c: ascii_char): x:UInt8.t{ UInt8.v x = Char.int_of_char c } = + let x32 = Char.u32_of_char c in + assert_norm (pow2 24 * pow2 8 = pow2 32); + Math.Lemmas.modulo_modulo_lemma (UInt32.v x32) (pow2 24) (pow2 8); + Int.Cast.Full.uint32_to_uint8 x32 + +/// This means that if a string literal only contains ASCII, then we can easily +/// reflect its contents in terms of uint8's, without having to talk about the utf8 +/// encoding. 
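Because the ASCII check is a boolean predicate over `String.list_of_string`, the normalizer can decide it for a concrete literal at typechecking time; this is the same mechanism the `normalize` preconditions of `buf_len_of_literal` below rely on. The one-liner that follows is a minimal sketch of such a check in a client module (the literal `"hello"` is an arbitrary choice, not part of this interface).

```
(* Sketch: ask the normalizer to evaluate the ASCII check on a concrete literal. *)
let _ = assert_norm (is_ascii_string "hello")
```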
+/// TODO: lemma: S.index (u8s_of_string s) i = String.index s i +/// (cannot be proven right now because we don't know much about String.index) +/// (is this even what we want? should we do everything in terms of list_of_string?) +let u8s_of_ascii_string (s: ascii_string): + ss:Seq.seq UInt8.t { Seq.length ss = List.Tot.length (String.list_of_string s) } += + let cs = List.Tot.map u8_of_ascii_char (ascii_chars_of_ascii_string s) in + Seq.seq_of_list cs + +unfold let buffer_of_literal_post (s: ascii_string) (h0: HS.mem) (b: IB.ibuffer UInt8.t) (h1: HS.mem) = + IB.frameOf b == HS.root /\ // is this really useful? + IB.recallable b /\ ( + // An inlined version of alloc_post_mem_common, without the unused_in + // condition, which would contradict the Stack annotation + let s = u8s_of_ascii_string s in + IB.live h1 b /\ + Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\ + HS.get_tip h1 == HS.get_tip h0 /\ + B.(modifies loc_none h0 h1) /\ + B.as_seq h1 b == s) + +/// Consequently, this function becomes in C a simple cast from ``const char *`` to +/// ``char *``, since immutable buffers don't (yet) have the ``const`` attribute in +/// KaRaMeL. (This is unsavory, and should be fixed later.) This way, a string +/// literal can be seen as an immutable buffer and passed around as such. +/// This function checks at extraction-time that its argument is a literal. +val buffer_of_literal: (s: ascii_string) -> + ST.Stack (IB.ibuffer UInt8.t) + (requires (fun _ -> String.length s < pow2 32)) + (ensures buffer_of_literal_post s) + +/// .. note:: +/// +/// This literal will be zero-terminated, but since we do not require that the +/// string literal be zero-free, the trailing zero will be ignored and unused. This +/// means that we won't be able to use the C standard library functions for string +/// manipulation and will instead have to pass lengths at run-time. + +/// Rather than having to write ``assert_norm`` by hand, this convenient wrapper +/// relies on the normalizer to discharge all the relevant proof obligations, and +/// synthesizes the length of the resulting buffer. The pair has no cost: KaRaMeL +/// guarantees that it will be eliminated. +unfold +let buf_len_of_literal (s: string): + ST.Stack (IB.ibuffer UInt8.t & UInt32.t) + (requires (fun _ -> + normalize (is_ascii_string s) /\ + normalize (List.Tot.length (String.list_of_string s) < pow2 32))) + (ensures (fun h0 r h1 -> + let b, l = r in + buffer_of_literal_post s h0 b h1 /\ + UInt32.v l = normalize_term (List.Tot.length (String.list_of_string s)) /\ + UInt32.v l = IB.length b)) += + [@@inline_let] + let l = normalize_term (UInt32.uint_to_t (List.Tot.length (String.list_of_string s))) in + buffer_of_literal s, l diff --git a/stage0/ulib/LowStar.Modifies.fst b/stage0/ulib/LowStar.Modifies.fst new file mode 100644 index 00000000000..1482f14b626 --- /dev/null +++ b/stage0/ulib/LowStar.Modifies.fst @@ -0,0 +1,17 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module LowStar.Modifies +include LowStar.Buffer diff --git a/stage0/ulib/LowStar.ModifiesPat.fst b/stage0/ulib/LowStar.ModifiesPat.fst new file mode 100644 index 00000000000..532c15785ce --- /dev/null +++ b/stage0/ulib/LowStar.ModifiesPat.fst @@ -0,0 +1,17 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.ModifiesPat +include LowStar.Modifies diff --git a/stage0/ulib/LowStar.Monotonic.Buffer.fst b/stage0/ulib/LowStar.Monotonic.Buffer.fst new file mode 100644 index 00000000000..684ff260f0d --- /dev/null +++ b/stage0/ulib/LowStar.Monotonic.Buffer.fst @@ -0,0 +1,1954 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.Monotonic.Buffer + +module P = FStar.Preorder +module G = FStar.Ghost +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre + +(* + * Counterpart of compatible_sub from the fsti but using sequences + * + * The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise + * The cost is that we have to additional asserts as triggers + *) +let compatible_sub_preorder (#a:Type0) + (len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a) + = compatible_subseq_preorder len rel i j sub_rel + +(* + * Reflexivity of the compatibility relation + *) +let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a) + :Lemma (compatible_sub_preorder len rel 0 len rel) + = assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==> + Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2) + +(* + * Transitivity of the compatibility relation + * + * i2 and j2 are relative offsets within [i1, j1) (i.e. 
assuming i1 = 0) + *) +let lemma_seq_sub_compatibility_is_transitive (#a:Type0) + (len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a) + :Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\ + compatible_sub_preorder len rel i1 j1 rel1 /\ + compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2)) + (ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2)) + = let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in + let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in + + let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2) + = Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2) + (fun _ -> + assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1)); + assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2)); + assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2))); + assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2)))) + in + + + let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\ + rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in + let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in + let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2) + = Classical.arrow_to_impl #(t1 s s2) #(t2 s s2) + (fun _ -> + assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2)); + assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)); + assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))); + assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)) + (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2))) + in + + Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1 + +noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 = + | Null + | Buffer: + max_length:U32.t -> + content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) -> + idx:U32.t -> + length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} -> + mbuffer a rrel rel + +let g_is_null #_ #_ #_ b = Null? 
b + +let mnull #_ #_ #_ = Null + +let null_unique #_ #_ #_ _ = () + +let unused_in #_ #_ #_ b h = + match b with + | Null -> False + | Buffer _ content _ _ -> content `HS.unused_in` h + +let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 = + match b with + | Null -> True + | Buffer max_length content idx length -> + compatible_sub_preorder (U32.v max_length) rrel + (U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility + +let live #_ #rrel #rel h b = + match b with + | Null -> True + | Buffer max_length content idx length -> + h `HS.contains` content /\ + buffer_compatible b + +let live_null _ _ _ _ = () + +let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) + :Lemma (requires (g_is_null b == true)) + (ensures (live h b)) + [SMTPat (live h b)] + = null_unique b; + live_null a rrel rel h + +let live_not_unused_in #_ #_ #_ _ _ = () + +let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = () + +let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) + :Lemma (requires (live h b /\ b `unused_in` h)) + (ensures False) + [SMTPat (live h b); SMTPat (b `unused_in` h)] + = live_not_unused_in h b + + + +let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b) + +let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b) + +let unused_in_equiv #_ #_ #_ b h = + if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else () + +let live_region_frameOf #_ #_ #_ _ _ = () + +let len #_ #_ #_ b = + match b with + | Null -> 0ul + | Buffer _ _ _ len -> len + +let len_null a _ _ = () + +let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false)) + [SMTPat (length b)] + = len_null a rrel rel; + null_unique b + +let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (requires (g_is_null b == true)) (ensures (length b == 0)) + [SMTPat (g_is_null b)] + = len_null a rrel rel; + null_unique b + +let as_seq #_ #_ #_ h b = + match b with + | Null -> Seq.empty + | Buffer max_len content idx len -> + Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len) + +let length_as_seq #_ #_ #_ _ _ = () + +let mbuffer_injectivity_in_first_preorder () = () + +let mgsub #a #rrel #rel sub_rel b i len = + match b with + | Null -> Null + | Buffer max_len content idx length -> + Buffer max_len content (U32.add idx i) (Ghost.hide len) + +let live_gsub #_ #rrel #rel _ b i len sub_rel = + match b with + | Null -> () + | Buffer max_len content idx length -> + let prf () : Lemma + (requires (buffer_compatible b)) + (ensures (buffer_compatible (mgsub sub_rel b i len))) + = + lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel + (U32.v idx) (U32.v idx + U32.v length) rel + (U32.v i) (U32.v i + U32.v len) sub_rel + in + Classical.move_requires prf () + +let gsub_is_null #_ #_ #_ _ _ _ _ = () + +let len_gsub #_ #_ #_ _ _ _ _ = () + +let frameOf_gsub #_ #_ #_ _ _ _ _ = () + +let as_addr_gsub #_ #_ #_ _ _ _ _ = () + +let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = () + +#push-options "--z3rlimit 20" +let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 = + let prf () : Lemma + (requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2)) + (ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2)) + = + lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + 
U32.v len1) sub_rel1 + (U32.v i2) (U32.v i2 + U32.v len2) sub_rel2 + in + Classical.move_requires prf () +#pop-options + +/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and +/// length ``len b``. + +let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel + +let as_seq_gsub #_ #_ #_ h b i len _ = + match b with + | Null -> () + | Buffer _ content idx len0 -> + Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len) + +let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b) + : Lemma (requires s1 === s2) + (ensures a == b) + = Seq.lemma_equal_instances_implies_equal_types () + +let s_lemma_equal_instances_implies_equal_types (_:unit) + : Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b). + {:pattern (has_type s1 (Seq.seq a)); + (has_type s2 (Seq.seq b)) } + s1 === s2 ==> a == b) + = Seq.lemma_equal_instances_implies_equal_types() + +let live_same_addresses_equal_types_and_preorders' + (#a1 #a2: Type0) + (#rrel1 #rel1: srel a1) + (#rrel2 #rel2: srel a2) + (b1: mbuffer a1 rrel1 rel1) + (b2: mbuffer a2 rrel2 rel2) + (h: HS.mem) +: Lemma + (requires + frameOf b1 == frameOf b2 /\ + as_addr b1 == as_addr b2 /\ + live h b1 /\ + live h b2 /\ + (~ (g_is_null b1 /\ g_is_null b2))) + (ensures + a1 == a2 /\ + rrel1 == rrel2) += Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + let s1 : Seq.seq a1 = as_seq h b1 in + assert (Seq.seq a1 == Seq.seq a2); + let s1' : Seq.seq a2 = coerce_eq _ s1 in + assert (s1 === s1'); + lemma_equal_instances_implies_equal_types a1 a2 s1 s1' + +let live_same_addresses_equal_types_and_preorders + #_ #_ #_ #_ #_ #_ b1 b2 h += Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h + +(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *) + +noeq +type ubuffer_ +: Type0 += { + b_max_length: nat; + b_offset: nat; + b_length: nat; + b_is_mm: bool; +} + +val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0 + +let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } ) + +let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr) + +let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) + :Tot (ubuffer (frameOf b) (as_addr b)) + = if Null? b + then + Ghost.hide ({ + b_max_length = 0; + b_offset = 0; + b_length = 0; + b_is_mm = false; + }) + else + Ghost.hide ({ + b_max_length = U32.v (Buffer?.max_length b); + b_offset = U32.v (Buffer?.idx b); + b_length = U32.v (Buffer?.length b); + b_is_mm = HS.is_mm (Buffer?.content b); + }) + +let ubuffer_preserved' + (#r: HS.rid) + (#a: nat) + (b: ubuffer r a) + (h h' : HS.mem) +: GTot Type0 += forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) . + ((frameOf b' == r /\ as_addr b' == a) ==> ( + (live h b' ==> live h' b') /\ ( + ((live h b' /\ live h' b' /\ Buffer? 
b') ==> ( + let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in + let Buffer max _ idx len = b' in ( + U32.v max == bmax /\ + U32.v idx <= boff /\ + boff + blen <= U32.v idx + U32.v len + ) ==> + Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen)) + ))))) + +val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0 + +let ubuffer_preserved = ubuffer_preserved' + +let ubuffer_preserved_intro + (#r:HS.rid) + (#a:nat) + (b:ubuffer r a) + (h h' :HS.mem) + (f0: ( + (t':Type0) -> + (rrel:srel t') -> (rel:srel t') -> + (b':mbuffer t' rrel rel) -> + Lemma + (requires (frameOf b' == r /\ as_addr b' == a /\ live h b')) + (ensures (live h' b')) + )) + (f: ( + (t':Type0) -> + (rrel:srel t') -> (rel:srel t') -> + (b':mbuffer t' rrel rel) -> + Lemma + (requires ( + frameOf b' == r /\ as_addr b' == a /\ + live h b' /\ live h' b' /\ + Buffer? b' /\ ( + let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in + let Buffer max _ idx len = b' in ( + U32.v max == bmax /\ + U32.v idx <= boff /\ + boff + blen <= U32.v idx + U32.v len + )))) + (ensures ( + Buffer? b' /\ ( + let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in + let Buffer max _ idx len = b' in + U32.v max == bmax /\ + U32.v idx <= boff /\ + boff + blen <= U32.v idx + U32.v len /\ + Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen)) + ))) + )) +: Lemma + (ubuffer_preserved b h h') += let g' + (t':Type0) (rrel rel:srel t') + (b':mbuffer t' rrel rel) + : Lemma + ((frameOf b' == r /\ as_addr b' == a) ==> ( + (live h b' ==> live h' b') /\ ( + ((live h b' /\ live h' b' /\ Buffer? b') ==> ( + let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in + let Buffer max _ idx len = b' in ( + U32.v max == bmax /\ + U32.v idx <= boff /\ + boff + blen <= U32.v idx + U32.v len + ) ==> + Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen)) + ))))) + = Classical.move_requires (f0 t' rrel rel) b'; + Classical.move_requires (f t' rrel rel) b' + in + Classical.forall_intro_4 g' + +val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma + (ubuffer_preserved b h h) + +let ubuffer_preserved_refl #r #a b h = () + +val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma + (requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3)) + (ensures (ubuffer_preserved b h1 h3)) + +let ubuffer_preserved_trans #r #a b h1 h2 h3 = () + +val same_mreference_ubuffer_preserved + (#r: HS.rid) + (#a: nat) + (b: ubuffer r a) + (h1 h2: HS.mem) + (f: ( + (a' : Type) -> + (pre: Preorder.preorder a') -> + (r': HS.mreference a' pre) -> + Lemma + (requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r')) + (ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r')) + )) +: Lemma + (ubuffer_preserved b h1 h2) + +let same_mreference_ubuffer_preserved #r #a b h1 h2 f = + ubuffer_preserved_intro b h1 h2 + (fun t' _ _ b' -> + if Null? b' + then () + else + f _ _ (Buffer?.content b') + ) + (fun t' _ _ b' -> + if Null? 
b' + then () + else + f _ _ (Buffer?.content b') + ) + +val addr_unused_in_ubuffer_preserved + (#r: HS.rid) + (#a: nat) + (b: ubuffer r a) + (h1 h2: HS.mem) +: Lemma + (requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r))) + (ensures (ubuffer_preserved b h1 h2)) + +let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = () + +val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b)) + +let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b + +let ubuffer_of_buffer_from_to_none_cond + #a #rrel #rel (b: mbuffer a rrel rel) from to +: GTot bool += g_is_null b || U32.v to < U32.v from || U32.v from > length b + +let ubuffer_of_buffer_from_to + #a #rrel #rel (b: mbuffer a rrel rel) from to +: GTot (ubuffer (frameOf b) (as_addr b)) += if ubuffer_of_buffer_from_to_none_cond b from to + then + Ghost.hide ({ + b_max_length = 0; + b_offset = 0; + b_length = 0; + b_is_mm = false; + }) + else + let to' = if U32.v to > length b then length b else U32.v to in + let b1 = ubuffer_of_buffer b in + Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from }) + +val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem) + :Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b)) + (ensures (live h' b /\ as_seq h b == as_seq h' b)) + +let ubuffer_preserved_elim #_ #_ #_ _ _ _ = () + +val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem) + :Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b)) + (ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))) + +let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = () + +let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) (h h':HS.mem) + : Lemma (requires (b `unused_in` h)) + (ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h')) + = Classical.move_requires (fun b -> live_not_unused_in h b) b; + live_null a rrel rel h; + null_unique b; + unused_in_equiv b h; + addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' + +let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 = + larger.b_is_mm == smaller.b_is_mm /\ + larger.b_max_length == smaller.b_max_length /\ + larger.b_offset <= smaller.b_offset /\ + smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length + +(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *) +let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) = + r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller) + +val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0 + +let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller + +val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma + (b `ubuffer_includes` b) + +let ubuffer_includes_refl #r #a b = () + +val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma + (requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3)) + (ensures (b1 `ubuffer_includes` b3)) + +let 
ubuffer_includes_trans #r #a b1 b2 b3 = () + +(* + * TODO: not sure how to make this lemma work with preorders + * it creates a buffer larger' in the proof + * we need a compatible preorder for that + * may be take that as an argument? + *) +(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma + (requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2)) + (ensures (ubuffer_preserved smaller h1 h2)) +let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 = + ubuffer_preserved_intro smaller h1 h2 (fun t' b' -> + if Null? b' + then () + else + let (Buffer max_len content idx' len') = b' in + let idx = U32.uint_to_t (G.reveal larger).b_offset in + let len = U32.uint_to_t (G.reveal larger).b_length in + let larger' = Buffer max_len content idx len in + assert (b' == gsub larger' (U32.sub idx' idx) len'); + ubuffer_preserved_elim larger' h1 h2 + )*) + +let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 = + if x1.b_length = 0 || x2.b_length = 0 + then True + else + (x1.b_max_length == x2.b_max_length /\ + (x1.b_offset + x1.b_length <= x2.b_offset \/ + x2.b_offset + x2.b_length <= x1.b_offset)) + +(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *) +let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) = + r1 == r2 /\ a1 == a2 /\ + ubuffer_disjoint' (G.reveal b1) (G.reveal b2) + +val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0 +let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2 + +val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a) + :Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1) +let ubuffer_disjoint_sym #_ #_ b1 b2 = () + +val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma + (requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2)) + (ensures (ubuffer_disjoint smaller1 smaller2)) + +let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = () + +val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a) + (h h':HS.mem) (b:mbuffer a rrel rel) + (f: ( + (t':Type0) -> + (pre: Preorder.preorder t') -> + (r: HS.mreference t' pre) -> + Lemma + (requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r)) + (ensures (h' `HS.contains` r)) + )) + :Lemma (requires (live h b)) (ensures (live h' b)) + +let liveness_preservation_intro #_ #_ #_ _ _ b f = + if Null? b + then () + else f _ _ (Buffer?.content b) + +(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *) + +let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 = + forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) . + h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r) + +let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 = + forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r + +let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 = + forall (r: HS.rid) (n: nat) . 
( + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r) + ) ==> ( + n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r) + ) + +let modifies_0' (h1 h2: HS.mem) : GTot Type0 = + modifies_0_preserves_mreferences h1 h2 /\ + modifies_0_preserves_regions h1 h2 /\ + modifies_0_preserves_not_unused_in h1 h2 + +val modifies_0 (h1 h2: HS.mem) : GTot Type0 + +let modifies_0 = modifies_0' + +val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma + (requires (modifies_0 h1 h2 /\ HS.live_region h1 r)) + (ensures (HS.live_region h2 r)) + +let modifies_0_live_region h1 h2 r = () + +val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma + (requires (modifies_0 h1 h2 /\ h1 `HS.contains` r)) + (ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r)) + +let modifies_0_mreference #a #pre h1 h2 r = () + +let modifies_0_ubuffer + (#r: HS.rid) + (#a: nat) + (b: ubuffer r a) + (h1 h2: HS.mem) +: Lemma + (requires (modifies_0 h1 h2)) + (ensures (ubuffer_preserved b h1 h2)) += same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r') + +val modifies_0_unused_in + (h1 h2: HS.mem) + (r: HS.rid) + (n: nat) +: Lemma + (requires ( + modifies_0 h1 h2 /\ + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r) + )) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))) + +let modifies_0_unused_in h1 h2 r n = () + +let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) + :GTot Type0 + = forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). + ((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==> + (h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r') + +let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) + : GTot Type0 + = forall (b':ubuffer (frameOf b) (as_addr b)). + (ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2 + +let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) + : GTot Type0 + = forall (b':ubuffer (frameOf b) (as_addr b)). + (ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2 + +let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) + : GTot Type0 + = forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). 
h1 `HS.contains` r' ==> h2 `HS.contains` r' + +let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) + : GTot Type0 + = modifies_0_preserves_regions h1 h2 /\ + modifies_1_preserves_mreferences b h1 h2 /\ + modifies_1_preserves_livenesses b h1 h2 /\ + modifies_0_preserves_not_unused_in h1 h2 /\ + modifies_1_preserves_ubuffers b h1 h2 + +val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 + +let modifies_1 = modifies_1' + +let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) + : GTot Type0 + = if ubuffer_of_buffer_from_to_none_cond b from to + then modifies_0 h1 h2 + else + modifies_0_preserves_regions h1 h2 /\ + modifies_1_preserves_mreferences b h1 h2 /\ + modifies_1_preserves_livenesses b h1 h2 /\ + modifies_0_preserves_not_unused_in h1 h2 /\ + modifies_1_from_to_preserves_ubuffers b from to h1 h2 + +val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) + :Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r)) + +let modifies_1_live_region #_ #_ #_ _ _ _ _ = () + +let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) + :Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r)) += () + +val modifies_1_liveness + (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) + (#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre) + :Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r')) + +let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = () + +let modifies_1_from_to_liveness + (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) + (#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre) + :Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r')) += () + +val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat) + :Lemma (requires (modifies_1 b h1 h2 /\ + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r))) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))) +let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = () + +let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat) + :Lemma (requires (modifies_1_from_to b from to h1 h2 /\ + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r))) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))) += () + +val modifies_1_mreference + (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) + (#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre) + : Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r')) + (ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r')) +let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = () + +let modifies_1_from_to_mreference + (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) + (#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre) + : 
Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r')) + (ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r')) += () + +val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b)) + : Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b')) + (ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2)) +let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = () + +let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b)) + : Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b')) + (ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2)) += () + +val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) (h1 h2:HS.mem) + : Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b)) + (ensures (modifies_0 h1 h2)) +let modifies_1_null #_ #_ #_ _ _ _ = () + +let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) + :GTot Type0 + = forall (r: HS.rid) (n: nat) . + ((r <> frameOf b \/ n <> as_addr b) /\ + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==> + (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)) + +let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 = + modifies_0_preserves_regions h1 h2 /\ + modifies_1_preserves_mreferences b h1 h2 /\ + modifies_addr_of_preserves_not_unused_in b h1 h2 + +val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 +let modifies_addr_of = modifies_addr_of' + +val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) + :Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r)) + (ensures (HS.live_region h2 r)) +let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = () + +val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) (h1 h2:HS.mem) + (#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre) + : Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r')) + (ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r')) +let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = () + +val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat) + : Lemma (requires (modifies_addr_of b h1 h2 /\ + (r <> frameOf b \/ n <> as_addr b) /\ + HS.live_region h1 r /\ HS.live_region h2 r /\ + n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r))) + (ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))) +let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = () + +module MG = FStar.ModifiesGen + +let cls : MG.cls ubuffer = MG.Cls #ubuffer + ubuffer_includes + (fun #r #a x -> ubuffer_includes_refl x) + (fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3) + ubuffer_disjoint + (fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2) + (fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2) 
+ ubuffer_preserved + (fun #r #a x h -> ubuffer_preserved_refl x h) + (fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3) + (fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f) + +let loc = MG.loc cls +let _ = intro_ambient loc + +let loc_none = MG.loc_none +let _ = intro_ambient loc_none + +let loc_union = MG.loc_union +let _ = intro_ambient loc_union + +let loc_union_idem = MG.loc_union_idem + +let loc_union_comm = MG.loc_union_comm + +let loc_union_assoc = MG.loc_union_assoc + +let loc_union_idem_1 + (s1 s2: loc) +: Lemma + (loc_union s1 (loc_union s1 s2) == loc_union s1 s2) + [SMTPat (loc_union s1 (loc_union s1 s2))] += loc_union_assoc s1 s1 s2 + +let loc_union_idem_2 + (s1 s2: loc) +: Lemma + (loc_union (loc_union s1 s2) s2 == loc_union s1 s2) + [SMTPat (loc_union (loc_union s1 s2) s2)] += loc_union_assoc s1 s2 s2 + +let loc_union_loc_none_l = MG.loc_union_loc_none_l + +let loc_union_loc_none_r = MG.loc_union_loc_none_r + +let loc_buffer_from_to #a #rrel #rel b from to = + if ubuffer_of_buffer_from_to_none_cond b from to + then MG.loc_none + else + MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) + +let loc_buffer #_ #_ #_ b = + if g_is_null b then MG.loc_none + else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) + +let loc_buffer_eq #_ #_ #_ _ = () + +let loc_buffer_from_to_high #_ #_ #_ _ _ _ = () + +let loc_buffer_from_to_none #_ #_ #_ _ _ _ = () + +let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = () + +let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = () + +let loc_buffer_null _ _ _ = () + +let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = () + +let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = () + +let loc_addresses = MG.loc_addresses + +let loc_regions = MG.loc_regions + +let loc_includes = MG.loc_includes + +let loc_includes_refl = MG.loc_includes_refl + +let loc_includes_trans = MG.loc_includes_trans + +let loc_includes_trans_backwards + (s1 s2 s3: loc) +: Lemma + (requires (loc_includes s1 s2 /\ loc_includes s2 s3)) + (ensures (loc_includes s1 s3)) + [SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)] += loc_includes_trans s1 s2 s3 + + +let loc_includes_union_r = MG.loc_includes_union_r + +let loc_includes_union_l = MG.loc_includes_union_l + +let loc_includes_union_l' + (s1 s2 s: loc) + : Lemma + (requires (loc_includes s1 s \/ loc_includes s2 s)) + (ensures (loc_includes (loc_union s1 s2) s)) + [SMTPat (loc_includes (loc_union s1 s2) s)] + = loc_includes_union_l s1 s2 s + +let loc_includes_union_r' + (s s1 s2: loc) +: Lemma + (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2)) + [SMTPat (loc_includes s (loc_union s1 s2))] += Classical.move_requires (loc_includes_union_r s s1) s2; + Classical.move_requires (loc_includes_union_l s1 s2) s1; + Classical.move_requires (loc_includes_union_l s1 s2) s2; + Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1; + Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2 + +let loc_includes_none = MG.loc_includes_none + +val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a) + (b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2) + :Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ + ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2))) + (ensures (loc_includes (loc_buffer b1) (loc_buffer b2))) +let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 = + let t1 = ubuffer (frameOf b1) 
(as_addr b1) in + MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2) + +let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel = + let b' = mgsub sub_rel b i len in + loc_includes_buffer b b'; + loc_includes_trans l (loc_buffer b) (loc_buffer b') + +let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a) + :Lemma (requires (UInt32.v i + UInt32.v len <= (length b))) + (ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len)))) + [SMTPat (mgsub sub_rel b i len)] + = () + +let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 = + let b1 = mgsub sub_rel1 b i1 len1 in + let b2 = mgsub sub_rel2 b i2 len2 in + loc_includes_buffer b1 b2 + +let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to = + if ubuffer_of_buffer_from_to_none_cond b from to + then () + else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to) + +let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 = + if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2 + then () + else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2) + +#push-options "--z3rlimit 20" +let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller = + if Null? smaller then () else + if Null? larger then begin + MG.loc_includes_none_elim (loc_buffer smaller); + MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller) + end else begin + MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller); + let ul = Ghost.reveal (ubuffer_of_buffer larger) in + let us = Ghost.reveal (ubuffer_of_buffer smaller) in + assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller)); + assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller)) + end +#pop-options + +let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p = + MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p) + +let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b)) + [SMTPat (loc_buffer b)] + = () + +let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b = + MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) + +let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b)) + [SMTPat (loc_buffer b)] + = () + +let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls + +let loc_includes_region_addresses' + (preserve_liveness: bool) + (r: HS.rid) + (a: Set.set nat) +: Lemma + (loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a)) + [SMTPat (loc_addresses preserve_liveness r a)] += () + +let loc_includes_region_region = MG.loc_includes_region_region #_ #cls + +let loc_includes_region_region' + (preserve_liveness: bool) + (s: Set.set 
HS.rid) +: Lemma + (loc_includes (loc_regions false s) (loc_regions preserve_liveness s)) + [SMTPat (loc_regions preserve_liveness s)] += () + +let loc_includes_region_union_l = MG.loc_includes_region_union_l + +let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls + +let loc_includes_addresses_addresses_1 + (preserve_liveness1 preserve_liveness2: bool) + (r1 r2: HS.rid) + (s1 s2: Set.set nat) +: Lemma + (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1)) + (ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))) + [SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))] += loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2 + +let loc_includes_addresses_addresses_2 + (preserve_liveness: bool) + (r: HS.rid) + (s: Set.set nat) +: Lemma + (loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s)) + [SMTPat (loc_addresses preserve_liveness r s)] += () + +let loc_includes_union_l_buffer + (s1 s2:loc) + (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + :Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b))) + (ensures (loc_includes (loc_union s1 s2) (loc_buffer b))) + [SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] + = loc_includes_union_l s1 s2 (loc_buffer b) + +let loc_includes_union_l_addresses + (s1 s2: loc) + (prf: bool) + (r: HS.rid) + (a: Set.set nat) +: Lemma + (requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))) + (ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a))) + [SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] += loc_includes_union_l s1 s2 (loc_addresses prf r a) + +let loc_includes_union_l_regions + (s1 s2: loc) + (prf: bool) + (r: Set.set HS.rid) +: Lemma + (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r))) + (ensures (loc_includes (loc_union s1 s2) (loc_regions prf r))) + [SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] += loc_includes_union_l s1 s2 (loc_regions prf r) + +let loc_disjoint = MG.loc_disjoint + +let loc_disjoint_sym = MG.loc_disjoint_sym + +let loc_disjoint_sym' + (s1 s2: loc) +: Lemma + (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) + [SMTPat (loc_disjoint s1 s2)] += Classical.move_requires (loc_disjoint_sym s1) s2; + Classical.move_requires (loc_disjoint_sym s2) s1 + +let loc_disjoint_none_r = MG.loc_disjoint_none_r + +let loc_disjoint_union_r = MG.loc_disjoint_union_r + +let loc_disjoint_includes = MG.loc_disjoint_includes + +let loc_disjoint_union_r' + (s s1 s2: loc) +: Lemma + (ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2))) + [SMTPat (loc_disjoint s (loc_union s1 s2))] += Classical.move_requires (loc_disjoint_union_r s s1) s2; + loc_includes_union_l s1 s2 s1; + loc_includes_union_l s1 s2 s2; + Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1; + Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2 + +val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) + :Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==> + ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2))) + (ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2))) +let 
loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2) + +let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma + (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2)) + (ensures (loc_disjoint b1 b2')) + [SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] + = loc_disjoint_includes b1 b2 b1 b2' + +let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 = + loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2) + +let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 = + if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2 + then () + else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2) + +let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls + +let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls + +let modifies = MG.modifies + +let modifies_live_region = MG.modifies_live_region + +let modifies_mreference_elim = MG.modifies_mreference_elim + +let modifies_buffer_elim #_ #_ #_ b p h h' = + if g_is_null b + then + assert (as_seq h b `Seq.equal` as_seq h' b) + else begin + MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ; + ubuffer_preserved_elim b h h' + end + +let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' = + if g_is_null b + then () + else begin + MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ; + ubuffer_preserved_from_to_elim b from to h h' + end + +let modifies_refl = MG.modifies_refl + +let modifies_loc_includes = MG.modifies_loc_includes + +let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _ + +let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _ + +let address_liveness_insensitive_buffer #_ #_ #_ b = + MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) + +let address_liveness_insensitive_addresses = + MG.loc_includes_address_liveness_insensitive_locs_addresses cls + +let region_liveness_insensitive_buffer #_ #_ #_ b = + MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) + +let region_liveness_insensitive_addresses = + MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls + +let region_liveness_insensitive_regions = + MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls + +let region_liveness_insensitive_address_liveness_insensitive = + MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls + +let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness + +let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x = + if g_is_null x then () + else + liveness_preservation_intro h h' x (fun t' pre r -> + MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x)) + +let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness + +let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference + +let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x = + if g_is_null x then () + else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) 
(ubuffer_of_buffer x) + +let modifies_liveness_insensitive_region_weak + (l2 : loc) + (h h' : HS.mem) + (x: HS.rid) +: Lemma + (requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x)) + (ensures (HS.live_region h' x)) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]; + ]] += modifies_liveness_insensitive_region loc_none l2 h h' x + +let modifies_liveness_insensitive_region_mreference_weak + (l2 : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) + : Lemma (requires (modifies l2 h h' /\ + region_liveness_insensitive_locs `loc_includes` l2 /\ + HS.live_region h (HS.frameOf x))) + (ensures (HS.live_region h' (HS.frameOf x))) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]; + ]] + = modifies_liveness_insensitive_region_mreference loc_none l2 h h' x + +let modifies_liveness_insensitive_region_buffer_weak + (l2:loc) + (h h':HS.mem) + (#a:Type0) (#rrel #rel:srel a) + (x:mbuffer a rrel rel) + :Lemma (requires (modifies l2 h h' /\ + region_liveness_insensitive_locs `loc_includes` l2 /\ + HS.live_region h (frameOf x))) + (ensures (HS.live_region h' (frameOf x))) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]; + ]] + = modifies_liveness_insensitive_region_buffer loc_none l2 h h' x + +let modifies_trans = MG.modifies_trans + +let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem) + : Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l)) + (ensures (modifies l_goal h1 h3)) + [SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] + = modifies_trans l h1 h2 l_goal h3 + +let modifies_only_live_regions = MG.modifies_only_live_regions + +let no_upd_fresh_region = MG.no_upd_fresh_region + +let new_region_modifies = MG.new_region_modifies #_ cls + +let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped + +let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls + +let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls + +let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls + +let modifies_salloc_post = MG.modifies_salloc_post #_ #cls + +let modifies_free = MG.modifies_free #_ #cls + +let modifies_none_modifies = MG.modifies_none_modifies #_ #cls + +let modifies_upd = MG.modifies_upd #_ #cls + +val modifies_0_modifies + (h1 h2: HS.mem) +: Lemma + (requires (modifies_0 h1 h2)) + (ensures (modifies loc_none h1 h2)) +let modifies_0_modifies h1 h2 = + MG.modifies_none_intro #_ #cls h1 h2 + (fun r -> modifies_0_live_region h1 h2 r) + (fun t pre b -> modifies_0_mreference #t #pre h1 h2 b) + (fun r n -> modifies_0_unused_in h1 h2 r n) + +val modifies_1_modifies + (#a:Type0)(#rrel #rel:srel a) + (b:mbuffer a rrel rel) (h1 h2:HS.mem) + :Lemma (requires (modifies_1 b h1 h2)) + (ensures (modifies (loc_buffer b) h1 h2)) +let modifies_1_modifies #t #_ #_ b h1 h2 = + if g_is_null b + then begin + modifies_1_null b h1 h2; + modifies_0_modifies h1 h2 + end else + MG.modifies_intro (loc_buffer b) h1 h2 + (fun r -> modifies_1_live_region b h1 h2 r) + (fun t pre p -> + loc_disjoint_sym (loc_mreference p) (loc_buffer b); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton 
(HS.as_addr p)); + modifies_1_mreference b h1 h2 p + ) + (fun t pre p -> + modifies_1_liveness b h1 h2 p + ) + (fun r n -> + modifies_1_unused_in b h1 h2 r n + ) + (fun r' a' b' -> + loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b); + MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b'; + if frameOf b = r' && as_addr b = a' + then + modifies_1_ubuffer #t b h1 h2 b' + else + same_mreference_ubuffer_preserved #r' #a' b' h1 h2 + (fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_) + ) + +val modifies_1_from_to_modifies + (#a:Type0)(#rrel #rel:srel a) + (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) + :Lemma (requires (modifies_1_from_to b from to h1 h2)) + (ensures (modifies (loc_buffer_from_to b from to) h1 h2)) +let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 = + if ubuffer_of_buffer_from_to_none_cond b from to + then begin + modifies_0_modifies h1 h2 + end else + MG.modifies_intro (loc_buffer_from_to b from to) h1 h2 + (fun r -> modifies_1_from_to_live_region b from to h1 h2 r) + (fun t pre p -> + loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p)); + modifies_1_from_to_mreference b from to h1 h2 p + ) + (fun t pre p -> + modifies_1_from_to_liveness b from to h1 h2 p + ) + (fun r n -> + modifies_1_from_to_unused_in b from to h1 h2 r n + ) + (fun r' a' b' -> + loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to); + MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b'; + if frameOf b = r' && as_addr b = a' + then + modifies_1_from_to_ubuffer #t b from to h1 h2 b' + else + same_mreference_ubuffer_preserved #r' #a' b' h1 h2 + (fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_) + ) + +val modifies_addr_of_modifies + (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (h1 h2:HS.mem) + :Lemma (requires (modifies_addr_of b h1 h2)) + (ensures (modifies (loc_addr_of_buffer b) h1 h2)) +let modifies_addr_of_modifies #t #_ #_ b h1 h2 = + MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2 + (fun r -> modifies_addr_of_live_region b h1 h2 r) + (fun t pre p -> + modifies_addr_of_mreference b h1 h2 p + ) + (fun r n -> + modifies_addr_of_unused_in b h1 h2 r n + ) + +val modifies_loc_buffer_from_to_intro' + (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + (from to: U32.t) + (l: loc) (h h' : HS.mem) +: Lemma + (requires ( + let s = as_seq h b in + let s' = as_seq h' b in + not (g_is_null b) /\ + live h b /\ + modifies (loc_union l (loc_buffer b)) h h' /\ + U32.v from <= U32.v to /\ + U32.v to <= length b /\ + Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\ + Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b) + )) + (ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h')) + +#push-options "--z3rlimit 16" + +let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' = + let r0 = frameOf b in + let a0 = as_addr b in + let bb : ubuffer r0 a0 = ubuffer_of_buffer b in + modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b)); + MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) -> + ubuffer_preserved_intro x h h' + (fun t' rrel' rel' b' -> f _ _ (Buffer?.content b')) + (fun t' rrel' rel' b' 
-> + // prove that the types, rrels, rels are equal + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + assert (Seq.seq t' == Seq.seq a); + let _s0 : Seq.seq a = as_seq h b in + let _s1 : Seq.seq t' = coerce_eq _ _s0 in + lemma_equal_instances_implies_equal_types a t' _s0 _s1; + let boff = U32.v (Buffer?.idx b) in + let from_ = boff + U32.v from in + let to_ = boff + U32.v to in + let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in + let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in + let bh = as_seq h b in + let bh' = as_seq h' b in + let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in + let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in + let prf (i: nat) : Lemma + (requires (i < xlen)) + (ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i)) + = let xi = xoff + i in + let bi : ubuffer r0 a0 = + Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; }) + in + assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0); + assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0); + let li = MG.loc_of_aloc bi in + MG.loc_includes_aloc #_ #cls x bi; + loc_disjoint_includes l (MG.loc_of_aloc x) l li; + if xi < boff || boff + length b <= xi + then begin + MG.loc_disjoint_aloc_intro #_ #cls bb bi; + assert (loc_disjoint (loc_union l (loc_buffer b)) li); + MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h' + end else + if xi < from_ + then begin + assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff)); + assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff)) + end else begin + assert (to_ <= xi); + assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_)); + assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_)) + end + in + Classical.forall_intro (Classical.move_requires prf); + assert (xh `Seq.equal` xh') + ) + ) + +#pop-options + +let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' = + if g_is_null b + then () + else modifies_loc_buffer_from_to_intro' b from to l h h' + +let does_not_contain_addr = MG.does_not_contain_addr + +let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr + +let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr + +let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr + +let free_does_not_contain_addr = MG.free_does_not_contain_addr + +let does_not_contain_addr_elim = MG.does_not_contain_addr_elim + +let modifies_only_live_addresses = MG.modifies_only_live_addresses + +let loc_not_unused_in = MG.loc_not_unused_in _ + +let loc_unused_in = MG.loc_unused_in _ + +let loc_regions_unused_in = MG.loc_regions_unused_in cls + +let loc_unused_in_not_unused_in_disjoint = + MG.loc_unused_in_not_unused_in_disjoint cls + +let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls + +let fresh_frame_loc_not_unused_in_disjoint + (h0 h1: HS.mem) +: Lemma + (requires (HS.fresh_frame h0 h1)) + (ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0))) + [SMTPat (HS.fresh_frame h0 h1)] += not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1) + +let live_loc_not_unused_in #_ #_ #_ b h = + unused_in_equiv b h; + 
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b); + MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h; + () + +let unused_in_loc_unused_in #_ #_ #_ b h = + unused_in_equiv b h; + Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b); + MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h; + () + +let modifies_address_liveness_insensitive_unused_in = + MG.modifies_address_liveness_insensitive_unused_in cls + +let modifies_only_not_unused_in = MG.modifies_only_not_unused_in + +let mreference_live_loc_not_unused_in = + MG.mreference_live_loc_not_unused_in cls + +let mreference_unused_in_loc_unused_in = + MG.mreference_unused_in_loc_unused_in cls + +let unused_in_not_unused_in_disjoint_2 + (l1 l2 l1' l2': loc) + (h: HS.mem) +: Lemma + (requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' )) + (ensures (loc_disjoint l1' l2' )) + [SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)] += loc_includes_trans (loc_unused_in h) l1 l1' ; + loc_includes_trans (loc_not_unused_in h) l2 l2' ; + loc_unused_in_not_unused_in_disjoint h ; + loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2' + +let modifies_loc_unused_in l h1 h2 l' = + modifies_loc_includes address_liveness_insensitive_locs h1 h2 l; + modifies_address_liveness_insensitive_unused_in h1 h2; + loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l' + +let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem) + (x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma + (requires (HST.ralloc_post i init m0 x m1)) + (ensures (fresh_loc (loc_freed_mreference x) m0 m1)) + [SMTPat (HST.ralloc_post i init m0 x m1)] += () + +let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1 + +let popped_modifies = MG.popped_modifies #_ cls + +let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 = + modifies_only_not_unused_in l_goal h1 h3 + +let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc) + : Lemma (requires (HS.fresh_frame h1 h2 /\ + modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3)) + (ensures (modifies l h1 h3)) + [SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)] + = loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2))); + modifies_only_not_unused_in l h1 h3 + +let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 = + if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then + MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2) + else () + +let empty_disjoint + #t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2 += let r = frameOf b1 in + let a = as_addr b1 in + if r = frameOf b2 && a = as_addr b2 then + MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2) + else () + +(* +let includes_live #a #rrel #rel1 #rel2 h larger smaller = + if Null? larger || Null? smaller then () + else + MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller) +*) + +let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller = + if Null? larger || Null? 
smaller then () + else + MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller) + +let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h = + if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 + then begin + HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2); + loc_disjoint_buffer b1 b2 + end + else + loc_disjoint_buffer b1 b2 + +let is_null #_ #_ #_ b = Null? b + +let msub #a #rrel #rel sub_rel b i len = + match b with + | Null -> Null + | Buffer max_len content i0 len0 -> + Buffer max_len content (U32.add i0 i) len + +let moffset #a #rrel #rel sub_rel b i = + match b with + | Null -> Null + | Buffer max_len content i0 len -> + Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i))) + +let index #_ #_ #_ b i = + let open HST in + let s = ! (Buffer?.content b) in + Seq.index s (U32.v (Buffer?.idx b) + U32.v i) + +let g_upd_seq #_ #_ #_ b s h = + if Seq.length s = 0 then h + else + let s0 = HS.sel h (Buffer?.content b) in + let Buffer _ content idx length = b in + HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s) + +let lemma_g_upd_with_same_seq #_ #_ #_ b h = + if Null? b then () + else + let open FStar.UInt32 in + let Buffer _ content idx length = b in + let s = HS.sel h content in + assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s); + HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b) + +#push-options "--z3rlimit 48" +let g_upd_seq_as_seq #a #_ #_ b s h = + let h' = g_upd_seq b s h in + if g_is_null b then assert (Seq.equal s Seq.empty) + else begin + assert (Seq.equal (as_seq h' b) s); + // prove modifies_1_preserves_ubuffers + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + s_lemma_equal_instances_implies_equal_types (); + modifies_1_modifies b h h' + end + +let g_upd_modifies_strong #_ #_ #_ b i v h = + let h' = g_upd b i v h in + // prove modifies_1_from_to_preserves_ubuffers + Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm (); + s_lemma_equal_instances_implies_equal_types (); + modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h' +#pop-options + +let upd' #_ #_ #_ b i v = + let open HST in + let h = get() in + let Buffer max_length content idx len = b in + let s0 = !content in + let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in + let s_upd = Seq.upd sb0 (U32.v i) v in + let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in + assert (sf `Seq.equal` + Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v)); + content := sf + +let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 = + (not (g_is_null b)) ==> ( + HST.is_eternal_region (frameOf b) /\ + not (HS.is_mm (Buffer?.content b)) /\ + buffer_compatible b + ) + +let region_lifetime_buf #_ #_ #_ b = + (not (g_is_null b)) ==> ( + HS.is_heap_color (HS.color (frameOf b)) /\ + not (HS.is_mm (Buffer?.content b)) /\ + buffer_compatible b + ) + +let region_lifetime_sub #a #rrel #rel #subrel b0 b1 = + match b1 with + | Null -> () + | Buffer max_len content idx length -> + assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). 
compatible_sub_preorder len rrel i j subrel) + +let recallable_null #_ #_ #_ = () + +let recallable_mgsub #_ #rrel #rel b i len sub_rel = + match b with + | Null -> () + | Buffer max_len content idx length -> + lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel + (U32.v idx) (U32.v idx + U32.v length) rel + (U32.v i) (U32.v i + U32.v len) sub_rel + +(* +let recallable_includes #_ #_ #_ #_ #_ #_ larger smaller = + if Null? larger || Null? smaller then () + else + MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller) +*) + +let recall #_ #_ #_ b = if Null? b then () else HST.recall (Buffer?.content b) + +private let spred_as_mempred (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) + :HST.mem_predicate + = fun h -> + buffer_compatible b ==> + p (as_seq h b) + +let witnessed #_ #rrel #rel b p = + match b with + | Null -> p Seq.empty + | Buffer max_length content idx length -> + HST.token_p content (spred_as_mempred b p) + +private let lemma_stable_on_rel_is_stable_on_rrel (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (p:spred a) + :Lemma (requires (Buffer? b /\ stable_on p rel)) + (ensures (HST.stable_on (spred_as_mempred b p) (Buffer?.content b))) + = let Buffer max_length content idx length = b in + let mp = spred_as_mempred b p in + let aux (h0 h1:HS.mem) :Lemma ((mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content)) ==> mp h1) + = Classical.arrow_to_impl #(mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content) /\ buffer_compatible b) #(mp h1) + (fun _ -> assert (rel (as_seq h0 b) (as_seq h1 b))) + in + Classical.forall_intro_2 aux + +let witness_p #a #rrel #rel b p = + match b with + | Null -> () + | Buffer _ content _ _ -> + lemma_stable_on_rel_is_stable_on_rrel b p; + //AR: TODO: the proof doesn't go through without this assertion, which should follow directly from the lemma call + assert (HST.stable_on #(Seq.lseq a (U32.v (Buffer?.max_length b))) #(srel_to_lsrel (U32.v (Buffer?.max_length b)) rrel) (spred_as_mempred b p) (Buffer?.content b)); + HST.witness_p content (spred_as_mempred b p) + +let recall_p #_ #_ #_ b p = + match b with + | Null -> () + | Buffer _ content _ _ -> HST.recall_p content (spred_as_mempred b p) + +let witnessed_functorial #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 = + match b1, b2 with + | Null, Null -> assert (as_seq HS.empty_mem b1 == Seq.empty) + | Buffer _ content _ _, _ -> + assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). 
compatible_sub_preorder len rrel i j rel1); + HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2) + +let witnessed_functorial_st #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 = + match b1, b2 with + | Null, Null -> () + | Buffer _ content _ _, _ -> + HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2) + +let freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) = + (not (g_is_null b)) /\ + HS.is_mm (Buffer?.content b) /\ + HS.is_heap_color (HS.color (frameOf b)) /\ + U32.v (Buffer?.max_length b) > 0 /\ + Buffer?.idx b == 0ul /\ + Ghost.reveal (Buffer?.length b) == Buffer?.max_length b + +let free #_ #_ #_ b = HST.rfree (Buffer?.content b) + +let freeable_length #_ #_ #_ b = () + +let freeable_disjoint #_ #_ #_ #_ #_ #_ b1 b2 = + if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then + MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2) + +let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) + :Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2)) + (ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2))) + [SMTPat (freeable b1); SMTPat (disjoint b1 b2)] + = freeable_disjoint b1 b2 + +private let alloc_heap_common (#a:Type0) (#rrel:srel a) + (r:HST.erid) (len:U32.t{U32.v len > 0}) (s:Seq.seq a{Seq.length s == U32.v len}) + (mm:bool) + :HST.ST (lmbuffer a rrel rrel (U32.v len)) + (requires (fun _ -> True)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 s /\ + frameOf b == r /\ + HS.is_mm (Buffer?.content b) == mm /\ + Buffer?.idx b == 0ul /\ + Ghost.reveal (Buffer?.length b) == Buffer?.max_length b)) + = lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + if mm then HST.ralloc_mm r s else HST.ralloc r s + in + let b = Buffer len content 0ul (Ghost.hide len) in + b + +let mgcmalloc #_ #_ r init len = + alloc_heap_common r len (Seq.create (U32.v len) init) false + +private let read_sub_buffer (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (idx len:U32.t) + : HST.ST (Seq.seq a) + (requires fun h0 -> + live h0 b /\ U32.v len > 0 /\ + U32.v idx + U32.v len <= length b) + (ensures fun h0 s h1 -> + h0 == h1 /\ + s == Seq.slice (as_seq h0 b) (U32.v idx) (U32.v idx + U32.v len)) + = let open HST in + let s = ! 
(Buffer?.content b) in //the whole allocation unit + let s = Seq.slice s (U32.v (Buffer?.idx b)) + (U32.v (Buffer?.max_length b)) in //b buffer + Seq.slice s (U32.v idx) (U32.v idx + U32.v len) //slice of b + +let mgcmalloc_and_blit #_ #_ r #_ #_ src id_src len = + alloc_heap_common r len (read_sub_buffer src id_src len) false + +let mmalloc #_ #_ r init len = + alloc_heap_common r len (Seq.create (U32.v len) init) true + +let mmalloc_and_blit #_ #_ r #_ #_ src id_src len = + alloc_heap_common r len (read_sub_buffer src id_src len) true + +let malloca #a #rrel init len = + lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + HST.salloc (Seq.create (U32.v len) init) + in + Buffer len content 0ul (Ghost.hide len) + +let malloca_and_blit #a #rrel #_ #_ src id_src len = + lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + HST.salloc (read_sub_buffer src id_src len) + in + Buffer len content 0ul (Ghost.hide len) + +let malloca_of_list #a #rrel init = + let len = U32.uint_to_t (FStar.List.Tot.length init) in + let s = Seq.seq_of_list init in + lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + HST.salloc s + in + Buffer len content 0ul (Ghost.hide len) + +let mgcmalloc_of_list #a #rrel r init = + let len = U32.uint_to_t (FStar.List.Tot.length init) in + let s = Seq.seq_of_list init in + lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + HST.ralloc r s + in + Buffer len content 0ul (Ghost.hide len) + +let mmalloc_drgn #a #rrel d init len = + lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content : HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + HST.ralloc_drgn d (Seq.create (U32.v len) init) + in + Buffer len content 0ul len + +let mmalloc_drgn_mm #a #rrel d init len = + lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content : HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + HST.ralloc_drgn_mm d (Seq.create (U32.v len) init) + in + Buffer len content 0ul len + +let mmalloc_drgn_and_blit #a #rrel #_ #_ d src id_src len = + lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel; + let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) = + HST.ralloc_drgn d (read_sub_buffer src id_src len) + in + Buffer len content 0ul len + +#push-options "--max_fuel 0 --initial_ifuel 1 --max_ifuel 1 --z3rlimit 128 --split_queries no" +#restart-solver +let blit #a #rrel1 #rrel2 #rel1 #rel2 src idx_src dst idx_dst len = + let open HST in + match src, dst with + | Buffer _ _ _ _, Buffer _ _ _ _ -> + if len = 0ul then () + else + let h = get () in + let Buffer max_length1 content1 idx1 length1 = src in + let Buffer max_length2 content2 idx2 length2 = dst in + let s_full1 = !content1 in + let s_full2 = !content2 in + let s1 = Seq.slice s_full1 (U32.v idx1) (U32.v max_length1) in + let s2 = Seq.slice s_full2 (U32.v idx2) (U32.v max_length2) in + let s_sub_src = Seq.slice s1 (U32.v idx_src) (U32.v idx_src + U32.v len) in + let s2' = Seq.replace_subseq s2 (U32.v idx_dst) (U32.v idx_dst + U32.v len) s_sub_src in + let s_full2' = Seq.replace_subseq s_full2 (U32.v idx2) (U32.v max_length2) s2' in + + assert 
(Seq.equal (Seq.slice s2' (U32.v idx_dst) (U32.v idx_dst + U32.v len)) s_sub_src); + assert (Seq.equal (Seq.slice s2' 0 (U32.v idx_dst)) (Seq.slice s2 0 (U32.v idx_dst))); + assert (Seq.equal (Seq.slice s2' (U32.v idx_dst + U32.v len) (length dst)) + (Seq.slice s2 (U32.v idx_dst + U32.v len) (length dst))); + + // AF: Needed to trigger the preorder relation. A bit verbose because the second sequence + // has a ghost computation (U32.v (Ghost.reveal length)) + assert (s_full2' `Seq.equal` + Seq.replace_subseq s_full2 + (U32.v idx2) + (U32.v idx2 + U32.v length2) + (Seq.replace_subseq (as_seq h dst) + (U32.v idx_dst) + (U32.v idx_dst + U32.v len) + (Seq.slice (as_seq h src) + (U32.v idx_src) + (U32.v idx_src + U32.v len) + ) + ) + ); + + content2 := s_full2'; + + let h1 = get () in + assert (s_full2' `Seq.equal` Seq.replace_subseq s_full2 (U32.v idx2) (U32.v idx2 + U32.v length2) (Seq.slice s2' 0 (U32.v length2))); + assert (h1 == g_upd_seq dst (Seq.slice s2' 0 (U32.v length2)) h); + g_upd_seq_as_seq dst (Seq.slice s2' 0 (U32.v length2)) h //for modifies clause + | _, _ -> () +#pop-options + +#restart-solver +#push-options "--z3rlimit 256 --max_fuel 0 --max_ifuel 1 --initial_ifuel 1 --z3cliopt smt.qi.EAGER_THRESHOLD=4" +let fill' (#t:Type) (#rrel #rel: srel t) + (b: mbuffer t rrel rel) + (z:t) + (len:U32.t) +: HST.Stack unit + (requires (fun h -> + live h b /\ + U32.v len <= length b /\ + rel (as_seq h b) (Seq.replace_subseq (as_seq h b) 0 (U32.v len) (Seq.create (U32.v len) z)) + )) + (ensures (fun h0 _ h1 -> + modifies (loc_buffer b) h0 h1 /\ + live h1 b /\ + Seq.slice (as_seq h1 b) 0 (U32.v len) `Seq.equal` Seq.create (U32.v len) z /\ + Seq.slice (as_seq h1 b) (U32.v len) (length b) `Seq.equal` Seq.slice (as_seq h0 b) (U32.v len) (length b) + )) += let open HST in + if len = 0ul then () + else begin + let h = get () in + let Buffer max_length content idx length = b in + let s_full = !content in + let s = Seq.slice s_full (U32.v idx) (U32.v max_length) in + let s_src = Seq.create (U32.v len) z in + let s' = Seq.replace_subseq s 0 (U32.v len) s_src in + let s_full' = Seq.replace_subseq s_full (U32.v idx) (U32.v idx + U32.v len) s_src in + // AF: Needed to trigger the preorder relation. 
A bit verbose because the second sequence + // has a ghost computation (U32.v (Ghost.reveal length)) + assert (s_full' `Seq.equal` Seq.replace_subseq s_full (U32.v idx) (U32.v idx + U32.v length) (Seq.replace_subseq (Seq.slice s_full (U32.v idx) (U32.v idx + U32.v length)) 0 (U32.v len) s_src)); + content := s_full'; + let h' = HST.get () in + assert (s_full' `Seq.equal` Seq.replace_subseq s_full (U32.v idx) (U32.v idx + U32.v length) (Seq.slice s' 0 (U32.v length))); + assert (h' == g_upd_seq b (Seq.slice s' 0 (U32.v length)) h); + g_upd_seq_as_seq b (Seq.slice s' 0 (U32.v length)) h //for modifies clause + end +#pop-options + +let fill #t #rrel #rel b z len = fill' b z len + +let abuffer' = ubuffer' + +let coerce (t2: Type) (#t1: Type) (x1: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun y -> y == x1)) = x1 + +let cloc_cls = + assert_norm (MG.cls abuffer == MG.cls ubuffer); + coerce (MG.cls abuffer) cls + +let cloc_of_loc l = + assert_norm (MG.cls abuffer == MG.cls ubuffer); + coerce (MG.loc cloc_cls) l + +let loc_of_cloc l = + assert_norm (MG.cls abuffer == MG.cls ubuffer); + coerce loc l + +let loc_of_cloc_of_loc l = () + +let cloc_of_loc_of_cloc l = () + +let cloc_of_loc_none _ = () + +let cloc_of_loc_union _ _ = () + +let cloc_of_loc_addresses _ _ _ = () + +let cloc_of_loc_regions _ _ = () + +let loc_includes_to_cloc l1 l2 = () + +let loc_disjoint_to_cloc l1 l2 = () + +let modifies_to_cloc l h1 h2 = () diff --git a/stage0/ulib/LowStar.Monotonic.Buffer.fsti b/stage0/ulib/LowStar.Monotonic.Buffer.fsti new file mode 100644 index 00000000000..94e6961fe21 --- /dev/null +++ b/stage0/ulib/LowStar.Monotonic.Buffer.fsti @@ -0,0 +1,2348 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.Monotonic.Buffer + +module G = FStar.Ghost +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(* Most comments are taken from the Low* tutorial at: + https://fstarlang.github.io/lowstar/html/LowStar.html + *) + +(* Shorthand for preorder over sequences *) +unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a) + +(* + * A compatibility relation between preorders of a sequence and its subsequence + *) +[@@"opaque_to_smt"] +unfold +let compatible_subseq_preorder (#a:Type0) + (len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a) + = (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2 + (Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel + (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel + (forall (s s2:Seq.seq a). 
{:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2 + (Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel + (rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel + + +/// Low* buffers +/// ============== +/// +/// The workhorse of Low*, this module allows modeling C arrays on the +/// stack and in the heap. At compilation time, KaRaMeL implements +/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C +/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``. +/// +/// The type is indexed by two preorders: +/// rrel is the preorder with which the buffer is initially created +/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one) +/// +/// The buffer contents are constrained to evolve according to rel + +(* + * rrel is part of the type for technical reasons + * If we make it part of the implementation of the buffer type, + * it bumps up the universe of buffer itself by one, + * which is too restrictive (e.g. no buffers of buffers) + * + * We expect that clients will rarely work with this directly + * Most of the times, they will use wrappers such as buffer, immutable buffer etc. + *) +val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0 + +/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For +/// any given type, there is exactly one ``null`` buffer of this type, +/// just like there is exactly one C ``NULL`` pointer of any given type. +/// +/// The nullity test ``g_is_null`` is ghost, for proof purposes +/// only. The corresponding stateful nullity test is ``is_null``, see +/// below. + +(* FIXME: The nullity test for proof purposes is currently expressed + as a ghost predicate, `g_is_null`, but it is scheduled to be + replaced with equality with `null` *) + +val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool + +val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b}) + +val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull) + +/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated +/// yet. + +val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0 + + +/// ``live h b`` holds if, and only if, buffer ``b`` is currently +/// allocated in ``h`` and has not been deallocated yet. +/// +/// This predicate corresponds to the C notion of "lifetime", and as +/// such, is a prerequisite for all stateful operations on buffers +/// (see below), per the C standard: +/// +/// If an object is referred to outside of its lifetime, the +/// behavior is undefined. +/// +/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2 +/// +/// By contrast, it is not required for the ghost versions of those +/// operators. + +val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0 + + +/// The null pointer is always live. + +val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel)) + +val live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) + :Lemma (requires (g_is_null b == true)) + (ensures (live h b)) + [SMTPat (live h b)] + +/// A live buffer has already been allocated. 
+ +val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) + :Lemma (requires (live h b /\ b `unused_in` h)) (ensures False) + + +/// If two memories have equal domains, then liveness in one implies liveness in the other + +val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem) + :Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b)) + (ensures (live h1 b)) + [SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)] + + +(* FIXME: the following definition is necessary to isolate the pattern + because of unification. In other words, if we attached the pattern + to `live_not_unused_in`, then we would not be able to use + `FStar.Classical.forall_intro_`n and + `FStar.Classical.move_requires` due to unification issues. Anyway, + we plan to isolate patterns in a separate module to clean up the Z3 + context. + *) + +val live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) + :Lemma (requires (live h b /\ b `unused_in` h)) + (ensures False) + [SMTPat (live h b); SMTPat (b `unused_in` h)] + + +/// Buffers live in the HyperStack model, which is an extension of +/// the HyperHeap model, a hierarchical memory model that divides the +/// heap into a tree of regions. This coarse-grained separation +/// allows the programmer to state modifies clauses at the level of +/// regions, rather than on individual buffers. +/// +/// The HyperHeap memory model is described: +/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/ +/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/ +/// +/// ``frameOf b`` returns the identifier of the region in which the +/// buffer ``b`` lives. + +val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid + + +/// ``as_addr b`` returns the abstract address of the buffer in its +/// region, as an allocation unit: two buffers that are allocated +/// separately in the same region will actually have different +/// addresses, but a sub-buffer of a buffer will actually have the +/// same address as its enclosing buffer. + +val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat + + +/// A buffer is unused if, and only if, its address is unused. + +val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) + :Lemma (unused_in b h <==> + (HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b)))) + + +/// If a buffer is live, then so is its region. + +val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) + :Lemma (requires (live h b)) + (ensures (HS.live_region h (frameOf b))) + [SMTPatOr [ + [SMTPat (live h b)]; + [SMTPat (HS.live_region h (frameOf b))]; + ]] + + +/// The length of a buffer ``b`` is available as a machine integer ``len +/// b`` or as a mathematical integer ``length b``, but both in ghost +/// (proof) code only: just like in C, one cannot compute the length +/// of a buffer at run-time. + +val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t + +let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b) + + +/// The null pointer has length 0. 
+
+val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
+
+val length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
+  :Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
+        [SMTPat (length b)]
+
+val length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
+  :Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
+        [SMTPat (g_is_null b)]
+
+
+/// For functional correctness, buffers are reflected at the proof
+/// level using sequences, via ``as_seq h b``, which returns the
+/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
+/// live in ``h``, then the result is unspecified.
+
+(* TODO: why not return a lseq and remove length_as_seq lemma? *)
+val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
+
+
+/// The contents of a buffer ``b`` have the same length as ``b`` itself.
+
+val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
+  :Lemma (Seq.length (as_seq h b) == length b)
+        [SMTPat (Seq.length (as_seq h b))]
+
+
+/// ``get`` is an often-convenient shorthand to index the value of a
+/// given buffer in a given heap, for proof purposes.
+
+
+let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
+  :Ghost a (requires (i < length p)) (ensures (fun _ -> True))
+  = Seq.index (as_seq h p) i
+
+/// Injectivity in the first preorder
+
+val mbuffer_injectivity_in_first_preorder (_:unit)
+  : Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
+             (b1:mbuffer a rrel1 rel1)
+             (b2:mbuffer a rrel2 rel2).
+           rrel1 =!= rrel2 ==> ~ (b1 === b2))
+
+/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility".
+///
+///
+/// Sub-buffers can be taken at a different preorder than their parent buffers,
+/// but we need to ensure that the changes to the sub-buffer are compatible with the preorder
+/// of the parent buffer, and vice versa.
+
+(*
+ * The quantifiers are fiercely guarded, so if you are working directly with them,
+ * you may have to write additional asserts as triggers
+ *)
+[@@"opaque_to_smt"]
+unfold let compatible_sub
+  (#a:Type0) (#rrel #rel:srel a)
+  (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
+  = compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
+
+
+/// ``gsub`` is the way to carve a sub-buffer out of a given
+/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
+/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
+/// ``len`` must fit within the length of ``b``.
+///
+/// Further, clients can attach a preorder ``sub_rel`` to the sub-buffer,
+/// provided it is compatible.
+///
+/// ``gsub`` is the ghost version, for proof purposes. Its stateful
+/// counterpart is ``sub``, see below.
+
+val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
+  (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
+  :Ghost (mbuffer a rrel sub_rel)
+         (requires (U32.v i + U32.v len <= length b))
+         (ensures (fun _ -> True))
+
+// goffset
+
+/// A buffer is live exactly at the same time as all of its sub-buffers.
+
+val live_gsub (#a:Type0) (#rrel #rel:srel a)
+  (h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
+  :Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
+         (ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 .
{:pattern (live h0 b)} live h0 b)))) + [SMTPatOr [ + [SMTPat (live h (mgsub sub_rel b i len))]; + [SMTPat (live h b); SMTPat (mgsub sub_rel b i len);] + ]] + + +val gsub_is_null (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a) + :Lemma (requires (U32.v i + U32.v len <= length b)) + (ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b)) + [SMTPat (g_is_null (mgsub sub_rel b i len))] + + +/// The length of a sub-buffer is exactly the one provided at ``gsub``. + + +val len_gsub (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a) + :Lemma (requires (U32.v i + U32.v len' <= length b)) + (ensures (len (mgsub sub_rel b i len') == len')) + [SMTPatOr [ + [SMTPat (len (mgsub sub_rel b i len'))]; + [SMTPat (length (mgsub sub_rel b i len'))]; + ]] + + +val frameOf_gsub (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a) + :Lemma (requires (U32.v i + U32.v len <= length b)) + (ensures (frameOf (mgsub sub_rel b i len) == frameOf b)) + [SMTPat (frameOf (mgsub sub_rel b i len))] + +val as_addr_gsub (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a) + :Lemma (requires (U32.v i + U32.v len <= length b)) + (ensures (as_addr (mgsub sub_rel b i len) == as_addr b)) + [SMTPat (as_addr (mgsub sub_rel b i len))] + +val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a) + (b1 b2:mbuffer a rrel rel) + (i1 i2:U32.t) + (len1 len2:U32.t) + :Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\ + U32.v i2 + U32.v len2 <= length b2 /\ + mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2)) + (ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2))) + + +/// Nesting two ``gsub`` collapses into one ``gsub``, transitively. + +val gsub_gsub (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + (i1:U32.t) (len1:U32.t) (sub_rel1:srel a) + (i2: U32.t) (len2: U32.t) (sub_rel2:srel a) + :Lemma (requires (U32.v i1 + U32.v len1 <= length b /\ + U32.v i2 + U32.v len2 <= U32.v len1)) + (ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\ + mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2)) + [SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)] + + +/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and +/// length ``len b``. + +val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b)) + + +/// The contents of a sub-buffer is the corresponding slice of the +/// contents of its enclosing buffer. + +val as_seq_gsub (#a:Type0) (#rrel #rel:srel a) + (h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a) + :Lemma (requires (U32.v i + U32.v len <= length b)) + (ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len))) + [SMTPat (as_seq h (mgsub sub_rel b i len))] + +/// Two live non-null buffers having the same region and address have +/// their elements of the same type. 
+ +val live_same_addresses_equal_types_and_preorders + (#a1 #a2: Type0) + (#rrel1 #rel1: srel a1) + (#rrel2 #rel2: srel a2) + (b1: mbuffer a1 rrel1 rel1) + (b2: mbuffer a2 rrel2 rel2) + (h: HS.mem) +: Lemma + ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2)) + + +/// # The modifies clause +/// +/// The modifies clause for regions, references and buffers. +/// ========================================================== +/// +/// This module presents the modifies clause, a way to track the set +/// of memory locations modified by a stateful Low* (or even F*) +/// program. The basic principle of modifies clauses is that any +/// location that is disjoint from a set of memory locations modified +/// by an operation is preserved by that operation. +/// +/// We start by specifying a monoid of sets of memory locations. From +/// a rough high-level view, ``loc`` is the type of sets of memory +/// locations, equipped with an identity element ``loc_none``, +/// representing the empty set, and an associative and commutative +/// operator, ``loc_union``, representing the union of two sets of +/// memory locations. +/// +/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT +/// matching loops with ``modifies_trans`` and ``modifies_refl`` below. + +[@@erasable] +val loc : Type0 + +val loc_none: loc + +val loc_union + (s1 s2: loc) +: GTot loc + +val loc_union_idem + (s: loc) +: Lemma + (loc_union s s == s) + [SMTPat (loc_union s s)] + +val loc_union_comm + (s1 s2: loc) +: Lemma + (loc_union s1 s2 == loc_union s2 s1) + [SMTPat (loc_union s1 s2)] + +val loc_union_assoc + (s1 s2 s3: loc) +: Lemma + (loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3) + +val loc_union_idem_1 + (s1 s2: loc) +: Lemma + (loc_union s1 (loc_union s1 s2) == loc_union s1 s2) + [SMTPat (loc_union s1 (loc_union s1 s2))] + +val loc_union_idem_2 + (s1 s2: loc) +: Lemma + (loc_union (loc_union s1 s2) s2 == loc_union s1 s2) + [SMTPat (loc_union (loc_union s1 s2) s2)] + +val loc_union_loc_none_l + (s: loc) +: Lemma + (loc_union loc_none s == s) + [SMTPat (loc_union loc_none s)] + +val loc_union_loc_none_r + (s: loc) +: Lemma + (loc_union s loc_none == s) + [SMTPat (loc_union s loc_none)] + +/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``. 
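+///
+/// Illustration only (this note is not part of the original interface):
+/// ``loc_buffer`` and ``loc_union`` are typically combined to state the
+/// footprint of a stateful function. Assuming two buffers ``b1`` and ``b2``
+/// in scope, a postcondition might read, roughly:
+///
+///   modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1
+///
+/// meaning that any location disjoint from both buffers is preserved
+/// between the memory states ``h0`` and ``h1``.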
+ +val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc + +val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc + +val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma + (loc_buffer b == loc_buffer_from_to b 0ul (len b)) + +val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) +: Lemma + (requires (length b <= U32.v to)) + (ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b))) + +val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t) +: Lemma + (requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from)) + (ensures (loc_buffer_from_to b from to == loc_none)) + +val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) + (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) + (from to: U32.t) +: Lemma + (requires ( + U32.v i + U32.v len <= length b /\ + U32.v from <= U32.v to /\ U32.v to <= U32.v len + )) + (ensures ( + loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to) + )) + +val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) + (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) + :Lemma + (requires (U32.v i + U32.v len <= length b)) + (ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len))) + +val loc_buffer_null (a:Type0) (rrel rel:srel a) + :Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none) + [SMTPat (loc_buffer (mnull #a #rrel #rel))] + +val loc_buffer_from_to_eq + (#a:Type0) (#rrel #rel:srel a) + (b: mbuffer a rrel rel) + (from to: U32.t) +: Lemma + (requires (U32.v from <= U32.v to /\ U32.v to <= length b)) + (ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from)))) + [SMTPat (loc_buffer_from_to b from to)] + +val loc_buffer_mgsub_rel_eq + (#a:Type0) (#rrel #rel:srel a) + (b: mbuffer a rrel rel) + (rel1 rel2: srel a) + (i len: U32.t) +: Lemma + (requires (U32.v i + U32.v len <= length b)) + (ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len))) + [SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))] + + +/// ``loc_addresses r n`` is the set of memory locations associated to a +/// set of addresses ``n`` in a given region ``r``. + +val loc_addresses + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: GTot loc + +unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc = + loc_addresses false (frameOf b) (Set.singleton (as_addr b)) + +/// ``loc_regions r`` is the set of memory locations associated to a set +/// ``r`` of regions. + +val loc_regions + (preserve_liveness: bool) + (r: Set.set HS.rid) +: GTot loc + + +/// ``loc_mreference b`` is the set of memory locations associated to a +/// reference ``b``, which is actually the set of memory locations +/// associated to the address of ``b``. 
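+///
+/// Illustration only (not part of the original interface): these
+/// constructors let references and buffers share a single footprint.
+/// Assuming a reference ``r`` and a buffer ``b`` in scope, a function
+/// updating both might be specified, as a sketch, by:
+///
+///   modifies (loc_union (loc_mreference r) (loc_buffer b)) h0 h1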
+ +unfold +let loc_mreference + (#a: Type) + (#p: Preorder.preorder a) + (b: HS.mreference a p) +: GTot loc += loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) + +unfold +let loc_freed_mreference + (#a: Type) + (#p: Preorder.preorder a) + (b: HS.mreference a p) +: GTot loc += loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b)) + + +/// ``loc_region_only r`` is the set of memory locations associated to a +/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense +/// of ``FStar.HyperStack.extends``.) + +unfold +let loc_region_only + (preserve_liveness: bool) + (r: HS.rid) +: GTot loc += loc_regions preserve_liveness (Set.singleton r) + + +/// ``loc_all_regions_from r`` is the set of all memory locations +/// associated to a region ``r`` and any region ``r'`` that transitively +/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``, +/// e.g. nested stack frames.) + +unfold +let loc_all_regions_from + (preserve_liveness: bool) + (r: HS.rid) +: GTot loc += loc_regions preserve_liveness (HS.mod_set (Set.singleton r)) + + +/// We equip the ``loc`` monoid of sets of memory locations with an +/// inclusion relation, ``loc_includes``, which is a preorder compatible +/// with ``loc_union``. Although we consider sets of memory locations, +/// we do not specify them using any F* set library such as +/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes`` +/// encompasses more than just set-theoretic inclusion. + +val loc_includes + (s1 s2: loc) +: GTot Type0 + +val loc_includes_refl + (s: loc) +: Lemma + (loc_includes s s) + [SMTPat (loc_includes s s)] + +val loc_includes_trans + (s1 s2 s3: loc) +: Lemma + (requires (loc_includes s1 s2 /\ loc_includes s2 s3)) + (ensures (loc_includes s1 s3)) + +val loc_includes_trans_backwards + (s1 s2 s3: loc) +: Lemma + (requires (loc_includes s1 s2 /\ loc_includes s2 s3)) + (ensures (loc_includes s1 s3)) + [SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)] + +val loc_includes_union_r + (s s1 s2: loc) +: Lemma + (requires (loc_includes s s1 /\ loc_includes s s2)) + (ensures (loc_includes s (loc_union s1 s2))) + +val loc_includes_union_l + (s1 s2 s: loc) +: Lemma + (requires (loc_includes s1 s \/ loc_includes s2 s)) + (ensures (loc_includes (loc_union s1 s2) s)) + + +val loc_includes_union_l' + (s1 s2 s: loc) + : Lemma + (requires (loc_includes s1 s \/ loc_includes s2 s)) + (ensures (loc_includes (loc_union s1 s2) s)) + [SMTPat (loc_includes (loc_union s1 s2) s)] + +val loc_includes_union_r' + (s s1 s2: loc) +: Lemma + (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2)) + [SMTPat (loc_includes s (loc_union s1 s2))] + +val loc_includes_none + (s: loc) +: Lemma + (loc_includes s loc_none) + [SMTPat (loc_includes s loc_none)] + + +/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer +/// theory (see ``LowStar.Buffer.includes``), then so are their +/// corresponding sets of memory locations. 
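+///
+/// Illustration only (not part of the original interface): the most common
+/// instance is a sub-buffer carved out with ``mgsub``. Whenever
+/// ``U32.v i + U32.v len <= length b``, the lemmas below give
+///
+///   loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))
+///
+/// so a modifies clause stated on ``b`` also covers writes to the sub-buffer.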
+ +val loc_includes_gsub_buffer_r + (l:loc) + (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a) +: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\ + loc_includes l (loc_buffer b))) + (ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len)))) + [SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))] + +val loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a) + :Lemma (requires (UInt32.v i + UInt32.v len <= (length b))) + (ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len)))) + [SMTPat (mgsub sub_rel b i len)] + +val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + (i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a) + (i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a) + :Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\ + UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1 + )) + (ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2)))) + [SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)] + +val loc_includes_loc_buffer_loc_buffer_from_to + (#a: _) (#rrel #rel: _) + (b: mbuffer a rrel rel) + (from to: U32.t) +: Lemma + (loc_includes (loc_buffer b) (loc_buffer_from_to b from to)) + +val loc_includes_loc_buffer_from_to + (#a: _) (#rrel #rel: _) + (b: mbuffer a rrel rel) + (from1 to1 from2 to2: U32.t) +: Lemma + (requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1)) + (ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2))) + +/// If the contents of a buffer are equal in two given heaps, then so +/// are the contents of any of its sub-buffers. + +val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a) + (h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2) + :Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\ + as_seq h1 larger == as_seq h2 larger /\ + (live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller))) + (ensures (as_seq h1 smaller == as_seq h2 smaller)) + +/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in +/// the region of ``b``, then the set of memory locations corresponding +/// to ``b`` is included in the set of memory locations corresponding to +/// the addresses in ``s`` in region ``r``. +/// +/// In particular, the set of memory locations corresponding to a +/// buffer is included in the set of memory locations corresponding to +/// its region and address. + +val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a) + (preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel) + :Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s)) + (ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))) + [SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))] + +val loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b)) + [SMTPat (loc_buffer b)] + + +/// The set of memory locations corresponding to a buffer is included +/// in the set of memory locations corresponding to its region. 
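+///
+/// Illustration only (not part of the original interface): this is what
+/// makes coarse, region-level specifications possible. For any buffer ``b``,
+///
+///   loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b)
+///
+/// so a clause such as ``modifies (loc_region_only true (frameOf b)) h0 h1``
+/// already accounts for arbitrary writes to ``b``.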
+
+val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
+  (preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
+  :Lemma (requires (Set.mem (frameOf b) s))
+         (ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
+         [SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
+
+val loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
+  :Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
+         [SMTPat (loc_buffer b)]
+
+
+/// If a region ``r`` is in a set of regions ``s``, then the set of memory
+/// locations corresponding to a set of addresses ``a`` in ``r`` is
+/// included in the set of memory locations corresponding to the
+/// regions in ``s``.
+///
+/// In particular, the set of memory locations corresponding to a
+/// set of addresses ``a`` in a given region ``r`` is included in the set
+/// of memory locations corresponding to region ``r``.
+
+val loc_includes_region_addresses
+  (preserve_liveness1: bool)
+  (preserve_liveness2: bool)
+  (s: Set.set HS.rid)
+  (r: HS.rid)
+  (a: Set.set nat)
+: Lemma
+  (requires (Set.mem r s))
+  (ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
+  [SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
+
+val loc_includes_region_addresses'
+  (preserve_liveness: bool)
+  (r: HS.rid)
+  (a: Set.set nat)
+: Lemma
+  (loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
+  [SMTPat (loc_addresses preserve_liveness r a)]
+
+/// If a set of region identifiers ``s1`` includes a set of region
+/// identifiers ``s2``, then so are their corresponding sets of memory
+/// locations.
+
+val loc_includes_region_region
+  (preserve_liveness1: bool)
+  (preserve_liveness2: bool)
+  (s1 s2: Set.set HS.rid)
+: Lemma
+  (requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
+  (ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
+  [SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
+
+val loc_includes_region_region'
+  (preserve_liveness: bool)
+  (s: Set.set HS.rid)
+: Lemma
+  (loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
+  [SMTPat (loc_regions preserve_liveness s)]
+
+/// The following lemma can act as a cut when reasoning with sets of
+/// memory locations corresponding to sets of regions.
+ +val loc_includes_region_union_l + (preserve_liveness: bool) + (l: loc) + (s1 s2: Set.set HS.rid) +: Lemma + (requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1))))) + (ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))) + [SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))] + + +/// If a set of addresses ``s1`` includes a set of addresses ``s2``, +/// then so are their corresponding memory locations +val loc_includes_addresses_addresses + (preserve_liveness1 preserve_liveness2: bool) + (r: HS.rid) + (s1 s2: Set.set nat) +: Lemma + (requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1)) + (ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2))) + +val loc_includes_addresses_addresses_1 + (preserve_liveness1 preserve_liveness2: bool) + (r1 r2: HS.rid) + (s1 s2: Set.set nat) +: Lemma + (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1)) + (ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))) + [SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))] + +val loc_includes_addresses_addresses_2 + (preserve_liveness: bool) + (r: HS.rid) + (s: Set.set nat) +: Lemma + (loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s)) + [SMTPat (loc_addresses preserve_liveness r s)] + +/// Patterns with loc_includes, union on the left + +val loc_includes_union_l_buffer + (s1 s2:loc) + (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + :Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b))) + (ensures (loc_includes (loc_union s1 s2) (loc_buffer b))) + [SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] + +val loc_includes_union_l_addresses + (s1 s2: loc) + (prf: bool) + (r: HS.rid) + (a: Set.set nat) +: Lemma + (requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))) + (ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a))) + [SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] + +val loc_includes_union_l_regions + (s1 s2: loc) + (prf: bool) + (r: Set.set HS.rid) +: Lemma + (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r))) + (ensures (loc_includes (loc_union s1 s2) (loc_regions prf r))) + [SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] + +/// Since inclusion encompasses more than just set-theoretic +/// inclusion, we also need to specify disjointness accordingly, as a +/// symmetric relation compatible with union. + +val loc_disjoint + (s1 s2: loc) +: GTot Type0 + +val loc_disjoint_sym + (s1 s2: loc) +: Lemma + (requires (loc_disjoint s1 s2)) + (ensures (loc_disjoint s2 s1)) + +val loc_disjoint_sym' + (s1 s2: loc) +: Lemma + (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) + [SMTPat (loc_disjoint s1 s2)] + +val loc_disjoint_none_r + (s: loc) +: Lemma + (ensures (loc_disjoint s loc_none)) + [SMTPat (loc_disjoint s loc_none)] + +val loc_disjoint_union_r + (s s1 s2: loc) +: Lemma + (requires (loc_disjoint s s1 /\ loc_disjoint s s2)) + (ensures (loc_disjoint s (loc_union s1 s2))) + + +/// If two sets of memory locations are disjoint, then so are any two +/// included sets of memory locations. 
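+///
+/// Illustration only (not part of the original interface): combined with the
+/// inclusion lemmas for sub-buffers, this is how disjointness propagates
+/// downwards. If ``loc_disjoint (loc_buffer b1) (loc_buffer b2)`` holds, then
+/// for in-bounds indices one also gets, e.g.:
+///
+///   loc_disjoint (loc_buffer (mgsub rel1 b1 i1 len1))
+///                (loc_buffer (mgsub rel2 b2 i2 len2))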
+ +val loc_disjoint_includes + (p1 p2 p1' p2' : loc) +: Lemma + (requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2)) + (ensures (loc_disjoint p1' p2')) + +val loc_disjoint_union_r' + (s s1 s2: loc) +: Lemma + (ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2))) + [SMTPat (loc_disjoint s (loc_union s1 s2))] + +val loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma + (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2)) + (ensures (loc_disjoint b1 b2')) + [SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] + +val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) + (b:mbuffer a rrel rel) + (i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a) + (i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a) + :Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\ + UInt32.v i2 + UInt32.v len2 <= (length b) /\ + (UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/ + UInt32.v i2 + UInt32.v len2 <= UInt32.v i1))) + (ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2)))) + [SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)] + +val loc_disjoint_loc_buffer_from_to + (#a: _) (#rrel #rel: _) + (b: mbuffer a rrel rel) + (from1 to1 from2 to2: U32.t) +: Lemma + (requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1)) + (ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2))) + +/// If two sets of addresses correspond to different regions or are +/// disjoint, then their corresponding sets of memory locations are +/// disjoint. + +val loc_disjoint_addresses + (preserve_liveness1 preserve_liveness2: bool) + (r1 r2: HS.rid) + (n1 n2: Set.set nat) +: Lemma + (requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty)) + (ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))) + [SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))] + +/// If two sets of region identifiers are disjoint, then so are their +/// corresponding sets of memory locations. 
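+///
+/// Illustration only (not part of the original interface): for two distinct
+/// region identifiers ``r1 <> r2``, the singleton sets intersect in the empty
+/// set, hence
+///
+///   loc_disjoint (loc_region_only false r1) (loc_region_only false r2)
+///
+/// which is a typical way of separating, say, stack temporaries from
+/// heap-allocated state at the region level.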
+ +val loc_disjoint_regions + (preserve_liveness1: bool) + (preserve_liveness2: bool) + (rs1 rs2: Set.set HS.rid) +: Lemma + (requires (Set.subset (Set.intersect rs1 rs2) Set.empty)) + (ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))) + [SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))] + + +/// Some utilities to work with lists of buffers and locs + +(* buf_t is a `buffer` at some type `a` *) +let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel + +(* A convenience to construct a buf_t *) +[@@BigOps.__reduce__] +let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|) + +(* A conjunction of liveness conditions on the buffers in `l` + Implicitly reduced at typechecking time *) +[@@"opaque_to_smt"] +unfold +let all_live (h:HS.mem) (l:list buf_t) : Type0 = + BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l + +(* Pairwise disjointness of locations; + Implicitly reduced at typechecking time *) +[@@"opaque_to_smt"] +unfold +let all_disjoint (l:list loc) : Type0 = + BigOps.pairwise_and loc_disjoint l + +(* Union of a list of locations; + Implicitly reduced at typechecking time *) +[@@"opaque_to_smt"] +unfold +let loc_union_l (l:list loc) = + BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none) + +(* + * Same as all_disjoint, retaining for backward compatibility + *) +[@@"opaque_to_smt"] +unfold +let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l + +/// The modifies clauses proper. +/// +/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two +/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if, +/// any memory location disjoint from ``s`` is preserved from ``h1`` into +/// ``h2``. Elimination lemmas illustrating this principle follow. + +val modifies + (s: loc) + (h1 h2: HS.mem) +: GTot Type0 + +/// If a region ``r`` is disjoint from a set ``s`` of memory locations +/// which is modified, then its liveness is preserved. + +val modifies_live_region + (s: loc) + (h1 h2: HS.mem) + (r: HS.rid) +: Lemma + (requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r)) + (ensures (HS.live_region h2 r)) + [SMTPatOr [ + [SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)]; + [SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)]; + ]] + +/// If a reference ``b`` is disjoint from a set ``p`` of memory locations +/// which is modified, then its liveness and contents are preserved. + +val modifies_mreference_elim + (#t: Type) + (#pre: Preorder.preorder t) + (b: HS.mreference t pre) + (p: loc) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_mreference b) p /\ + HS.contains h b /\ + modifies p h h' + )) + (ensures ( + HS.contains h' b /\ + HS.sel h b == HS.sel h' b + )) + [SMTPatOr [ + [ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ; + [ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ]; + [ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ; + [ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ] + ] ] + +/// If a buffer ``b`` is disjoint from a set ``p`` of +/// memory locations which is modified, then its liveness and contents +/// are preserved. 
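+///
+/// Illustration only (not part of the original interface): this is the
+/// workhorse framing step. Under assumptions of the shape
+///
+///   modifies (loc_buffer b1) h0 h1 /\
+///   loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
+///   live h0 b2
+///
+/// the lemma below yields ``live h1 b2`` and ``as_seq h0 b2 == as_seq h1 b2``,
+/// usually without an explicit call, thanks to its SMT patterns.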
+
+val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
+  (b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
+  :Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
+         (ensures (live h' b /\ (as_seq h b == as_seq h' b)))
+         [SMTPatOr [
+           [ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
+           [ SMTPat (modifies p h h'); SMTPat (live h b) ];
+           [ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
+           [ SMTPat (modifies p h h'); SMTPat (live h' b) ]
+         ]]
+
+val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
+  (b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
+  :Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
+         (ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
+
+/// If the memory state does not change, then ``modifies s h h`` holds for
+/// any set ``s`` of memory locations (and, in particular, for the empty
+/// set ``loc_none``).
+
+val modifies_refl
+  (s: loc)
+  (h: HS.mem)
+: Lemma
+  (modifies s h h)
+  [SMTPat (modifies s h h)]
+
+
+/// If a set ``s2`` of memory locations is modified, then so is any set
+/// ``s1`` that includes ``s2``. In other words, it is always possible to
+/// weaken a modifies clause by widening its set of memory locations.
+
+val modifies_loc_includes
+  (s1: loc)
+  (h h': HS.mem)
+  (s2: loc)
+: Lemma
+  (requires (modifies s2 h h' /\ loc_includes s1 s2))
+  (ensures (modifies s1 h h'))
+  [SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
+
+/// Some memory locations are tagged as liveness-insensitive: the
+/// liveness preservation of a memory location only depends on its
+/// disjointness from the liveness-sensitive memory locations of a
+/// modifies clause.
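+///
+/// Illustration only (not part of the original interface): since
+/// ``loc_buffer b`` is below ``address_liveness_insensitive_locs``, a clause
+/// ``modifies (loc_buffer b) h0 h1`` preserves the liveness of any other
+/// buffer that was live in ``h0``, with no disjointness hypothesis needed for
+/// liveness itself; preserving its contents still requires disjointness.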
+ +val address_liveness_insensitive_locs: loc + +val region_liveness_insensitive_locs: loc + +val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b)) + [SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))] + +val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma + (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a)) + [SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))] + +val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b)) + [SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))] + +val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma + (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a)) + [SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))] + +val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma + (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs)) + [SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))] + +val region_liveness_insensitive_address_liveness_insensitive: + squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs) + +val modifies_liveness_insensitive_mreference + (l1 l2 : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x)) + (ensures (h' `HS.contains` x)) + [SMTPatOr [ + [SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');]; + [SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');]; + ]] + (* TODO: pattern *) + +val modifies_liveness_insensitive_buffer + (l1 l2:loc) + (h h':HS.mem) + (#a:Type0) (#rrel #rel:srel a) + (x:mbuffer a rrel rel) + :Lemma (requires (modifies (loc_union l1 l2) h h' /\ + loc_disjoint l1 (loc_buffer x) /\ + address_liveness_insensitive_locs `loc_includes` l2 /\ + live h x)) + (ensures (live h' x)) + [SMTPatOr [ + [SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');]; + [SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');]; + ]] + +let modifies_liveness_insensitive_mreference_weak + (l : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) + : Lemma (requires (modifies l h h' /\ + address_liveness_insensitive_locs `loc_includes` l /\ + h `HS.contains` x)) + (ensures (h' `HS.contains` x)) + [SMTPatOr [ + [SMTPat (h `HS.contains` x); SMTPat (modifies l h h');]; + [SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');]; + ]] + = modifies_liveness_insensitive_mreference loc_none l h h' x + +let modifies_liveness_insensitive_buffer_weak + (l:loc) + (h h':HS.mem) + (#a:Type0) (#rrel #rel:srel a) + (x:mbuffer a rrel rel) + :Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x)) + (ensures (live h' x)) + [SMTPatOr [ + [SMTPat (live h x); SMTPat (modifies l h h');]; + [SMTPat (live h' x); SMTPat (modifies l h h');]; + ]] + = modifies_liveness_insensitive_buffer loc_none l h h' x + +val modifies_liveness_insensitive_region + (l1 l2 : loc) + (h h' : 
HS.mem) + (x: HS.rid) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x)) + (ensures (HS.live_region h' x)) + [SMTPatOr [ + [SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)]; + [SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)]; + ]] + +val modifies_liveness_insensitive_region_mreference + (l1 l2 : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) +: Lemma + (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x))) + (ensures (HS.live_region h' (HS.frameOf x))) + [SMTPatOr [ + [SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))]; + [SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))]; + ]] + +val modifies_liveness_insensitive_region_buffer + (l1 l2:loc) + (h h':HS.mem) + (#a:Type0) (#rrel #rel:srel a) + (x:mbuffer a rrel rel) + :Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x))) + (ensures (HS.live_region h' (frameOf x))) + [SMTPatOr [ + [SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))]; + [SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))]; + ]] + +val modifies_liveness_insensitive_region_weak + (l2 : loc) + (h h' : HS.mem) + (x: HS.rid) +: Lemma + (requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x)) + (ensures (HS.live_region h' x)) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]; + ]] + +val modifies_liveness_insensitive_region_mreference_weak + (l2 : loc) + (h h' : HS.mem) + (#t: Type) + (#pre: Preorder.preorder t) + (x: HS.mreference t pre) + : Lemma (requires (modifies l2 h h' /\ + region_liveness_insensitive_locs `loc_includes` l2 /\ + HS.live_region h (HS.frameOf x))) + (ensures (HS.live_region h' (HS.frameOf x))) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]; + ]] + +val modifies_liveness_insensitive_region_buffer_weak + (l2:loc) + (h h':HS.mem) + (#a:Type0) (#rrel #rel:srel a) + (x:mbuffer a rrel rel) + :Lemma (requires (modifies l2 h h' /\ + region_liveness_insensitive_locs `loc_includes` l2 /\ + HS.live_region h (frameOf x))) + (ensures (HS.live_region h' (frameOf x))) + [SMTPatOr [ + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))]; + [SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]; + ]] + +/// Modifies clauses are transitive. This lemma is the most general +/// one. + +val modifies_trans + (s12: loc) + (h1 h2: HS.mem) + (s23: loc) + (h3: HS.mem) +: Lemma + (requires (modifies s12 h1 h2 /\ modifies s23 h2 h3)) + (ensures (modifies (loc_union s12 s23) h1 h3)) + +val modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem) + : Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l)) + (ensures (modifies l_goal h1 h3)) + [SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] + + +/// Regions that are not live can be removed from sets of memory +/// locations that are modified. 
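+
+(* An illustrative aside before the region lemmas below (hypothetical client
+   lemma, not part of this interface; the name is ours): writing through a
+   buffer -- a liveness-insensitive location -- cannot deallocate any other
+   buffer, whether or not the two buffers are disjoint. This chains
+   ``address_liveness_insensitive_buffer`` and
+   ``modifies_liveness_insensitive_buffer_weak`` above. *)
+let example_write_preserves_liveness
+  (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
+  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h h':HS.mem)
+  : Lemma (requires (modifies (loc_buffer b1) h h' /\ live h b2))
+          (ensures  (live h' b2))
+  = address_liveness_insensitive_buffer b1;
+    modifies_liveness_insensitive_buffer_weak (loc_buffer b1) h h' b2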
+ +val modifies_only_live_regions + (rs: Set.set HS.rid) + (l: loc) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_regions false rs) l) h h' /\ + (forall r . Set.mem r rs ==> (~ (HS.live_region h r))) + )) + (ensures (modifies l h h')) + +/// As a consequence, fresh regions can be removed from modifies +/// clauses. + +val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma + (requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1)) + (ensures (modifies l h0 h1)) + [SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)] + +val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma + (requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col)))) + (ensures ( + let (_, m1) = HS.new_eternal_region m0 r0 col in + modifies loc_none m0 m1 + )) + [SMTPat (HS.new_eternal_region m0 r0 col)] + + +/// Stack discipline: any stack frame (and all its transitively +/// extending regions) that is pushed, modified and popped can be +/// removed from a modifies clause. + +/// AR: 01/29/2019: Removing the smt pattern from this lemma. +/// Clients are no longer expected to call it explicitly, +/// if you are having to, please raise an issue. + +val modifies_fresh_frame_popped + (h0 h1: HS.mem) + (s: loc) + (h2 h3: HS.mem) +: Lemma + (requires ( + HS.fresh_frame h0 h1 /\ + modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\ + (HS.get_tip h2) == (HS.get_tip h1) /\ + HS.popped h2 h3 + )) + (ensures ( + modifies s h0 h3 /\ + (HS.get_tip h3) == HS.get_tip h0 + )) + +/// Compatibility lemmas to rescue modifies clauses specified in the +/// standard F* HyperStack library. + +val modifies_loc_regions_intro + (rs: Set.set HS.rid) + (h1 h2: HS.mem) +: Lemma + (requires (HS.modifies rs h1 h2)) + (ensures (modifies (loc_regions true rs) h1 h2)) + +val modifies_loc_addresses_intro + (r: HS.rid) + (a: Set.set nat) + (l: loc) + (h1 h2: HS.mem) +: Lemma + (requires ( + HS.live_region h2 r /\ + modifies (loc_union (loc_region_only false r) l) h1 h2 /\ + HS.modifies_ref r a h1 h2 + )) + (ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2)) + +/// Modifies clauses for allocating a reference: nothing is +/// modified. (In particular, a modifies clause does not track +/// memory locations that are created.) + +val modifies_ralloc_post + (#a: Type) + (#rel: Preorder.preorder a) + (i: HS.rid) + (init: a) + (h: HS.mem) + (x: HST.mreference a rel) + (h' : HS.mem) +: Lemma + (requires (HST.ralloc_post i init h x h')) + (ensures (modifies loc_none h h')) + [SMTPat (HST.ralloc_post i init h x h')] + +val modifies_salloc_post + (#a: Type) + (#rel: Preorder.preorder a) + (init: a) + (h: HS.mem) + (x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } ) + (h' : HS.mem) +: Lemma + (requires (HST.salloc_post init h x h')) + (ensures (modifies loc_none h h')) + [SMTPat (HST.salloc_post init h x h')] + +/// Modifies clause for freeing a reference: the address is modified. 
+ +val modifies_free + (#a: Type) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel { HS.is_mm r } ) + (m: HS.mem { m `HS.contains` r } ) +: Lemma + (modifies (loc_freed_mreference r) m (HS.free r m)) + [SMTPat (HS.free r m)] + +/// Another compatibility lemma + +val modifies_none_modifies + (h1 h2: HS.mem) +: Lemma + (requires (HST.modifies_none h1 h2)) + (ensures (modifies loc_none h1 h2)) + [SMTPat (HST.modifies_none h1 h2)] + +/// Compatibility with HS.upd + +val modifies_upd + (#t: Type) (#pre: Preorder.preorder t) + (r: HS.mreference t pre) + (v: t) + (h: HS.mem) +: Lemma + (requires (HS.contains h r)) + (ensures (modifies (loc_mreference r) h (HS.upd h r v))) + [SMTPat (HS.upd h r v)] + +/// Introduction lemma for modifying loc_buffer_from_to + +val modifies_loc_buffer_from_to_intro + (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + (from to: U32.t) + (l: loc) (h h' : HS.mem) +: Lemma + (requires ( + let s = as_seq h b in + let s' = as_seq h' b in + live h b /\ + modifies (loc_union l (loc_buffer b)) h h' /\ + U32.v from <= U32.v to /\ + U32.v to <= length b /\ + Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\ + Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b) + )) + (ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h')) + + +/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted +/// ``does_not_contain_addr h (r, a)``, only if, either region ``r`` is +/// not live, or address ``a`` is unused in region ``r``. + +(* BEGIN TODO: move to FStar.Monotonic.HyperStack *) + +val does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: GTot Type0 + +val not_live_region_does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: Lemma + (requires (~ (HS.live_region h (fst ra)))) + (ensures (h `does_not_contain_addr` ra)) + +val unused_in_does_not_contain_addr + (h: HS.mem) + (#a: Type) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) +: Lemma + (requires (r `HS.unused_in` h)) + (ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r))) + +val addr_unused_in_does_not_contain_addr + (h: HS.mem) + (ra: HS.rid & nat) +: Lemma + (requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra)))) + (ensures (h `does_not_contain_addr` ra)) + +val free_does_not_contain_addr + (#a: Type0) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) + (m: HS.mem) + (x: HS.rid & nat) +: Lemma + (requires ( + HS.is_mm r /\ + m `HS.contains` r /\ + fst x == HS.frameOf r /\ + snd x == HS.as_addr r + )) + (ensures ( + HS.free r m `does_not_contain_addr` x + )) + [SMTPat (HS.free r m `does_not_contain_addr` x)] + +val does_not_contain_addr_elim + (#a: Type0) + (#rel: Preorder.preorder a) + (r: HS.mreference a rel) + (m: HS.mem) + (x: HS.rid & nat) +: Lemma + (requires ( + m `does_not_contain_addr` x /\ + HS.frameOf r == fst x /\ + HS.as_addr r == snd x + )) + (ensures (~ (m `HS.contains` r))) + +(** END TODO *) + +/// Addresses that have not been allocated yet can be removed from +/// modifies clauses. + +val modifies_only_live_addresses + (r: HS.rid) + (a: Set.set nat) + (l: loc) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_addresses false r a) l) h h' /\ + (forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x)) + )) + (ensures (modifies l h h')) + + +(* Generic way to ensure that a buffer just allocated is disjoint from + any other object, however the latter's liveness is defined. 
*) + +val loc_not_unused_in (h: HS.mem) : GTot loc + +val loc_unused_in (h: HS.mem) : GTot loc + +(* Shortcut notations with more handy names *) + +let loc_in (l: loc) (h: HS.mem) = + loc_not_unused_in h `loc_includes` l + +let loc_not_in (l: loc) (h: HS.mem) = + loc_unused_in h `loc_includes` l + +val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma + (requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r)))) + (ensures (loc_unused_in h `loc_includes` loc_regions false rs)) + +val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma + (loc_disjoint (loc_unused_in h) (loc_not_unused_in h)) + +val not_live_region_loc_not_unused_in_disjoint + (h0: HS.mem) + (r: HS.rid) +: Lemma + (requires (~ (HS.live_region h0 r))) + (ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0))) + +val fresh_frame_loc_not_unused_in_disjoint + (h0 h1: HS.mem) +: Lemma + (requires (HS.fresh_frame h0 h1)) + (ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0))) + [SMTPat (HS.fresh_frame h0 h1)] + +val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) + :Lemma (requires (live h b)) + (ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b)) + [SMTPat (live h b)] + +val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) + :Lemma (requires (unused_in b h)) + (ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b)) + [SMTPat (unused_in b h)] + +val modifies_address_liveness_insensitive_unused_in + (h h' : HS.mem) +: Lemma + (requires (modifies (address_liveness_insensitive_locs) h h')) + (ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h')) + +/// Addresses that have not been allocated yet can be removed from +/// modifies clauses. 
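+
+(* An illustrative aside (hypothetical client lemma, not part of this
+   interface; the name is ours): a buffer still unused in ``h`` is disjoint,
+   at the level of addresses, from any buffer live in ``h``. This chains
+   ``unused_in_loc_unused_in``, ``live_loc_not_unused_in`` and
+   ``loc_unused_in_not_unused_in_disjoint`` above with
+   ``loc_disjoint_includes`` from earlier in this interface; the library
+   packages a variant of this reasoning as
+   ``unused_in_not_unused_in_disjoint_2`` below. *)
+let example_unused_live_disjoint
+  (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
+  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h:HS.mem)
+  : Lemma (requires (b1 `unused_in` h /\ live h b2))
+          (ensures  (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
+  = unused_in_loc_unused_in b1 h;
+    live_loc_not_unused_in b2 h;
+    loc_unused_in_not_unused_in_disjoint h;
+    loc_disjoint_includes
+      (loc_unused_in h) (loc_not_unused_in h)
+      (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)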
+ +val modifies_only_not_unused_in + (l: loc) + (h h' : HS.mem) +: Lemma + (requires (modifies (loc_union (loc_unused_in h) l) h h')) + (ensures (modifies l h h')) + +val mreference_live_loc_not_unused_in + (#t: Type) + (#pre: Preorder.preorder t) + (h: HS.mem) + (r: HS.mreference t pre) +: Lemma + (requires (h `HS.contains` r)) + (ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r)) + [SMTPatOr [ + [SMTPat (HS.contains h r)]; + [SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)]; + [SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)]; + ]] + +val mreference_unused_in_loc_unused_in + (#t: Type) + (#pre: Preorder.preorder t) + (h: HS.mem) + (r: HS.mreference t pre) +: Lemma + (requires (r `HS.unused_in` h)) + (ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r)) + [SMTPatOr [ + [SMTPat (HS.unused_in r h)]; + [SMTPat (loc_unused_in h `loc_includes` loc_mreference r)]; + [SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)]; + ]] + +val unused_in_not_unused_in_disjoint_2 + (l1 l2 l1' l2': loc) + (h: HS.mem) +: Lemma + (requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' )) + (ensures (loc_disjoint l1' l2' )) + [SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)] + +val modifies_loc_unused_in + (l: loc) + (h1 h2: HS.mem) + (l' : loc) +: Lemma + (requires ( + modifies l h1 h2 /\ + address_liveness_insensitive_locs `loc_includes` l /\ + loc_unused_in h2 `loc_includes` l' + )) + (ensures (loc_unused_in h1 `loc_includes` l')) + [SMTPatOr [ + [SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')]; + [SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')]; + ]] + +/// Shorthand: freshness + +let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 = + loc_unused_in h `loc_includes` l /\ + loc_not_unused_in h' `loc_includes` l + +val ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem) + (x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma + (requires (HST.ralloc_post i init m0 x m1)) + (ensures (fresh_loc (loc_freed_mreference x) m0 m1)) + [SMTPat (HST.ralloc_post i init m0 x m1)] + +//AR: this is needed for liveness across fresh_frame +val fresh_frame_modifies (h0 h1: HS.mem) : Lemma + (requires (HS.fresh_frame h0 h1)) + (ensures (modifies loc_none h0 h1)) + [SMTPat (HS.fresh_frame h0 h1)] + +val popped_modifies (h0 h1: HS.mem) : Lemma + (requires (HS.popped h0 h1)) + (ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1)) + [SMTPat (HS.popped h0 h1)] + +val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem) + : Lemma (requires (fresh_loc l_fresh h1 h2 /\ + modifies l_aux h1 h2 /\ + l_goal `loc_includes` l_aux /\ + modifies (loc_union l_fresh l_goal) h2 h3)) + (ensures (modifies l_goal h1 h3)) + [SMTPat (fresh_loc l_fresh h1 h2); + SMTPat (modifies l_aux h1 h2); + SMTPat (modifies l_goal h1 h3)] + +(* + * AR: this lemma is framing the modifies clause across a fresh frame + * one way to do it would have been to reuse the lemma modifies_remove_new_locs, + * treating the fresh frame as another new location + * however, the way library is set up, loc_region in any form cannot be considered + * a fresh loc + * so, we have a special lemma for fresh_frame 
+ *) +val modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc) + : Lemma (requires (HS.fresh_frame h1 h2 /\ + modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3)) + (ensures (modifies l h1 h3)) + [SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)] + +/// Legacy shorthands for disjointness and inclusion of buffers +/// + +let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 = + loc_disjoint (loc_buffer b1) (loc_buffer b2) + +let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 = + loc_includes (loc_buffer b1) (loc_buffer b2) /\ + (g_is_null b1 <==> g_is_null b2) + +val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) + :Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0)) + (ensures (~(b1 === b2))) + +val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma + (requires (length b1 == 0)) + (ensures (disjoint b1 b2)) + + +(* +/// The liveness of a sub-buffer entails from the liveness +/// of its enclosing buffer. + +val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a) + (h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2) + :Lemma (requires (larger `includes` smaller)) + (ensures (live h larger ==> live h smaller)) + [SMTPatOr [ + [SMTPat (includes larger smaller); SMTPat (live h larger)]; + [SMTPat (includes larger smaller); SMTPat (live h smaller)]; + ]] +*) + +val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2) + :Lemma (requires (larger `includes` smaller)) + (ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller)) + [SMTPat (larger `includes` smaller)] + +/// +/// Useful shorthands for pointers, or maybe-null pointers + +inline_for_extraction +type mpointer (a:Type0) (rrel:srel a) (rel:srel a) = + b:mbuffer a rrel rel{length b == 1} + +inline_for_extraction +type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) = + b:mbuffer a rrel rel{if g_is_null b then True else length b == 1} + +unfold +let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) = + get h x 0 + + +/// Two pointers having different contents are disjoint. This is valid +/// only for pointers, i.e. buffers of size 1. + +val pointer_distinct_sel_disjoint + (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a) + (b1:mpointer a rrel1 rel1) + (b2:mpointer a rrel2 rel2) + (h:HS.mem) + :Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0)) + (ensures (disjoint b1 b2)) + +/// The following stateful operations on buffers do not change the +/// memory, but, as required by the C standard, they all require the +/// buffer in question to be live. + +/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``. + +val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :HST.Stack bool (requires (fun h -> live h b)) + (ensures (fun h y h' -> h == h' /\ y == g_is_null b)) + + +/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from +/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as +/// ``b + i`` (or, equivalently, ``&b[i]``.) 
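+
+(* An illustrative aside before the sub-buffer operations (hypothetical client
+   code, not part of this interface; the name is ours): after the dynamic
+   nullity test ``is_null`` above, the refinement on ``mpointer_or_null``
+   lets the caller conclude that a non-null result really is a length-1
+   buffer. *)
+let example_check_null (#a:Type0) (#rrel #rel:srel a) (b:mpointer_or_null a rrel rel)
+  : HST.Stack bool
+    (requires (fun h -> live h b))
+    (ensures  (fun h0 r h1 -> h0 == h1 /\ r == g_is_null b /\
+                           (not r ==> length b == 1)))
+  = is_null b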
+ +val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel) + (i:U32.t) (len:Ghost.erased U32.t) + :HST.Stack (mbuffer a rrel sub_rel) + (requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b)) + (ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len))) + +/// ``offset b i`` construct the tail of the buffer ``b`` starting from +/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i`` +/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or +/// ``&b[i]``. +/// +/// This stateful operation cannot be derived from ``sub``, because the +/// length cannot be computed outside of proofs. + +val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel) + (i:U32.t) + :HST.Stack (mbuffer a rrel sub_rel) + (requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b)) + (ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i))) +// goffset + + +/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and +/// returns it. KaRaMeL compiles it as b[i]. + +val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t) + :HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b)) + (ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i))) + + +/// The following stateful operations on buffers modify the memory, +/// and, as usual, require the liveness of the buffer. + +/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in +/// heap `h` to correspond to the sequence `s` + +val g_upd_seq (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (s:Seq.lseq a (length b)) + (h:HS.mem{live h b}) + :GTot HS.mem + +val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) + :Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h)) + +/// A lemma specifying `g_upd_seq` in terms of its effect on the +/// buffer's underlying sequence + +val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + (s:Seq.lseq a (length b)) + (h:HS.mem{live h b}) + : Lemma (let h' = g_upd_seq b s h in + (Seq.length s > 0 ==> not (g_is_null b)) /\ + modifies (loc_buffer b) h h' /\ + live h' b /\ + HST.equal_domains h h' /\ + as_seq h' b == s) + +/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location +/// `i` writing ``v`` there. This is the spec analog of the stateful +/// update `upd` below. + +let g_upd (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + (i:nat{i < length b}) + (v:a) + (h:HS.mem{live h b}) + : GTot HS.mem + = g_upd_seq b (Seq.upd (as_seq h b) i v) h + +val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + (i:nat{i < length b}) + (v:a) + (h:HS.mem{live h b}) + : Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h)) + +/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of +/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``. 
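+
+(* An illustrative aside before the update operations below (hypothetical
+   client code, not part of this interface; the name is ours): a minimal
+   stateful read through ``index`` above. *)
+let example_read_first (#a:Type0) (#rrel #rel:srel a)
+  (b:mbuffer a rrel rel{length b > 0})
+  : HST.Stack a
+    (requires (fun h -> live h b))
+    (ensures  (fun h0 y h1 -> h0 == h1 /\ y == Seq.index (as_seq h0 b) 0))
+  = index b 0ul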
+ +val upd' + (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + (i:U32.t) + (v:a) + :HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\ + rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v))) + (ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h)) + +inline_for_extraction +let upd + (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) + (i:U32.t) + (v:a) + : HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\ + rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v))) + (ensures (fun h _ h' -> (not (g_is_null b)) /\ + modifies (loc_buffer b) h h' /\ + live h' b /\ + as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v)) + = let h = HST.get () in + upd' b i v; + g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h + +(* FIXME: Comment on `recall` *) + +val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 + +val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0 + +(* + * A functoriality lemma + *) +unfold +let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) = + forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel + + +val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a) + (b0:mbuffer a rrel rel) + (b1:mbuffer a rrel subrel) +: Lemma + (requires rrel_rel_always_compatible rrel subrel) + (ensures + (region_lifetime_buf b0 /\ + (exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1) + +val recallable_null (#a:Type0) (#rrel #rel:srel a) + :Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))] + +(* +val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2) + :Lemma (requires (larger `includes` smaller)) + (ensures (recallable larger <==> recallable smaller)) + [SMTPatOr [ + [SMTPat (recallable larger); SMTPat (recallable smaller);]; + [SMTPat (larger `includes` smaller)]; + ]] +*) + +val recallable_mgsub (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a) + :Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b)) + (ensures (recallable (mgsub sub_rel b i len))) + [SMTPatOr [ + [SMTPat (recallable (mgsub sub_rel b i len))]; + [SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);] + ]] + +val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b)))) + (ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b)) + +(* + * Begin: API for general witness and recall + * Clients can witness predicates on the contents of the buffer, and later recall them + * Provided the predicates are stable w.r.t. the buffer preorder + *) + +(* Shorthand for predicates of Seq.seq a *) +unfold let spred (a:Type0) = Seq.seq a -> Type0 + +(* + * Note the tight patterns on the quantifier, you may need to write additional triggers + * if you are directly working with them + *) +unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) = + forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2 + +(* Clients get this pure token when they witness a predicate *) +val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0 + +(* + * We can only support witness and recall for gc-malloced buffers (i.e. 
recallable ones) + * This is not a fundamental limitation, but needs some tweaks to the underlying state model + *) +val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) + :HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel)) + (ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p)) + +val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) + :HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p)) + (ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b))) + +val witnessed_functorial (#a:Type0) + (#rrel #rel1 #rel2:srel a) + (b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t) + (s1 s2:spred a) +: Lemma + (requires + rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc. + U32.v i + U32.v len <= length b1 /\ + b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same + witnessed b1 s1 /\ + (forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2))) + (ensures witnessed b2 s2) + +(* + * A stateful version that relaxes the rrel and rel compatibility + * but requires liveness of b1 + *) +val witnessed_functorial_st (#a:Type0) + (#rrel #rel1 #rel2:srel a) + (b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t) + (s1 s2:spred a) +: HST.Stack unit + (requires fun h -> + live h b1 /\ + U32.v i + U32.v len <= length b1 /\ + b2 == mgsub rel2 b1 i len /\ + witnessed b1 s1 /\ + (forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2))) + (ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2) + +(* End: API for general witness and recall *) + + +/// Deallocation. A buffer that was allocated by ``malloc`` (see below) +/// can be ``free`` d. + +val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 + +val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b)) + (ensures (fun h0 _ h1 -> (not (g_is_null b)) /\ + Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\ + (HS.get_tip h1) == (HS.get_tip h0) /\ + modifies (loc_addr_of_buffer b) h0 h1 /\ + HS.live_region h1 (frameOf b))) + +val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) + :Lemma (requires (freeable b)) (ensures (length b > 0)) + [SMTPat (freeable b)] + +val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) + :Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2)) + (ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2)) + +val freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) + (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) + :Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2)) + (ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2))) + [SMTPat (freeable b1); SMTPat (disjoint b1 b2)] + +(***** Begin allocation functions *****) + + +/// Allocation. This is the common postcondition of all allocation +/// operators, which tells that the resulting buffer is fresh, and +/// specifies its initial contents. 
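+
+(* An illustrative aside before the allocation functions (hypothetical client
+   code, not part of this interface; the name is ours): the typical
+   witness/recall round-trip with ``witness_p`` and ``recall_p`` above.
+   Stability of the predicate w.r.t. the buffer preorder is taken here as a
+   hypothesis. *)
+let example_witness_then_recall (#a:Type0) (#rrel #rel:srel a)
+  (b:mbuffer a rrel rel) (p:spred a)
+  : HST.ST unit
+    (requires (fun h0 -> recallable b /\ p (as_seq h0 b) /\ p `stable_on` rel))
+    (ensures  (fun h0 _ h1 -> h0 == h1 /\ p (as_seq h1 b)))
+  = witness_p b p;   //stability lets us witness p
+    recall_p b p     //and recall it later (here, immediately)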
+ +(* + * Allocation functions: + * In the return type, we try to give heap-independent postconditions (such as length) + * in the refinement of the buffer type (for the usage pattern of top-level buffers) + * while heap dependent postconditions are provided in the ensures clause + * + * One unsatisfying aspect is that these functions are duplicated in the wrappers that we write + * (e.g. Buffer, ImmutableBuffer, etc.) + * If we don't duplicate, then the clients may face type inference issues (for preorders) + * + * So, if you change any of the pre- or postcondition, you should change the pre and post spec functions + * (such as alloc_post_mem_common etc.), rather than the specs directly + + * Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers? + * Will try that + * + * For memory dependent post, alloc_post_mem_common is the one used by everyone + * + * For heap allocations, the library also provides partial functions that could return null + * Clients need to explicitly check for non-null values when using these functions + * Partial function specs use alloc_partial_post_mem_common + * + * NOTE: a useful test for the implementation of partial functions is that + * their spec should be valid even when their implementation just returns null + *) + +unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat) + = b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)} + +unfold +let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a) + = live h1 b /\ + unused_in b h0 /\ + Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\ + (HS.get_tip h1) == (HS.get_tip h0) /\ + modifies loc_none h0 h1 /\ + as_seq h1 b == s + +(* Return type and post for partial allocation functions *) +unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid) + = b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} + +unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a) + (b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a) + = (g_is_null b /\ h0 == h1) \/ + ((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s) + + +unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0 + + +/// ``gcmalloc r init len`` allocates a memory-managed buffer of some +/// positive length ``len`` in an eternal region ``r``. Every cell of this +/// buffer will have initial contents ``init``. Such a buffer cannot be +/// freed. In fact, it is eternal: it cannot be deallocated at all. 
+ +(* + * See the Allocation comment above when changing the spec + *) +val mgcmalloc (#a:Type0) (#rrel:srel a) + (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + + +(* + * Allocate a memory-managed buffer initialized with contents from src + * + * This allocates and initializes the buffer atomically (from the perspective of the Low* clients) + *) +val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid) + (#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t) + : HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b}) + (requires fun h0 -> + malloc_pre r len /\ + live h0 src /\ U32.v id_src + U32.v len <= length src) + (ensures fun h0 b h1 -> + alloc_post_mem_common b h0 h1 + (Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len))) + +(* + * See the Allocation comment above when changing the spec + *) +inline_for_extraction +let mgcmalloc_partial (#a:Type0) (#rrel:srel a) + (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + = mgcmalloc r init len + + +/// ``malloc r init len`` allocates a hand-managed buffer of some +/// positive length ``len`` in an eternal region ``r``. Every cell of this +/// buffer will have initial contents ``init``. Such a buffer can be +/// freed using ``free`` above. Note that the ``freeable`` permission is +/// only on the whole buffer ``b``, and is not inherited by any of its +/// strict sub-buffers. + +(* + * See the Allocation comment above when changing the spec + *) +val mmalloc (#a:Type0) (#rrel:srel a) + (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + +(* + * Allocate a hand-managed buffer initialized with contents from src + * + * This allocates and initializes the buffer atomically (from the perspective of the Low* clients) + *) +val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid) + (#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t) + : HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b}) + (requires fun h0 -> + malloc_pre r len /\ + live h0 src /\ U32.v id_src + U32.v len <= length src) + (ensures fun h0 b h1 -> + alloc_post_mem_common b h0 h1 + (Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len))) + +(* + * See the Allocation comment above when changing the spec + *) +inline_for_extraction +let mmalloc_partial (#a:Type0) (#rrel:srel a) + (r:HS.rid) (init:a) (len:U32.t) + :HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) + = mmalloc r init len + + +/// ``alloca init len`` allocates a buffer of some positive length ``len`` +/// in the current stack frame. Every cell of this buffer will have +/// initial contents ``init``. Such a buffer cannot be freed +/// individually, but is automatically freed as soon as its stack +/// frame is deallocated by ``HST.pop_frame``. 
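+
+(* An illustrative aside before the stack-allocation combinators below
+   (hypothetical client code, not part of this interface; the name, element
+   type, preorder and length are arbitrary choices for the sketch): the
+   allocate-then-free lifecycle of a hand-managed buffer, using ``mmalloc``
+   above and ``free`` from earlier in this interface. *)
+let example_malloc_then_free (#rrel:srel FStar.UInt8.t) (r:HS.rid)
+  : HST.ST unit
+    (requires (fun _ -> malloc_pre r 8ul))
+    (ensures  (fun _ _ _ -> True))
+  = let b = mmalloc #FStar.UInt8.t #rrel r 0uy 8ul in   //freshly allocated, hence live and freeable
+    free b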
+ +unfold let alloca_pre (len:U32.t) = U32.v len > 0 + +(* + * See the Allocation comment above when changing the spec + *) +val malloca (#a:Type0) (#rrel:srel a) + (init:a) (len:U32.t) + :HST.StackInline (lmbuffer a rrel rrel (U32.v len)) + (requires (fun _ -> alloca_pre len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\ + frameOf b == HS.get_tip h0)) + +(* + * Allocate a stack buffer initialized with contents from src + * + * This allocates and initializes the buffer atomically (from the perspective of the Low* clients) + *) +val malloca_and_blit (#a:Type0) (#rrel:srel a) + (#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t) + : HST.StackInline (lmbuffer a rrel rrel (U32.v len)) + (requires fun h0 -> + alloca_pre len /\ + live h0 src /\ U32.v id_src + U32.v len <= length src) + (ensures fun h0 b h1 -> + alloc_post_mem_common b h0 h1 + (Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\ + frameOf b == HS.get_tip h0) + + +/// ``alloca_of_list init`` allocates a buffer in the current stack +/// frame. The initial values of the cells of this buffer are +/// specified by the ``init`` list, which must be nonempty, and of +/// length representable as a machine integer. + +unfold let alloca_of_list_pre (#a:Type0) (init:list a) = + normalize (0 < FStar.List.Tot.length init) /\ + normalize (FStar.List.Tot.length init <= UInt.max_int 32) + +(* + * See the Allocation comment above when changing the spec + *) +val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a) + :HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init))) + (requires (fun _ -> alloca_of_list_pre init)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\ + frameOf b == HS.get_tip h0)) + +unfold let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) = + HST.is_eternal_region r /\ + normalize (FStar.List.Tot.length init <= UInt.max_int 32) + +(* + * See the Allocation comment above when changing the spec + *) +val mgcmalloc_of_list (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a) + :HST.ST (b:lmbuffer a rrel rrel (normalize_term (List.Tot.length init)){frameOf b == r /\ recallable b}) + (requires (fun _ -> gcmalloc_of_list_pre r init)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init))) + +(* + * See the Allocation comment above when changing the spec + *) +inline_for_extraction +let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a) + :HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b}) + (requires (fun _ -> gcmalloc_of_list_pre r init)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) + + = mgcmalloc_of_list r init + + +unfold let alloc_drgn_pre (h:HS.mem) (d:HST.drgn) (len:U32.t) = h `HS.live_region` (HST.rid_of_drgn d) /\ U32.v len > 0 + +val mmalloc_drgn (#a:Type0) (#rrel:srel a) + (d:HST.drgn) (init:a) (len:U32.t) +: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b}) + (requires fun h -> alloc_drgn_pre h d len) + (ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)) + +val mmalloc_drgn_mm (#a:Type0) (#rrel:srel a) + (d:HST.drgn) (init:a) (len:U32.t) +: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ freeable b}) + (requires fun h -> alloc_drgn_pre h d len) + (ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v 
len) init)) + +val mmalloc_drgn_and_blit (#a:Type0) (#rrel:srel a) + (#rrel1 #rel1:srel a) + (d:HST.drgn) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t) +: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b}) + (requires fun h -> + alloc_drgn_pre h d len /\ + live h src /\ + U32.v id_src + U32.v len <= length src) + (ensures fun h0 b h1 -> + alloc_post_mem_common b h0 h1 + (Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len))) + + + +(***** End allocation functions *****) + + +/// Derived operations + +val blit (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a) + (src:mbuffer a rrel1 rel1) + (idx_src:U32.t) + (dst:mbuffer a rrel2 rel2) + (idx_dst:U32.t) + (len:U32.t) + :HST.Stack unit (requires (fun h -> live h src /\ live h dst /\ + U32.v idx_src + U32.v len <= length src /\ + U32.v idx_dst + U32.v len <= length dst /\ + (* TODO: remove the rhs part of this disjunction once patterns on loc_buffer_from_to are introduced *) + (loc_disjoint (loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) \/ disjoint src dst) /\ + rel2 (as_seq h dst) + (Seq.replace_subseq (as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) + (Seq.slice (as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len))))) + (ensures (fun h _ h' -> modifies (loc_buffer dst) h h' /\ + live h' dst /\ + Seq.slice (as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) == + Seq.slice (as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len) /\ + Seq.slice (as_seq h' dst) 0 (U32.v idx_dst) == + Seq.slice (as_seq h dst) 0 (U32.v idx_dst) /\ + Seq.slice (as_seq h' dst) (U32.v idx_dst + U32.v len) (length dst) == + Seq.slice (as_seq h dst) (U32.v idx_dst + U32.v len) (length dst))) + +val fill (#t:Type) (#rrel #rel: srel t) + (b: mbuffer t rrel rel) + (z:t) + (len:U32.t) +: HST.Stack unit + (requires (fun h -> + live h b /\ + U32.v len <= length b /\ + rel (as_seq h b) (Seq.replace_subseq (as_seq h b) 0 (U32.v len) (Seq.create (U32.v len) z)) + )) + (ensures (fun h0 _ h1 -> + modifies (loc_buffer b) h0 h1 /\ + live h1 b /\ + Seq.slice (as_seq h1 b) 0 (U32.v len) == Seq.create (U32.v len) z /\ + Seq.slice (as_seq h1 b) (U32.v len) (length b) == Seq.slice (as_seq h0 b) (U32.v len) (length b) + )) + +/// Type class instantiation for compositionality with other kinds of memory locations than regions, references or buffers (just in case). +/// No usage pattern has been found yet. 
+ +module MG = FStar.ModifiesGen + +val abuffer' (region: HS.rid) (addr: nat) : Tot Type0 + +inline_for_extraction +let abuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (abuffer' region addr) + +val cloc_cls: MG.cls abuffer + +val cloc_of_loc (l: loc) : Tot (MG.loc cloc_cls) + +val loc_of_cloc (l: MG.loc cloc_cls) : Tot loc + +val loc_of_cloc_of_loc (l: loc) : Lemma + (loc_of_cloc (cloc_of_loc l) == l) + [SMTPat (loc_of_cloc (cloc_of_loc l))] + +val cloc_of_loc_of_cloc (l: MG.loc cloc_cls) : Lemma + (cloc_of_loc (loc_of_cloc l) == l) + [SMTPat (cloc_of_loc (loc_of_cloc l))] + +val cloc_of_loc_none: unit -> Lemma (cloc_of_loc loc_none == MG.loc_none) + +val cloc_of_loc_union (l1 l2: loc) : Lemma + (cloc_of_loc (loc_union l1 l2) == MG.loc_union (cloc_of_loc l1) (cloc_of_loc l2)) + +val cloc_of_loc_addresses + (preserve_liveness: bool) + (r: HS.rid) + (n: Set.set nat) +: Lemma + (cloc_of_loc (loc_addresses preserve_liveness r n) == MG.loc_addresses preserve_liveness r n) + +val cloc_of_loc_regions + (preserve_liveness: bool) + (r: Set.set HS.rid) +: Lemma + (cloc_of_loc (loc_regions preserve_liveness r) == MG.loc_regions preserve_liveness r) + +val loc_includes_to_cloc (l1 l2: loc) : Lemma + (loc_includes l1 l2 <==> MG.loc_includes (cloc_of_loc l1) (cloc_of_loc l2)) + +val loc_disjoint_to_cloc (l1 l2: loc) : Lemma + (loc_disjoint l1 l2 <==> MG.loc_disjoint (cloc_of_loc l1) (cloc_of_loc l2)) + +val modifies_to_cloc (l: loc) (h1 h2: HS.mem) : Lemma + (modifies l h1 h2 <==> MG.modifies (cloc_of_loc l) h1 h2) diff --git a/stage0/ulib/LowStar.PrefixFreezableBuffer.fst b/stage0/ulib/LowStar.PrefixFreezableBuffer.fst new file mode 100644 index 00000000000..d6f618db11b --- /dev/null +++ b/stage0/ulib/LowStar.PrefixFreezableBuffer.fst @@ -0,0 +1,148 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) + +module LowStar.PrefixFreezableBuffer + +open FStar.HyperStack.ST + +include LowStar.Monotonic.Buffer + +module P = FStar.Preorder +module G = FStar.Ghost + +module U8 = FStar.UInt8 +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +module ST = FStar.HyperStack.ST + +module E = FStar.Endianness +module LE = LowStar.Endianness + + +(* + * Implementation for LowStar.PrefixfreezableBuffer + *) + +#set-options "--max_fuel 0 --max_ifuel 0" + +let le_to_n s = E.le_to_n s + +let prefix_freezable_preorder = pre + +let prefix_freezable_preorder_elim _ _ = () + +private let update_frozen_until_alloc + (b:mbuffer u8 prefix_freezable_preorder prefix_freezable_preorder) + : Stack + unit + (requires fun h -> + live h b /\ + length b >= 4 /\ + frozen_until (as_seq h b) == 0) + (ensures fun h0 _ h1 -> + live h1 b /\ + modifies (loc_buffer b) h0 h1 /\ + frozen_until (as_seq h1 b) == 4 /\ + witnessed b (frozen_until_at_least 4)) + = LE.store32_le_i b 0ul 4ul; + witness_p b (frozen_until_at_least 4) + +let gcmalloc r len = + let h0 = ST.get () in + + let b = mgcmalloc #_ #prefix_freezable_preorder r 0uy (U32.add len 4ul) in + + let h = ST.get () in E.le_to_n_zeros (Seq.slice (as_seq h b) 0 4); + + assert (fresh_loc (loc_buffer b) h0 h); //TODO: necessary for firing modifies_remove_new_locs lemma? + update_frozen_until_alloc b; + b + +let malloc r len = + let h0 = ST.get () in + + let b = mmalloc #_ #prefix_freezable_preorder r 0uy (U32.add len 4ul) in + + let h = ST.get () in E.le_to_n_zeros (Seq.slice (as_seq h b) 0 4); + + assert (fresh_loc (loc_buffer b) h0 h); //TODO: necessary for firing modifies_remove_new_locs lemma? + update_frozen_until_alloc b; + b + +let alloca len = + let h0 = ST.get () in + + let b = malloca #_ #prefix_freezable_preorder 0uy (U32.add len 4ul) in + + let h = ST.get () in E.le_to_n_zeros (Seq.slice (as_seq h b) 0 4); + + assert (fresh_loc (loc_buffer b) h0 h); //TODO: necessary for firing modifies_remove_new_locs lemma? + update_frozen_until_alloc b; + b + +let upd b i v = + recall_p b (frozen_until_at_least 4); + upd b i v + +(* + * This lemma handles the mismatch between the style of the spec + * in LE.store_pre and LE.store_post, and the preorder of PrefixFreezableBuffers + * Basically the sequence library is missing a lemma that eliminates + * equality on two slices to some equality on the base sequences + *) +let le_pre_post_index + (s1 s2:Seq.seq u8) + : Lemma + (ensures + (Seq.length s1 == Seq.length s2 /\ + Seq.length s1 >= 4 /\ + Seq.equal (Seq.slice s1 0 0) (Seq.slice s2 0 0) /\ + Seq.equal (Seq.slice s1 4 (Seq.length s1)) + (Seq.slice s2 4 (Seq.length s2))) ==> + + (forall (i:nat).{:pattern (Seq.index s1 i); (Seq.index s2 i)} + (i >= 4 /\ i < Seq.length s1) ==> + (Seq.index s1 i == Seq.index s2 i))) + = assert (forall (s:Seq.seq u8). + Seq.length s >= 4 ==> + (forall (i:nat). 
+ (i >= 4 /\ i < Seq.length s) ==> + Seq.index s i == Seq.index (Seq.slice s 4 (Seq.length s)) (i - 4))) + +let freeze b i = + recall_p b (frozen_until_at_least 4); + FStar.Classical.forall_intro_2 le_pre_post_index; + LE.store32_le_i b 0ul i; + witness_p b (frozen_until_at_least (U32.v i)) + +let frozen_until_st b = LE.load32_le_i b 0ul + +let witness_slice b i j snap = + witness_p b (slice_is i j snap) + +let recall_slice b i j snap = + recall_p b (slice_is i j snap) + +let witness_frozen_until b n = + witness_p b (frozen_until_at_least n) + +let recall_frozen_until b n = + recall_p b (frozen_until_at_least n) + +let recall_frozen_until_default b = + recall_p b (frozen_until_at_least 4) diff --git a/stage0/ulib/LowStar.PrefixFreezableBuffer.fsti b/stage0/ulib/LowStar.PrefixFreezableBuffer.fsti new file mode 100644 index 00000000000..46ec82badc1 --- /dev/null +++ b/stage0/ulib/LowStar.PrefixFreezableBuffer.fsti @@ -0,0 +1,268 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module LowStar.PrefixFreezableBuffer + +open FStar.HyperStack.ST + +include LowStar.Monotonic.Buffer + +module P = FStar.Preorder +module G = FStar.Ghost + +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +module ST = FStar.HyperStack.ST + +(* + * A library for prefix freezable buffers of elements of type u8 + * + * Our monotonicity theory does not easily support preorders and predicates over + * multiple references. 
So instead of keeping the frozen-until counter in a + * separate (ghost) reference, the library maintains the frozen-until counter (a u32) + * in the first four bytes of the buffer itself + * + * Buffer contents up to the frozen-until counter are stable and clients can witness + * and recall them + * + *) + +type u8 = UInt8.t +type u32 = U32.t + +#set-options "--max_fuel 0 --max_ifuel 0" + + +/// This is the frozen until index in the sequence representation of a PrefixFreezableBuffer + +val le_to_n (s:Seq.seq u8) : Tot nat + +let frozen_until (s:Seq.seq u8{Seq.length s >= 4}) = le_to_n (Seq.slice s 0 4) + + +/// Preorder for PrefixFreezableBuffers + +private unfold let pre (s1 s2:Seq.seq u8) = + Seq.length s1 == Seq.length s2 /\ //lengths are same + (let len = Seq.length s1 in + len >= 4 ==> //if length >= 4 then + (let frozen_until1 = frozen_until s1 in + let frozen_until2 = frozen_until s2 in + (4 <= frozen_until1 /\ frozen_until1 <= len) ==> //if frozen_until1 is in the range [4, len] then + (frozen_until1 <= frozen_until2 /\ frozen_until2 <= len /\ //frozen until index increases monotonically, but remains <= len + (forall (i:nat).{:pattern Seq.index s2 i} + (4 <= i /\ i < frozen_until1) ==> Seq.index s2 i == Seq.index s1 i)))) //and the contents until frozen_until1 remain same + + +val prefix_freezable_preorder : srel u8 + +/// Clients can call the following lemma to reveal the preorder + +val prefix_freezable_preorder_elim (s1 s2:Seq.seq u8) + : Lemma (prefix_freezable_preorder s1 s2 <==> pre s1 s2) + + +/// Predicate for the frozen_until index being at least n +/// +/// It is stable w.r.t. the prefix_freezable_preorder + +let frozen_until_at_least (n:nat) : spred u8 = + fun s -> Seq.length s >= 4 /\ //it follows from the inequalities below, but we need it for typing of frozen_until + + 4 <= n /\ n <= frozen_until s /\ frozen_until s <= Seq.length s + + +/// Predicate for the frozen slice with indices in the [4, frozen_until) range +/// +/// It is stable w.r.t. 
the prefix_freezable_preorder + +let slice_is (i j:u32) (snap:G.erased (Seq.seq u8)) : spred u8 = + fun s -> let len = Seq.length s in len >= 4 /\ //for typing of frozen_until + (let frozen_until = frozen_until s in + let i = U32.v i in + let j = U32.v j in + let snap = G.reveal snap in + 4 <= i /\ i <= j /\ j <= frozen_until /\ frozen_until <= len /\ + Seq.length snap == j - i /\ + Seq.equal (Seq.slice s i j) snap) + + +/// Buffer type for PrefixfreezableBuffers +/// +/// And abbreviation for the length indexed version + +type buffer = + b:mbuffer u8 (prefix_freezable_preorder) (prefix_freezable_preorder) + {length b >= 4 /\ b `witnessed` frozen_until_at_least 4} + +unfold let lbuffer (len:u32) = + b:buffer{length b == U32.v len + 4} + + +/// Allocation precondition for prefix freezable buffers adds an additional constraint +/// that the input length + 4 must fit in u32 + +unfold let malloc_pre (r:HS.rid) (len:u32) = + UInt.size (U32.v len + 4) 32 /\ malloc_pre r len + + +/// The postcondition is also different in that there is no initializer +/// and an additional predicate for the initial value of the frozen_until_index + +unfold let alloc_post_mem_common + (h0:HS.mem) (b:buffer) (h1:HS.mem) + = live h1 b /\ + unused_in b h0 /\ + Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\ + HS.get_tip h1 == HS.get_tip h0 /\ + modifies loc_none h0 h1 /\ + frozen_until (as_seq h1 b) == 4 + + +/// Allocation functions + +val gcmalloc (r:HS.rid) (len:u32) + : ST (b:lbuffer len{frameOf b == r /\ recallable b}) + (requires fun _ -> malloc_pre r len) + (ensures alloc_post_mem_common) + +val malloc (r:HS.rid) (len:u32) + : ST + (b:lbuffer len{frameOf b == r /\ freeable b}) + (requires fun _ -> malloc_pre r len) + (ensures alloc_post_mem_common) + +unfold let alloca_pre (len:U32.t) = //precondition for stack allocated prefix freezable buffers + UInt.size (U32.v len + 4) 32 /\ alloca_pre len + +val alloca (len:u32) + : StackInline + (lbuffer len) + (requires fun _ -> alloca_pre len) + (ensures fun h0 b h1 -> + alloc_post_mem_common h0 b h1 /\ frameOf b == HS.get_tip h0) + + +/// Update function +/// +/// Input index must be geq than the current frozen until index + +val upd (b:buffer) (i:u32) (v:u8) + : Stack + unit + (requires fun h -> + live h b /\ U32.v i < length b /\ + U32.v i >= frozen_until (as_seq h b)) + (ensures fun h0 _ h1 -> + (not (g_is_null b)) /\ + modifies (loc_buffer b) h0 h1 /\ + live h1 b /\ + frozen_until (as_seq h0 b) == frozen_until (as_seq h1 b) /\ + as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) v) + + +/// API to freeze the buffer up-to the input index +/// +/// Also provides a witnessed frozen_until_at_least predicate + +val freeze (b:buffer) (i:u32) + : Stack + unit + (requires fun h -> + live h b /\ + U32.v i <= length b /\ + U32.v i >= frozen_until (as_seq h b)) + (ensures fun h0 _ h1 -> + (not (g_is_null b)) /\ + modifies (loc_buffer b) h0 h1 /\ + live h1 b /\ + frozen_until (as_seq h1 b) == U32.v i /\ + b `witnessed` frozen_until_at_least (U32.v i) /\ + (forall (k:nat).{:pattern (Seq.index (as_seq h1 b) k)} //contents from [4, len) remain same + (4 <= k /\ k < length b) ==> + (Seq.index (as_seq h1 b) k == Seq.index (as_seq h0 b) k))) + + +/// API for querying the current frozen until index + +val frozen_until_st (b:buffer) + : Stack + u32 + (requires fun h -> live h b) + (ensures fun h0 r h1 -> + h0 == h1 /\ + U32.v r == frozen_until (as_seq h1 b)) + + +/// Clients can witness contents of some [i, j) within the range [4, frozen_until) + +val witness_slice 
(b:buffer) (i j:u32) (snap:G.erased (Seq.seq u8)) + : Stack + unit + (requires fun h -> slice_is i j snap (as_seq h b)) + (ensures fun h0 _ h1 -> + h0 == h1 /\ + b `witnessed` slice_is i j snap) + + +/// Clients can recall contents of some previously witnessed slice + +val recall_slice (b:buffer) (i j:u32) (snap:G.erased (Seq.seq u8)) + : Stack + unit + (requires fun h -> + (recallable b \/ live h b) /\ + b `witnessed` slice_is i j snap) + (ensures fun h0 _ h1 -> + h0 == h1 /\ + slice_is i j snap (as_seq h1 b)) + + +/// Clients can also witness the value of the frozen until index + +val witness_frozen_until (b:buffer) (n:nat) + : Stack + unit + (requires fun h -> frozen_until_at_least n (as_seq h b)) + (ensures fun h0 _ h1 -> + h0 == h1 /\ + b `witnessed` frozen_until_at_least n) + + +/// And then recall the previously witnessed value of the frozen until index + +val recall_frozen_until (b:buffer) (n:nat) + : Stack + unit + (requires fun h -> + (recallable b \/ live h b) /\ + b `witnessed` frozen_until_at_least n) + (ensures fun h0 _ h1 -> + h0 == h1 /\ + frozen_until_at_least n (as_seq h1 b)) + + +/// By-default, clients can recall that 4 <= frozen until index <= length b + +val recall_frozen_until_default (b:buffer) + : Stack + unit + (requires fun h -> recallable b \/ live h b) + (ensures fun h0 _ h1 -> + h0 == h1 /\ + frozen_until_at_least 4 (as_seq h1 b)) diff --git a/stage0/ulib/LowStar.Printf.fst b/stage0/ulib/LowStar.Printf.fst new file mode 100644 index 00000000000..76c1feab457 --- /dev/null +++ b/stage0/ulib/LowStar.Printf.fst @@ -0,0 +1,538 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.Printf + +/// This module provides imperative printing functions for several +/// primitive Low* types, including +/// -- booleans (%b) +/// -- characters (%c) +/// -- strings (%s) +/// -- machine integers +/// (UInt8.t as %uy, UInt16.t as %us, UInt32.t as %ul, and UInt64.t as %uL; +/// Int8.t as %y, Int16.t as %i, Int32.t as %l , and Int64.t as %L) +/// -- and arrays (aka buffers) of these base types formatted +/// as %xN, where N is the format specifier for the array element type +/// e.g., %xuy for buffers of UInt8.t + +/// The main function of this module is `printf` +/// There are a few main differences relative to C printf +/// -- The format specifiers are different (see above) +/// +/// -- For technical reasons explained below, an extra dummy +/// argument `done` has to be provided at the end for the +/// computation to have any effect. 
///
/// E.g., one must write
///   `printf "%b %c" true 'c' done`
/// rather than just
///   `printf "%b %c" true 'c'`
///
/// -- When printing arrays, two arguments must be passed: the
///    length of the array fragment to be formatted and the array
///    itself
///
/// -- When extracted, rather than producing a C `printf` (which
///    does not, e.g., support printing of dynamically sized
///    arrays), our `printf` is specialized to a sequence of calls
///    to primitive printers for each supported type
///
/// Before diving into the technical details of how this module works,
/// you might want to see a sample usage at the very end of this file.

open FStar.Char
open FStar.String
open FStar.HyperStack.ST
module L = FStar.List.Tot
module LB = LowStar.Monotonic.Buffer

/// `lmbuffer a r s l` is
/// - a monotonic buffer of `a`
/// - governed by preorders `r` and `s`
/// - with length `l`
let lmbuffer a r s l =
  b:LB.mbuffer a r s{
    LB.len b == l
  }

/// `StTrivial`: An effect abbreviation for a stateful computation
/// with no precondition, and which does not change the state
effect StTrivial (a:Type) =
  Stack a
    (requires fun h -> True)
    (ensures fun h0 _ h1 -> h0==h1)

/// `StBuf a b`: An effect abbreviation for a stateful computation
/// that may read `b` but does not change the state
effect StBuf (a:Type) #t #r #s #l (b:lmbuffer t r s l) =
  Stack a
    (requires fun h -> LB.live h b)
    (ensures (fun h0 _ h1 -> h0 == h1))

/// Primitive printers for all the types supported by this module
assume val print_string: string -> StTrivial unit
assume val print_char  : char -> StTrivial unit
assume val print_u8    : UInt8.t -> StTrivial unit
assume val print_u16   : UInt16.t -> StTrivial unit
assume val print_u32   : UInt32.t -> StTrivial unit
assume val print_u64   : UInt64.t -> StTrivial unit
assume val print_i8    : Int8.t -> StTrivial unit
assume val print_i16   : Int16.t -> StTrivial unit
assume val print_i32   : Int32.t -> StTrivial unit
assume val print_i64   : Int64.t -> StTrivial unit
assume val print_bool  : bool -> StTrivial unit
assume val print_lmbuffer_bool (l:_) (#r:_) (#s:_) (b:lmbuffer bool r s l) : StBuf unit b
assume val print_lmbuffer_char (l:_) (#r:_) (#s:_) (b:lmbuffer char r s l) : StBuf unit b
assume val print_lmbuffer_string (l:_) (#r:_) (#s:_) (b:lmbuffer string r s l) : StBuf unit b
assume val print_lmbuffer_u8 (l:_) (#r:_) (#s:_) (b:lmbuffer UInt8.t r s l) : StBuf unit b
assume val print_lmbuffer_u16 (l:_) (#r:_) (#s:_) (b:lmbuffer UInt16.t r s l) : StBuf unit b
assume val print_lmbuffer_u32 (l:_) (#r:_) (#s:_) (b:lmbuffer UInt32.t r s l) : StBuf unit b
assume val print_lmbuffer_u64 (l:_) (#r:_) (#s:_) (b:lmbuffer UInt64.t r s l) : StBuf unit b
assume val print_lmbuffer_i8 (l:_) (#r:_) (#s:_) (b:lmbuffer Int8.t r s l) : StBuf unit b
assume val print_lmbuffer_i16 (l:_) (#r:_) (#s:_) (b:lmbuffer Int16.t r s l) : StBuf unit b
assume val print_lmbuffer_i32 (l:_) (#r:_) (#s:_) (b:lmbuffer Int32.t r s l) : StBuf unit b
assume val print_lmbuffer_i64 (l:_) (#r:_) (#s:_) (b:lmbuffer Int64.t r s l) : StBuf unit b


/// An attribute to control reduction
noextract irreducible
let __reduce__ = unit

/// Base types supported so far
noextract
type base_typ =
  | Bool
  | Char
  | String
  | U8
  | U16
  | U32
  | U64
  | I8
  | I16
  | I32
  | I64

/// Argument types are base types and arrays thereof,
/// or polymorphic arguments specified by "%a"
noextract
type arg =
  | Base of base_typ
  | Array of base_typ
  | Any

/// Interpreting a
`base_typ` as a type +[@@__reduce__] +noextract +let base_typ_as_type (b:base_typ) : Type0 = + match b with + | Bool -> bool + | Char -> char + | String -> string + | U8 -> FStar.UInt8.t + | U16 -> FStar.UInt16.t + | U32 -> FStar.UInt32.t + | U64 -> FStar.UInt64.t + | I8 -> FStar.Int8.t + | I16 -> FStar.Int16.t + | I32 -> FStar.Int32.t + | I64 -> FStar.Int64.t + +/// `fragment`: A format string is parsed into a list of fragments of +/// string literals and other arguments that need to be spliced in +/// (interpolated) +noextract +type fragment = + | Frag of string + | Interpolate of arg + +noextract +let fragments = list fragment + +/// `parse_format s`: +/// Parses a list of characters in a format string into a list of fragments +/// Or None, in case the format string is invalid +[@@__reduce__] +noextract inline_for_extraction +let rec parse_format + (s:list char) + : Tot (option fragments) + (decreases (L.length s)) + = let add_dir (d:arg) (ods : option fragments) + = match ods with + | None -> None + | Some ds -> Some (Interpolate d::ds) + in + let head_buffer (ods:option fragments) + = match ods with + | Some (Interpolate (Base t) :: rest) -> Some (Interpolate (Array t) :: rest) + | _ -> None + in + let cons_frag (c:char) (ods:option fragments) + = match ods with + | Some (Frag s::rest) -> Some (Frag (string_of_list (c :: list_of_string s)) :: rest) + | Some rest -> Some (Frag (string_of_list [c]) :: rest) + | _ -> None + in + match s with + | [] -> Some [] + | ['%'] -> None + + // %a... polymorphic arguments and preceded by their printers + | '%' :: 'a' :: s' -> + add_dir Any (parse_format s') + + // %x... arrays of base types + | '%' :: 'x' :: s' -> + head_buffer (parse_format ('%' :: s')) + + // %u ... Unsigned integers + | '%' :: 'u' :: s' -> begin + match s' with + | 'y' :: s'' -> add_dir (Base U8) (parse_format s'') + | 's' :: s'' -> add_dir (Base U16) (parse_format s'') + | 'l' :: s'' -> add_dir (Base U32) (parse_format s'') + | 'L' :: s'' -> add_dir (Base U64) (parse_format s'') + | _ -> None + end + + | '%' :: c :: s' -> begin + match c with + | '%' -> cons_frag '%' (parse_format s') + | 'b' -> add_dir (Base Bool) (parse_format s') + | 'c' -> add_dir (Base Char) (parse_format s') + | 's' -> add_dir (Base String) (parse_format s') + | 'y' -> add_dir (Base I8) (parse_format s') + | 'i' -> add_dir (Base I16) (parse_format s') + | 'l' -> add_dir (Base I32) (parse_format s') + | 'L' -> add_dir (Base I64) (parse_format s') + | _ -> None + end + + | c :: s' -> + cons_frag c (parse_format s') + + +/// `parse_format_string`: a wrapper around `parse_format` +[@@__reduce__] +noextract inline_for_extraction +let parse_format_string + (s:string) + : option fragments + = parse_format (list_of_string s) + +/// `lift a` lifts the type `a` to a higher universe +noextract +type lift (a:Type u#a) : Type u#(max a b) = + | Lift : a -> lift a + +/// `done` is a `unit` in universe 1 +noextract +let done : lift unit = Lift u#0 u#1 () + +/// `arg_t`: interpreting an argument as a type +/// (in universe 1) since it is polymorphic in the preorders of a buffer +/// GM: Somehow, this needs to be a `let rec` (even if it not really recursive) +/// or print_frags fails to verify. I don't know why; the generated +/// VC and its encoding seem identical (modulo hash consing in the +/// latter). 
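/// Editorial illustration (added note, not part of the original interface):
/// as a worked example of the parsing phase, the format string "%b %xuy %s"
/// is parsed by `parse_format_string` into
///   Some [Interpolate (Base Bool); Frag " "; Interpolate (Array U8);
///         Frag " "; Interpolate (Base String)]
/// i.e., a boolean argument, a literal space, a length-and-buffer pair of
/// UInt8.t elements, another space, and a string argument. The definitions
/// below assign a type to each such argument slot. A hypothetical helper
/// that merely records this parse (its name is illustrative) could be:
noextract
let _example_frags : option fragments = parse_format_string "%b %xuy %s"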
+[@@__reduce__] +noextract +let rec arg_t (a:arg) : Type u#1 = + match a with + | Base t -> lift (base_typ_as_type t) + | Array t -> (l:UInt32.t & r:_ & s:_ & lmbuffer (base_typ_as_type t) r s l) + | Any -> (a:Type0 & (a -> StTrivial unit) & a) + +/// `frag_t`: a fragment is either a string literal or a argument to be interpolated +noextract +let frag_t = either string (a:arg & arg_t a) + +/// `live_frags h l` is a liveness predicate on all the buffers in `l` +[@@__reduce__] +noextract +let rec live_frags (h:_) (l:list frag_t) : prop = + match l with + | [] -> True + | Inl _ :: rest -> live_frags h rest + | Inr a :: rest -> + (match a with + | (| Base _, _ |) -> live_frags h rest + | (| Any, _ |) -> live_frags h rest + | (| Array _, (| _, _, _, b |) |) -> LB.live h b /\ live_frags h rest) + + +/// `interpret_frags` interprets a list of fragments as a Low* function type +/// Note `l` is the fragments in L-to-R order (i.e., parsing order) +/// `acc` accumulates the fragment values in reverse order +[@@__reduce__] +noextract +let rec interpret_frags (l:fragments) (acc:list frag_t) : Type u#1 = + match l with + | [] -> + // Always a dummy argument at the end + // Ensures that all cases of this match + // have the same universe, i.e., u#1 + lift u#0 u#1 unit + -> Stack unit + (requires fun h0 -> live_frags h0 acc) + (ensures fun h0 _ h1 -> h0 == h1) + + | Interpolate (Base t) :: args -> + // Base types are simple: we just take one more argument + x:base_typ_as_type t -> + interpret_frags args (Inr (| Base t, Lift x |) :: acc) + + | Interpolate (Array t) :: args -> + // Arrays are implicitly polymorphic in their preorders `r` and `s` + // which is what forces us to be in universe 1 + // Note, the length `l` is explicit + l:UInt32.t -> + #r:LB.srel (base_typ_as_type t) -> + #s:LB.srel (base_typ_as_type t) -> + b:lmbuffer (base_typ_as_type t) r s l -> + interpret_frags args (Inr (| Array t, (| l, r, s, b |) |) :: acc) + + | Interpolate Any :: args -> + #a:Type0 -> + p:(a -> StTrivial unit) -> + x:a -> + interpret_frags args (Inr (| Any, (| a, p, x |) |) :: acc) + + | Frag s :: args -> + // Literal fragments do not incur an additional argument + // We just accumulate them and recur + interpret_frags args (Inl s :: acc) + + +/// `normal` A normalization marker with very specific steps enabled +noextract unfold +let normal (#a:Type) (x:a) : a = + FStar.Pervasives.norm + [iota; + zeta; + delta_attr [`%__reduce__; `%BigOps.__reduce__]; + delta_only [`%Base?; `%Array?; `%Some?; `%Some?.v; `%list_of_string]; + primops; + simplify] + x + +/// `coerce`: A utility to trigger extensional equality of types +noextract +let coerce (x:'a{'a == 'b}) : 'b = x + +/// `fragment_printer`: The type of a printer of fragments +noextract +let fragment_printer = + (acc:list frag_t) + -> Stack unit + (requires fun h0 -> live_frags h0 acc) + (ensures fun h0 _ h1 -> h0 == h1) + +/// `print_frags`: Having accumulated all the pieces of a format +/// string and the arguments to the printed (i.e., the `list frag_t`), +/// this function does the actual work of printing them all using the +/// primitive printers +noextract inline_for_extraction +let rec print_frags (acc:list frag_t) + : Stack unit + (requires fun h0 -> live_frags h0 acc) + (ensures fun h0 _ h1 -> h0 == h1) + = match acc with + | [] -> () + | hd::tl -> + print_frags tl; + (match hd with + | Inl s -> print_string s + | Inr (| Base t, Lift value |) -> + (match t with + | Bool -> print_bool value + | Char -> print_char value + | String -> print_string value + | U8 -> 
print_u8 value + | U16 -> print_u16 value + | U32 -> print_u32 value + | U64 -> print_u64 value + | I8 -> print_i8 value + | I16 -> print_i16 value + | I32 -> print_i32 value + | I64 -> print_i64 value) + | Inr (| Array t, (| l, r, s, value |) |) -> + (match t with + | Bool -> print_lmbuffer_bool l value + | Char -> print_lmbuffer_char l value + | String -> print_lmbuffer_string l value + | U8 -> print_lmbuffer_u8 l value + | U16 -> print_lmbuffer_u16 l value + | U32 -> print_lmbuffer_u32 l value + | U64 -> print_lmbuffer_u64 l value + | I8 -> print_lmbuffer_i8 l value + | I16 -> print_lmbuffer_i16 l value + | I32 -> print_lmbuffer_i32 l value + | I64 -> print_lmbuffer_i64 l value) + | Inr (| Any, (| _, printer, value |) |) -> + printer value) + +[@@__reduce__] +let no_inst #a (#b:a -> Type) (f: (#x:a -> b x)) : unit -> #x:a -> b x = fun () -> f +[@@__reduce__] +let elim_unit_arrow #t (f:unit -> t) : t = f () + +// let test2 (f: (#a:Type -> a -> a)) : id_t 0 = test f () +// let coerce #a (#b: (a -> Type)) ($f: (#x:a -> b x)) (t:Type{norm t == (#x:a -> b x)}) +/// `aux frags acc`: This is the main workhorse which interprets a +/// parsed format string (`frags`) as a variadic, stateful function +[@@__reduce__] +noextract inline_for_extraction +let rec aux (frags:fragments) (acc:list frag_t) (fp: fragment_printer) : interpret_frags frags acc = + match frags with + | [] -> + let f (l:lift u#0 u#1 unit) + : Stack unit + (requires fun h0 -> live_frags h0 acc) + (ensures fun h0 _ h1 -> h0 == h1) + = fp acc + in + (f <: interpret_frags [] acc) + + | Frag s :: rest -> + coerce (aux rest (Inl s :: acc) fp) + + | Interpolate (Base t) :: args -> + let f (x:base_typ_as_type t) + : interpret_frags args (Inr (| Base t, Lift x |) :: acc) + = aux args (Inr (| Base t, Lift x |) :: acc) fp + in + f + + | Interpolate (Array t) :: rest -> + let f : + l:UInt32.t + -> #r:LB.srel (base_typ_as_type t) + -> #s:LB.srel (base_typ_as_type t) + -> b:lmbuffer (base_typ_as_type t) r s l + -> interpret_frags rest (Inr (| Array t, (| l, r, s, b |) |) :: acc) + = fun l #r #s b -> aux rest (Inr (| Array t, (| l, r, s, b |) |) :: acc) fp + in + f <: interpret_frags (Interpolate (Array t) :: rest) acc + + | Interpolate Any :: rest -> + let f : + unit + -> #a:Type + -> p:(a -> StTrivial unit) + -> x:a + -> interpret_frags rest (Inr (| Any, (| a, p, x |) |) :: acc) + = fun () #a p x -> aux rest (Inr (| Any, (| a, p, x |) |) :: acc) fp + in + elim_unit_arrow (no_inst (f ()) <: (unit -> interpret_frags (Interpolate Any :: rest) acc)) + +/// `format_string` : A valid format string is one that can be successfully parsed +[@@__reduce__] +noextract +let format_string = s:string{normal #bool (Some? (parse_format_string s))} + +/// `interpret_format_string` parses a string into fragments and then +/// interprets it as a type +[@@__reduce__] +noextract +let interpret_format_string (s:format_string) : Type = + interpret_frags (Some?.v (parse_format_string s)) [] + +/// `printf'`: Almost there ... this has a variadic type +/// and calls the actual printers for all its arguments. +/// +/// Note, the `normalize_term` in its body is crucial. It's what +/// allows the term to be specialized at extraction time. 
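/// Editorial note (illustration only): for a concrete instance,
/// `normal (interpret_format_string "%b %ul")` unfolds to a type of roughly
/// the shape
///   bool -> UInt32.t -> lift unit -> Stack unit (...)
/// so a caller supplies a boolean, a 32-bit unsigned integer, and the final
/// dummy `done` argument; the liveness precondition is trivial here since no
/// buffers are interpolated.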
+noextract inline_for_extraction +let printf' (s:format_string) : interpret_format_string s = + normalize_term + (match parse_format_string s with + | Some frags -> aux frags [] print_frags) + +/// `intro_normal_f`: a technical gadget to introduce +/// implicit normalization in the domain and co-domain of a function type +noextract inline_for_extraction +let intro_normal_f (#a:Type) (b: (a -> Type)) (f:(x:a -> b x)) + : (x:(normal a) -> normal (b x)) + = f + +/// `printf`: The main function has type +/// `s:normal format_string -> normal (interpret_format_string s)` +/// Note: +/// This is the type F* infers for it and it is best to leave it that way +/// rather then writing it down and asking F* to re-check what it inferred. +/// +/// Annotating it results in a needless additional proof obligation to +/// equate types after they are partially reduced, which is pointless. +noextract inline_for_extraction +val printf : s:normal format_string -> normal (interpret_format_string s) +let printf = intro_normal_f #format_string interpret_format_string printf' + + +/// `skip`: We also provide `skip`, a function that has the same type as printf +/// but normalizes to `()`, i.e., it prints nothing. This is useful for conditional +/// printing in debug code, for instance. +noextract inline_for_extraction +let skip' (s:format_string) : interpret_format_string s = + normalize_term + (match parse_format_string s with + | Some frags -> aux frags [] (fun _ -> ())) + +noextract inline_for_extraction +val skip : s:normal format_string -> normal (interpret_format_string s) +let skip = intro_normal_f #format_string interpret_format_string skip' + + +/// `test`: A small test function +/// Running `fstar --codegen OCaml LowStar.Printf.fst --extract LowStar.Printf` +/// produces the following for the body of this function +/// ``` +/// print_string "Hello "; +/// print_bool true; +/// print_string " Low* "; +/// print_u64 m; +/// print_string " Printf "; +/// print_lmbuffer_bool l () () x; +/// print_string " "; +/// print_string "bye" +/// ``` +let test (m:UInt64.t) (l:UInt32.t) (#r:_) (#s:_) (x:LB.mbuffer bool r s{LB.len x = l}) + : Stack unit + (requires (fun h0 -> LB.live h0 x)) + (ensures (fun h0 _ h1 -> h0 == h1)) + = printf "Hello %b Low* %uL Printf %xb %s" + true //%b boolean + m //%uL u64 + l x //%xb (buffer bool) + "bye" + done //dummy universe coercion + +let test2 (x:(int & int)) (print_pair:(int & int) -> StTrivial unit) + : Stack unit + (requires (fun h0 -> True)) + (ensures (fun h0 _ h1 -> h0 == h1)) + = printf "Hello pair %a" print_pair x done + +let test3 (m:UInt64.t) (l:UInt32.t) (#r:_) (#s:_) (x:LB.mbuffer bool r s{LB.len x = l}) + : Stack unit + (requires (fun h0 -> LB.live h0 x)) + (ensures (fun h0 _ h1 -> h0 == h1)) + = skip "Hello %b Low* %uL Printf %xb %s" + true //%b boolean + m //%uL u64 + l x //%xb (buffer bool) + "bye" + done //dummy universe coercion diff --git a/stage0/ulib/LowStar.RVector.fst b/stage0/ulib/LowStar.RVector.fst new file mode 100644 index 00000000000..93e11cac701 --- /dev/null +++ b/stage0/ulib/LowStar.RVector.fst @@ -0,0 +1,1184 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module LowStar.RVector + +open FStar.Classical +open FStar.Integers +open LowStar.Modifies +open LowStar.Regional +open LowStar.Vector + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module S = FStar.Seq +module B = LowStar.Buffer +module V = LowStar.Vector + +module U32 = FStar.UInt32 + +/// Utilities + +/// A `regional` type `a` is also `copyable` when there exists a copy operator +/// that guarantees the same representation between `src` and `dst`. +/// For instance, the `copy` operation for `B.buffer a` is `B.blit`. +/// +/// Here, no reference at run-time is kept to the state argument of the +/// regional; conceivably, the caller will already have some reference handy to +/// the instance of the regional class and can retrieve the parameter from +/// there. +inline_for_extraction +noeq type copyable (#rst:Type) (a:Type0) (rg:regional rst a) = +| Cpy: + copy: (s:rst{s==Rgl?.state rg} -> src:a -> dst:a -> + HST.ST unit + (requires (fun h0 -> + rg_inv rg h0 src /\ rg_inv rg h0 dst /\ + HS.disjoint (Rgl?.region_of rg src) + (Rgl?.region_of rg dst))) + (ensures (fun h0 _ h1 -> + modifies (loc_all_regions_from + false (Rgl?.region_of rg dst)) h0 h1 /\ + rg_inv rg h1 dst /\ + Rgl?.r_repr rg h1 dst == Rgl?.r_repr rg h0 src))) -> + copyable a rg + +// rst: regional state +type rvector (#a:Type0) (#rst:Type) (rg:regional rst a) = V.vector a + +val loc_rvector: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> GTot loc +let loc_rvector #a #rst #rg rv = + loc_all_regions_from false (V.frameOf rv) + +/// The invariant of `rvector` +// Here we will define the invariant for `rvector #a` that contains +// the invariant for each element and some more about the vector itself. 
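// Editorial roadmap (summarizing the definitions that follow): `rv_inv`,
// defined below, is the conjunction of three pieces:
// - `elems_inv`: every element of the vector satisfies the regional
//   invariant `rg_inv`;
// - `elems_reg`: every element lives in a region that extends the vector's
//   frame, and the elements' regions are pairwise disjoint;
// - `rv_itself_inv`: the underlying vector is live and freeable, and its
//   frame is an eternal region.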
+ +val rs_elems_inv: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> rs:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + GTot Type0 +let rs_elems_inv #a #rst rg h rs i j = + V.forall_seq rs i j (rg_inv rg h) + +val rv_elems_inv: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + GTot Type0 +let rv_elems_inv #a #rst #rg h rv i j = + rs_elems_inv rg h (V.as_seq h rv) (U32.v i) (U32.v j) + +val elems_inv: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + GTot Type0 +let elems_inv #a #rst #rg h rv = + rv_elems_inv h rv 0ul (V.size_of rv) + +val rs_elems_reg: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + GTot Type0 +let rs_elems_reg #a #rst rg rs prid i j = + V.forall_seq rs i j + (fun v -> HS.extends (Rgl?.region_of rg v) prid) /\ + V.forall2_seq rs i j + (fun v1 v2 -> HS.disjoint (Rgl?.region_of rg v1) + (Rgl?.region_of rg v2)) + +val rv_elems_reg: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + GTot Type0 +let rv_elems_reg #a #rst #rg h rv i j = + rs_elems_reg rg (V.as_seq h rv) (V.frameOf rv) (U32.v i) (U32.v j) + +val elems_reg: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + GTot Type0 +let elems_reg #a #rst #rg h rv = + rv_elems_reg h rv 0ul (V.size_of rv) + +val rv_itself_inv: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> GTot Type0 +let rv_itself_inv #a #rst #rg h rv = + V.live h rv /\ V.freeable rv /\ + HST.is_eternal_region (V.frameOf rv) + +// This is the invariant of `rvector`. 
+val rv_inv: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> GTot Type0 +let rv_inv #a #rst #rg h rv = + elems_inv h rv /\ + elems_reg h rv /\ + rv_itself_inv h rv + +val rs_elems_inv_live_region: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> rs:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + Lemma (requires (rs_elems_inv rg h rs i j)) + (ensures (V.forall_seq rs i j + (fun r -> HS.live_region h (Rgl?.region_of rg r)))) +let rec rs_elems_inv_live_region #a #rst rg h rs i j = + if i = j then () + else (Rgl?.r_inv_reg rg h (S.index rs (j - 1)); + rs_elems_inv_live_region rg h rs i (j - 1)) + +val rv_elems_inv_live_region: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + Lemma (requires (rv_elems_inv h rv i j)) + (ensures (V.forall_ h rv i j + (fun r -> HS.live_region h (Rgl?.region_of rg r)))) +let rv_elems_inv_live_region #a #rst #rg h rv i j = + rs_elems_inv_live_region rg h (V.as_seq h rv) (U32.v i) (U32.v j) + +/// Utilities for fine-grained region control + +val rs_loc_elem: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> i:nat{i < S.length rs} -> + GTot loc +let rs_loc_elem #a #rst rg rs i = + loc_all_regions_from false (Rgl?.region_of rg (S.index rs i)) + +val rs_loc_elems: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> i:nat -> j:nat{i <= j && j <= S.length rs} -> + GTot loc (decreases j) +let rec rs_loc_elems #a #rst rg rs i j = + if i = j then loc_none + else loc_union (rs_loc_elems rg rs i (j - 1)) + (rs_loc_elem rg rs (j - 1)) + +val rv_loc_elems: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + GTot loc +let rv_loc_elems #a #rst #rg h rv i j = + rs_loc_elems rg (V.as_seq h rv) (U32.v i) (U32.v j) + +val rv_loc_elem: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t{i < V.size_of rv} -> + GTot loc +let rv_loc_elem #a #rst #rg h rv i = + rs_loc_elems rg (V.as_seq h rv) (U32.v i) (U32.v i+1) + + +// Properties about inclusion of locations + +val rs_loc_elems_rec_inverse: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> + i:nat -> j:nat{i < j && j <= S.length rs} -> + Lemma (requires true) + (ensures (rs_loc_elems rg rs i j == + loc_union (rs_loc_elem rg rs i) + (rs_loc_elems rg rs (i + 1) j))) + (decreases j) +let rec rs_loc_elems_rec_inverse #a #rst rg rs i j = + if i + 1 = j then () + else (assert (rs_loc_elems rg rs i j == + loc_union (rs_loc_elems rg rs i (j - 1)) + (rs_loc_elem rg rs (j - 1))); + assert (rs_loc_elems rg rs (i + 1) j == + loc_union (rs_loc_elems rg rs (i + 1) (j - 1)) + (rs_loc_elem rg rs (j - 1))); + rs_loc_elems_rec_inverse rg rs i (j - 1); + assert (rs_loc_elems rg rs i j == + loc_union (loc_union + (rs_loc_elem rg rs i) + (rs_loc_elems rg rs (i + 1) (j - 1))) + (rs_loc_elem rg rs (j - 1))); + loc_union_assoc (rs_loc_elem rg rs i) + (rs_loc_elems rg rs (i + 1) (j - 1)) + (rs_loc_elem rg rs (j - 1))) + +val rs_loc_elems_includes: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + k:nat{i <= k && k < j} -> + Lemma (loc_includes (rs_loc_elems rg rs i j) + (rs_loc_elem rg rs k)) +let rec rs_loc_elems_includes #a #rst rg rs i j k = + if k = j - 1 then () + else rs_loc_elems_includes #a #rst rg rs i (j - 1) k + +val loc_all_exts_from: + preserve_liveness: bool -> r: HS.rid 
-> GTot loc +let loc_all_exts_from preserve_liveness r = + B.loc_regions + preserve_liveness + (Set.intersect + (HS.mod_set (Set.singleton r)) + (Set.complement (Set.singleton r))) + +val rs_loc_elem_included: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat{i < S.length rs} -> + Lemma (requires (HS.extends (Rgl?.region_of rg (S.index rs i)) prid)) + (ensures (loc_includes (loc_all_exts_from false prid) + (rs_loc_elem rg rs i))) +let rs_loc_elem_included #a #rst rg rs prid i = () + +val rs_loc_elems_included: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + Lemma (requires (rs_elems_reg rg rs prid i j)) + (ensures (loc_includes (loc_all_exts_from false prid) + (rs_loc_elems rg rs i j))) + (decreases j) +let rec rs_loc_elems_included #a #rst rg rs prid i j = + if i = j then () + else (rs_loc_elem_included rg rs prid (j - 1); + rs_loc_elems_included rg rs prid i (j - 1)) + +val rv_loc_elems_included: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + Lemma (requires (rv_elems_reg h rv i j)) + (ensures (loc_includes (loc_all_exts_from false (V.frameOf rv)) + (rv_loc_elems h rv i j))) +let rv_loc_elems_included #a #rst #rg h rv i j = + rs_loc_elems_included rg (V.as_seq h rv) (V.frameOf rv) (U32.v i) (U32.v j) + +// Properties about disjointness of locations + +val rs_loc_elem_disj: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + k:nat{i <= k && k < j} -> + l:nat{i <= l && l < j && k <> l} -> + Lemma (requires (rs_elems_reg rg rs prid i j)) + (ensures (loc_disjoint (rs_loc_elem rg rs k) + (rs_loc_elem rg rs l))) +let rs_loc_elem_disj #a #rst rg rs prid i j k l = () + +val rs_loc_elem_disj_forall: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + Lemma (requires (rs_elems_reg rg rs prid i j)) + (ensures ( + forall (k:nat{i <= k && k < j}). + forall (l:nat{i <= l && l < j && k <> l}). 
+ loc_disjoint (rs_loc_elem rg rs k) + (rs_loc_elem rg rs l))) +let rs_loc_elem_disj_forall #a #rst rg rs prid i j = () + +val rs_loc_elems_elem_disj: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + k1:nat{i <= k1} -> + k2:nat{k1 <= k2 && k2 <= j} -> + l:nat{i <= l && l < j && (l < k1 || k2 <= l)} -> + Lemma (requires (rs_elems_reg rg rs prid i j)) + (ensures (loc_disjoint (rs_loc_elems rg rs k1 k2) + (rs_loc_elem rg rs l))) + (decreases k2) +let rec rs_loc_elems_elem_disj #a #rst rg rs prid i j k1 k2 l = + if k1 = k2 then () + else (rs_loc_elem_disj rg rs prid i j (k2 - 1) l; + rs_loc_elems_elem_disj rg rs prid i j k1 (k2 - 1) l) + +val rs_loc_elems_disj: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + k1:nat{i <= k1} -> + k2:nat{k1 <= k2 && k2 <= j} -> + l1:nat{i <= l1} -> + l2:nat{l1 <= l2 && l2 <= j} -> + Lemma (requires (rs_elems_reg rg rs prid i j /\ (k2 <= l1 || l2 <= k1))) + (ensures (loc_disjoint (rs_loc_elems rg rs k1 k2) + (rs_loc_elems rg rs l1 l2))) + (decreases k2) +let rec rs_loc_elems_disj #a #rst rg rs prid i j k1 k2 l1 l2 = + if k1 = k2 then () + else (rs_loc_elems_elem_disj rg rs prid i j l1 l2 (k2 - 1); + rs_loc_elems_disj rg rs prid i j k1 (k2 - 1) l1 l2) + +val rv_loc_elems_disj: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + k1:uint32_t{i <= k1} -> + k2:uint32_t{k1 <= k2 && k2 <= j} -> + l1:uint32_t{i <= l1} -> + l2:uint32_t{l1 <= l2 && l2 <= j} -> + Lemma (requires (rv_elems_reg h rv i j /\ (k2 <= l1 || l2 <= k1))) + (ensures (loc_disjoint (rv_loc_elems h rv k1 k2) + (rv_loc_elems h rv l1 l2))) +let rv_loc_elems_disj #a #rst #rg h rv i j k1 k2 l1 l2 = + rs_loc_elems_disj rg (V.as_seq h rv) (V.frameOf rv) + (U32.v i) (U32.v j) (U32.v k1) (U32.v k2) (U32.v l1) (U32.v l2) + +val rs_loc_elems_parent_disj: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> prid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + Lemma (requires (rs_elems_reg rg rs prid i j)) + (ensures (loc_disjoint (rs_loc_elems rg rs i j) + (loc_region_only false prid))) + (decreases j) +let rec rs_loc_elems_parent_disj #a #rst rg rs prid i j = + if i = j then () + else rs_loc_elems_parent_disj rg rs prid i (j - 1) + +val rv_loc_elems_parent_disj: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + Lemma (requires (rv_elems_reg h rv i j)) + (ensures (loc_disjoint (rv_loc_elems h rv i j) + (loc_region_only false (V.frameOf rv)))) +let rv_loc_elems_parent_disj #a #rst #rg h rv i j = + rs_loc_elems_parent_disj rg (V.as_seq h rv) (V.frameOf rv) (U32.v i) (U32.v j) + +val rs_loc_elems_each_disj: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> drid:HS.rid -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + Lemma (requires (V.forall_seq rs i j + (fun r -> HS.disjoint (Rgl?.region_of rg r) drid))) + (ensures (loc_disjoint (rs_loc_elems rg rs i j) + (loc_all_regions_from false drid))) + (decreases j) +let rec rs_loc_elems_each_disj #a #rst rg rs drid i j = + if i = j then () + else rs_loc_elems_each_disj rg rs drid i (j - 1) + +val rv_loc_elems_each_disj: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + drid:HS.rid -> + Lemma (requires 
(V.forall_ h rv i j + (fun r -> HS.disjoint (Rgl?.region_of rg r) drid))) + (ensures (loc_disjoint (rv_loc_elems h rv i j) + (loc_all_regions_from false drid))) +let rv_loc_elems_each_disj #a #rst #rg h rv i j drid = + rs_loc_elems_each_disj rg (V.as_seq h rv) drid (U32.v i) (U32.v j) + +// Preservation based on disjointness + +val rv_loc_elems_preserved: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (V.live h0 rv /\ + loc_disjoint p (V.loc_vector_within rv i j) /\ + modifies p h0 h1)) + (ensures (rv_loc_elems h0 rv i j == + rv_loc_elems h1 rv i j)) + (decreases (U32.v j)) +let rec rv_loc_elems_preserved #a #rst #rg rv i j p h0 h1 = + if i = j then () + else (V.loc_vector_within_includes rv i j (j - 1ul) j; + V.get_preserved rv (j - 1ul) p h0 h1; + assert (V.get h0 rv (j - 1ul) == V.get h1 rv (j - 1ul)); + V.loc_vector_within_includes rv i j i (j - 1ul); + rv_loc_elems_preserved rv i (j - 1ul) p h0 h1) + +val rs_elems_inv_preserved: + #a:Type0 -> #rst:Type -> rg:regional rst a -> rs:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length rs} -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (rs_elems_inv rg h0 rs i j /\ + loc_disjoint p (rs_loc_elems rg rs i j) /\ + modifies p h0 h1)) + (ensures (rs_elems_inv rg h1 rs i j)) + (decreases j) +let rec rs_elems_inv_preserved #a #rst rg rs i j p h0 h1 = + if i = j then () + else (rs_elems_inv_preserved rg rs i (j - 1) p h0 h1; + Rgl?.r_sep rg (S.index rs (j - 1)) p h0 h1) + +val rv_elems_inv_preserved: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (V.live h0 rv /\ + rv_elems_inv h0 rv i j /\ + loc_disjoint p (V.loc_vector rv) /\ + loc_disjoint p (rv_loc_elems h0 rv i j) /\ + modifies p h0 h1)) + (ensures (rv_elems_inv h1 rv i j)) +let rv_elems_inv_preserved #a #rst #rg rv i j p h0 h1 = + rs_elems_inv_preserved rg (V.as_seq h0 rv) (U32.v i) (U32.v j) p h0 h1 + +val rv_inv_preserved_: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (rv_inv h0 rv /\ + loc_disjoint p (loc_vector rv) /\ + loc_disjoint p (rv_loc_elems h0 rv 0ul (V.size_of rv)) /\ + modifies p h0 h1)) + (ensures (rv_inv h1 rv)) +let rv_inv_preserved_ #a #rst #rg rv p h0 h1 = + rv_elems_inv_preserved #a #rst #rg rv 0ul (V.size_of rv) p h0 h1 + +// The first core lemma of `rvector` +val rv_inv_preserved: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (rv_inv h0 rv /\ + loc_disjoint p (loc_rvector rv) /\ + modifies p h0 h1)) + (ensures (rv_inv h1 rv)) + [SMTPat (rv_inv h0 rv); + SMTPat (loc_disjoint p (loc_rvector rv)); + SMTPat (modifies p h0 h1)] +let rv_inv_preserved #a #rst #rg rv p h0 h1 = + assert (loc_includes (loc_rvector rv) (V.loc_vector rv)); + rv_loc_elems_included h0 rv 0ul (V.size_of rv); + assert (loc_includes (loc_rvector rv) (rv_loc_elems h0 rv 0ul (V.size_of rv))); + rv_inv_preserved_ rv p h0 h1 + +val rv_inv_preserved_int: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + i:uint32_t{i < V.size_of rv} -> + h0:HS.mem -> h1:HS.mem -> + Lemma (requires (rv_inv h0 rv /\ + modifies (loc_all_regions_from false + (Rgl?.region_of rg (V.get h0 rv i))) h0 h1 /\ + rg_inv rg h1 (V.get h1 rv i))) + (ensures (rv_inv h1 rv)) +let rv_inv_preserved_int #a 
#rst #rg rv i h0 h1 = + rs_loc_elems_elem_disj + rg (V.as_seq h0 rv) (V.frameOf rv) + 0 (U32.v (V.size_of rv)) 0 (U32.v i) (U32.v i); + rs_elems_inv_preserved + rg (V.as_seq h0 rv) 0 (U32.v i) + (loc_all_regions_from false + (Rgl?.region_of rg (V.get h1 rv i))) + h0 h1; + rs_loc_elems_elem_disj + rg (V.as_seq h0 rv) (V.frameOf rv) + 0 (U32.v (V.size_of rv)) + (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i); + rs_elems_inv_preserved + rg (V.as_seq h0 rv) (U32.v i + 1) (U32.v (V.size_of rv)) + (loc_all_regions_from false + (Rgl?.region_of rg (V.get h1 rv i))) + h0 h1 + +/// Representation + +val as_seq_seq: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> rs:S.seq a -> + i:nat -> + j:nat{i <= j /\ j <= S.length rs /\ + rs_elems_inv rg h rs i j} -> + GTot (s:S.seq (Rgl?.repr rg){S.length s = j - i}) + (decreases j) +let rec as_seq_seq #a #rst rg h rs i j = + if i = j then S.empty + else S.snoc (as_seq_seq rg h rs i (j - 1)) + (Rgl?.r_repr rg h (S.index rs (j - 1))) + +val as_seq_sub: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg -> + i:uint32_t -> + j:uint32_t{ + i <= j /\ + j <= V.size_of rv /\ + rv_elems_inv h rv i j} -> + GTot (s:S.seq (Rgl?.repr rg){S.length s = U32.v j - U32.v i}) + (decreases (U32.v j)) +let as_seq_sub #a #rst #rg h rv i j = + as_seq_seq rg h (V.as_seq h rv) (U32.v i) (U32.v j) + +val as_seq: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg{rv_inv h rv} -> + GTot (s:S.seq (Rgl?.repr rg){S.length s = U32.v (V.size_of rv)}) +let as_seq #a #rst #rg h rv = + as_seq_sub h rv 0ul (V.size_of rv) + +val as_seq_sub_as_seq: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + h:HS.mem -> rv:rvector rg{rv_inv h rv} -> + Lemma (S.equal (as_seq_sub h rv 0ul (V.size_of rv)) + (as_seq h rv)) + [SMTPat (as_seq_sub h rv 0ul (V.size_of rv))] +let as_seq_sub_as_seq #a #rst #rg h rv = () + +val as_seq_seq_index: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> rs:S.seq a -> + i:nat -> + j:nat{i <= j /\ j <= S.length rs /\ rs_elems_inv rg h rs i j} -> + k:nat{k < j - i} -> + Lemma (requires true) + (ensures (S.index (as_seq_seq rg h rs i j) k == + Rgl?.r_repr rg h (S.index rs (i + k)))) + (decreases j) + [SMTPat (S.index (as_seq_seq rg h rs i j) k)] +let rec as_seq_seq_index #a #rst rg h rs i j k = + if i = j then () + else if k = j - i - 1 then () + else as_seq_seq_index rg h rs i (j - 1) k + +val as_seq_seq_eq: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> rs1:S.seq a -> rs2:S.seq a -> + i:nat -> + j:nat{i <= j /\ j <= S.length rs1 /\ rs_elems_inv rg h rs1 i j} -> + k:nat -> + l:nat{k <= l /\ l <= S.length rs2 /\ rs_elems_inv rg h rs2 k l} -> + Lemma (requires (S.equal (S.slice rs1 i j) (S.slice rs2 k l))) + (ensures (S.equal (as_seq_seq rg h rs1 i j) + (as_seq_seq rg h rs2 k l))) +let as_seq_seq_eq #a #rst rg h rs1 rs2 i j k l = + assert (forall (a:nat{a < j - i}). + S.index (as_seq_seq rg h rs1 i j) a == + Rgl?.r_repr rg h (S.index rs1 (i + a))); + assert (forall (a:nat{a < l - k}). + S.index (as_seq_seq rg h rs2 k l) a == + Rgl?.r_repr rg h (S.index rs2 (k + a))); + assert (S.length (S.slice rs1 i j) = j - i); + assert (S.length (S.slice rs2 k l) = l - k); + assert (forall (a:nat{a < j - i}). + S.index (S.slice rs1 i j) a == + S.index (S.slice rs2 k l) a); + assert (forall (a:nat{a < j - i}). 
+ S.index rs1 (i + a) == S.index rs2 (k + a)) + +val as_seq_seq_slice: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> rs:S.seq a -> + i:nat -> j:nat{i <= j /\ j <= S.length rs /\ rs_elems_inv rg h rs i j} -> + k:nat -> l:nat{k <= l && l <= j - i} -> + Lemma (S.equal (S.slice (as_seq_seq rg h rs i j) k l) + (as_seq_seq rg h (S.slice rs (i + k) (i + l)) 0 (l - k))) +#reset-options "--z3rlimit 10" +let rec as_seq_seq_slice #a #rst rg h rs i j k l = + if k = l then () + else (as_seq_seq_slice rg h rs i j k (l - 1); + as_seq_seq_index rg h rs i j (l - 1); + as_seq_seq_eq rg h + (S.slice rs (i + k) (i + l - 1)) + (S.slice rs (i + k) (i + l)) + 0 (l - k - 1) 0 (l - k - 1)) + +val as_seq_seq_upd: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> rs:S.seq a -> + i:nat -> + j:nat{ + i <= j /\ + j <= S.length rs /\ + rs_elems_inv rg h rs i j} -> + k:nat{i <= k && k < j} -> v:a{rg_inv rg h v} -> + Lemma (S.equal (as_seq_seq rg h (S.upd rs k v) i j) + (S.upd (as_seq_seq rg h rs i j) (k - i) + (Rgl?.r_repr rg h v))) +let rec as_seq_seq_upd #a #rst rg h rs i j k v = + if i = j then () + else if k = j - 1 then () + else as_seq_seq_upd rg h rs i (j - 1) k v + +// Preservation based on disjointness + +val as_seq_seq_preserved: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + rs:S.seq a -> i:nat -> j:nat{i <= j && j <= S.length rs} -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (rs_elems_inv rg h0 rs i j /\ + loc_disjoint p (rs_loc_elems rg rs i j) /\ + modifies p h0 h1)) + (ensures (rs_elems_inv_preserved rg rs i j p h0 h1; + S.equal (as_seq_seq rg h0 rs i j) + (as_seq_seq rg h1 rs i j))) +let rec as_seq_seq_preserved #a #rst rg rs i j p h0 h1 = + if i = j then () + else (rs_elems_inv_preserved rg rs i (j - 1) p h0 h1; + as_seq_seq_preserved rg rs i (j - 1) p h0 h1; + Rgl?.r_sep rg (S.index rs (j - 1)) p h0 h1) + +val as_seq_sub_preserved: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + rv:rvector rg -> + i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (V.live h0 rv /\ + rv_elems_inv h0 rv i j /\ + loc_disjoint p (rv_loc_elems h0 rv i j) /\ + loc_disjoint p (V.loc_vector rv) /\ + modifies p h0 h1)) + (ensures (rv_elems_inv_preserved rv i j p h0 h1; + S.equal (as_seq_sub h0 rv i j) + (as_seq_sub h1 rv i j))) +let as_seq_sub_preserved #a #rst #rg rv i j p h0 h1 = + as_seq_seq_preserved rg (V.as_seq h0 rv) (U32.v i) (U32.v j) p h0 h1 + + +val as_seq_preserved_: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + rv:rvector rg -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (rv_inv h0 rv /\ + loc_disjoint p (loc_vector rv) /\ + loc_disjoint p (rv_loc_elems h0 rv 0ul (V.size_of rv)) /\ + modifies p h0 h1)) + (ensures (rv_inv_preserved_ rv p h0 h1; + S.equal (as_seq h0 rv) (as_seq h1 rv))) +let as_seq_preserved_ #a #rst #rg rv p h0 h1 = + as_seq_sub_preserved rv 0ul (V.size_of rv) p h0 h1 + + +// The second core lemma of `rvector` +val as_seq_preserved: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + rv:rvector rg -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (rv_inv h0 rv /\ + loc_disjoint p (loc_rvector rv) /\ + modifies p h0 h1)) + (ensures (rv_inv_preserved rv p h0 h1; + S.equal (as_seq h0 rv) (as_seq h1 rv))) + [SMTPat (rv_inv h0 rv); + SMTPat (loc_disjoint p (loc_rvector rv)); + SMTPat (modifies p h0 h1)] +let as_seq_preserved #a #rst #rg rv p h0 h1 = + assert (loc_includes (loc_rvector rv) (V.loc_vector rv)); + rv_loc_elems_included h0 rv 0ul (V.size_of rv); + assert 
(loc_includes (loc_rvector rv) (rv_loc_elems h0 rv 0ul (V.size_of rv))); + as_seq_preserved_ rv p h0 h1 + +/// Construction + +val alloc_empty: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + HST.ST (rvector rg) + (requires (fun h0 -> true)) + (ensures (fun h0 bv h1 -> h0 == h1 /\ V.size_of bv = 0ul)) +let alloc_empty #a #rst rg = + V.alloc_empty a + +val alloc_: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + cidx:uint32_t{cidx <= V.size_of rv} -> + HST.ST unit + (requires (fun h0 -> rv_itself_inv h0 rv)) + (ensures (fun h0 _ h1 -> + modifies (V.loc_vector_within rv 0ul cidx) h0 h1 /\ + rv_itself_inv h1 rv /\ + rv_elems_inv h1 rv 0ul cidx /\ + rv_elems_reg h1 rv 0ul cidx /\ + S.equal (as_seq_sub h1 rv 0ul cidx) + (S.create (U32.v cidx) (Ghost.reveal (Rgl?.irepr rg))) /\ + // the loop invariant for this function + V.forall_ h1 rv 0ul cidx + (fun r -> HS.fresh_region (Rgl?.region_of rg r) h0 h1 /\ + Rgl?.r_alloc_p rg r) /\ + Set.subset (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)))) + (decreases (U32.v cidx)) +#reset-options "--z3rlimit 20" +let rec alloc_ #a #rst #rg rv cidx = + let hh0 = HST.get () in + if cidx = 0ul then () + else (let nrid = HST.new_region (V.frameOf rv) in + let v = rg_alloc rg nrid in + + let hh1 = HST.get () in + V.assign rv (cidx - 1ul) v; + + let hh2 = HST.get () in + V.loc_vector_within_included rv (cidx - 1ul) cidx; + Rgl?.r_sep + rg (V.get hh2 rv (cidx - 1ul)) + (V.loc_vector_within rv (cidx - 1ul) cidx) + hh1 hh2; + alloc_ rv (cidx - 1ul); + + let hh3 = HST.get () in + V.loc_vector_within_included rv 0ul (cidx - 1ul); + Rgl?.r_sep + rg (V.get hh3 rv (cidx - 1ul)) + (V.loc_vector_within rv 0ul (cidx - 1ul)) + hh2 hh3; + V.forall2_extend hh3 rv 0ul (cidx - 1ul) + (fun r1 r2 -> HS.disjoint (Rgl?.region_of rg r1) + (Rgl?.region_of rg r2)); + V.loc_vector_within_union_rev rv 0ul cidx) + +val alloc_rid: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + len:uint32_t{len > 0ul} -> rid:HST.erid -> + HST.ST (rvector rg) + (requires (fun h0 -> true)) + (ensures (fun h0 rv h1 -> + modifies (V.loc_vector rv) h0 h1 /\ + rv_inv h1 rv /\ + V.frameOf rv = rid /\ + V.size_of rv = len /\ + V.forall_all h1 rv (fun r -> Rgl?.r_alloc_p rg r) /\ + S.equal (as_seq h1 rv) + (S.create (U32.v len) (Ghost.reveal (Rgl?.irepr rg))))) +let alloc_rid #a #rst rg len rid = + let vec = V.alloc_rid len (rg_dummy rg) rid in + alloc_ #a #rst #rg vec len; + V.loc_vector_within_included vec 0ul len; + vec + +val alloc_reserve: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + len:uint32_t{len > 0ul} -> rid:HST.erid -> + HST.ST (rvector rg) + (requires (fun h0 -> true)) + (ensures (fun h0 rv h1 -> + modifies (V.loc_vector rv) h0 h1 /\ + rv_inv h1 rv /\ + V.frameOf rv = rid /\ + V.size_of rv = 0ul /\ + S.equal (as_seq h1 rv) S.empty /\ + Set.equal (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + B.fresh_loc (V.loc_vector rv) h0 h1)) +let alloc_reserve #a #rst rg len rid = + V.alloc_reserve len (rg_dummy rg) rid + +val alloc: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + len:uint32_t{len > 0ul} -> + HST.ST (rvector rg) + (requires (fun h0 -> true)) + (ensures (fun h0 rv h1 -> + modifies (V.loc_vector rv) h0 h1 /\ + rv_inv h1 rv /\ + HS.fresh_region (V.frameOf rv) h0 h1 /\ + V.size_of rv = len /\ + V.forall_all h1 rv (fun r -> Rgl?.r_alloc_p rg r) /\ + S.equal (as_seq h1 rv) + (S.create (U32.v len) (Ghost.reveal (Rgl?.irepr rg))))) +let alloc #a #rst rg len = + let nrid = HST.new_region HS.root in + alloc_rid rg len nrid + +val insert: + 
#a:Type0 -> #rst:Type -> #rg:regional rst a -> + rv:rvector rg{not (V.is_full rv)} -> v:a -> + HST.ST (rvector rg) + (requires (fun h0 -> + rv_inv h0 rv /\ rg_inv rg h0 v /\ + HS.extends (Rgl?.region_of rg v) (V.frameOf rv) /\ + V.forall_all h0 rv + (fun b -> HS.disjoint (Rgl?.region_of rg b) + (Rgl?.region_of rg v)))) + (ensures (fun h0 irv h1 -> + V.size_of irv = V.size_of rv + 1ul /\ + V.frameOf rv = V.frameOf irv /\ + modifies (loc_union (V.loc_addr_of_vector rv) + (V.loc_vector irv)) h0 h1 /\ + rv_inv h1 irv /\ + V.get h1 irv (V.size_of rv) == v /\ + S.equal (as_seq h1 irv) + (S.snoc (as_seq h0 rv) (Rgl?.r_repr rg h0 v)))) +#reset-options "--z3rlimit 20" +let insert #a #rst #rg rv v = + let hh0 = HST.get () in + let irv = V.insert rv v in + let hh1 = HST.get () in + + // Safety + rs_loc_elems_parent_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) 0 (U32.v (V.size_of rv)); + rs_elems_inv_preserved + rg (V.as_seq hh0 rv) 0 (U32.v (V.size_of rv)) + (loc_region_only false (V.frameOf rv)) + hh0 hh1; + Rgl?.r_sep rg v + (loc_region_only false (V.frameOf rv)) + hh0 hh1; + + // Correctness + assert (S.equal (V.as_seq hh0 rv) + (S.slice (V.as_seq hh1 irv) 0 (U32.v (V.size_of rv)))); + as_seq_seq_preserved + rg (V.as_seq hh0 rv) + 0 (U32.v (V.size_of rv)) + (loc_region_only false (V.frameOf rv)) hh0 hh1; + as_seq_seq_slice + rg hh1 (V.as_seq hh1 irv) 0 (U32.v (V.size_of irv)) + 0 (U32.v (V.size_of rv)); + irv + +val insert_copy: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> cp:copyable #rst a rg -> + rv:rvector rg{not (V.is_full rv)} -> v:a -> + HST.ST (rvector rg) + (requires (fun h0 -> + rv_inv h0 rv /\ rg_inv rg h0 v /\ + HS.disjoint (Rgl?.region_of rg v) (V.frameOf rv))) + (ensures (fun h0 irv h1 -> + V.size_of irv = V.size_of rv + 1ul /\ + V.frameOf rv = V.frameOf irv /\ + modifies (loc_rvector rv) h0 h1 /\ + rv_inv h1 irv /\ + S.equal (as_seq h1 irv) + (S.snoc (as_seq h0 rv) (Rgl?.r_repr rg h0 v)))) +let insert_copy #a #rst #rg cp rv v = + let hh0 = HST.get () in + rv_elems_inv_live_region hh0 rv 0ul (V.size_of rv); + let nrid = HST.new_region (V.frameOf rv) in + let nv = rg_alloc rg nrid in + + let hh1 = HST.get () in + Rgl?.r_sep rg v loc_none hh0 hh1; + rv_inv_preserved rv loc_none hh0 hh1; + as_seq_preserved rv loc_none hh0 hh1; + Cpy?.copy cp (Rgl?.state rg) v nv; + + let hh2 = HST.get () in + rv_loc_elems_each_disj hh2 rv 0ul (V.size_of rv) nrid; + rv_inv_preserved_ rv (loc_all_regions_from false nrid) hh1 hh2; + as_seq_preserved_ rv (loc_all_regions_from false nrid) hh1 hh2; + insert rv nv + +val assign: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + i:uint32_t{i < V.size_of rv} -> v:a -> + HST.ST unit + (requires (fun h0 -> + // rv_inv h0 rv /\ + rv_itself_inv h0 rv /\ + rv_elems_inv h0 rv 0ul i /\ + rv_elems_inv h0 rv (i + 1ul) (V.size_of rv) /\ + elems_reg h0 rv /\ + + V.forall_ h0 rv 0ul i + (fun b -> HS.disjoint (Rgl?.region_of rg b) + (Rgl?.region_of rg v)) /\ + V.forall_ h0 rv (i + 1ul) (V.size_of rv) + (fun b -> HS.disjoint (Rgl?.region_of rg b) + (Rgl?.region_of rg v)) /\ + rg_inv rg h0 v /\ + HS.extends (Rgl?.region_of rg v) (V.frameOf rv))) + (ensures (fun h0 _ h1 -> + modifies (V.loc_vector_within rv i (i + 1ul)) h0 h1 /\ + rv_inv h1 rv /\ + V.get h1 rv i == v /\ + S.equal (as_seq h1 rv) + (S.append + (as_seq_sub h0 rv 0ul i) + (S.cons (Rgl?.r_repr rg h0 v) + (as_seq_sub h0 rv (i + 1ul) (V.size_of rv)))))) +let assign #a #rst #rg rv i v = + let hh0 = HST.get () in + V.assign rv i v; + let hh1 = HST.get () in + + // Safety + rs_loc_elems_parent_disj + 
rg (V.as_seq hh0 rv) (V.frameOf rv) 0 (U32.v i); + rs_loc_elems_parent_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) (U32.v i + 1) (U32.v (V.size_of rv)); + rs_elems_inv_preserved + rg (V.as_seq hh0 rv) 0 (U32.v i) + (V.loc_vector rv) + hh0 hh1; + rs_elems_inv_preserved + rg (V.as_seq hh0 rv) (U32.v i + 1) (U32.v (V.size_of rv)) + (V.loc_vector rv) + hh0 hh1; + Rgl?.r_sep rg v (V.loc_vector rv) hh0 hh1; + + // Correctness + rs_loc_elems_parent_disj + rg (V.as_seq hh1 rv) (V.frameOf rv) 0 (U32.v (V.size_of rv)); + as_seq_seq_preserved + rg (V.as_seq hh1 rv) + 0 (U32.v (V.size_of rv)) + (V.loc_vector rv) hh0 hh1 + +private val r_sep_forall: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + v:a{rg_inv rg h0 v} -> + Lemma (requires (loc_disjoint (loc_all_regions_from + false (Rgl?.region_of rg v)) p /\ + modifies p h0 h1)) + (ensures (rg_inv rg h1 v /\ + Rgl?.r_repr rg h0 v == Rgl?.r_repr rg h1 v)) +private let r_sep_forall #a #rst rg p h0 h1 v = + Rgl?.r_sep rg v p h0 h1 + +val assign_copy: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> cp:copyable #rst a rg -> + rv:rvector rg -> + i:uint32_t{i < V.size_of rv} -> v:a -> + HST.ST unit + (requires (fun h0 -> + rv_inv h0 rv /\ + rg_inv rg h0 v /\ + HS.disjoint (Rgl?.region_of rg v) (V.frameOf rv))) + (ensures (fun h0 _ h1 -> + modifies (loc_all_regions_from + false (Rgl?.region_of rg (V.get h1 rv i))) h0 h1 /\ + rv_inv h1 rv /\ + S.equal (as_seq h1 rv) + (S.upd (as_seq h0 rv) (U32.v i) (Rgl?.r_repr rg h0 v)))) +let assign_copy #a #rst #rg cp rv i v = + let hh0 = HST.get () in + Cpy?.copy cp (Rgl?.state rg) v (V.index rv i); + let hh1 = HST.get () in + + // Safety + rv_inv_preserved_int #a #rst #rg rv i hh0 hh1; + + // Correctness + forall_intro + (move_requires + (rs_loc_elem_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) + 0 (U32.v (V.size_of rv)) + (U32.v i))); + assert (forall (k:nat{k <> U32.v i && k < U32.v (V.size_of rv)}). + loc_disjoint (rs_loc_elem rg (V.as_seq hh0 rv) k) + (rs_loc_elem rg (V.as_seq hh0 rv) (U32.v i))); + forall_intro + (move_requires + (r_sep_forall + rg (rs_loc_elem rg (V.as_seq hh0 rv) (U32.v i)) + hh0 hh1)); + assert (forall (k:nat{k <> U32.v i && k < U32.v (V.size_of rv)}). + loc_disjoint (rs_loc_elem rg (V.as_seq hh0 rv) k) + (rs_loc_elem rg (V.as_seq hh0 rv) (U32.v i)) ==> + Rgl?.r_repr rg hh1 (S.index (V.as_seq hh1 rv) k) == + Rgl?.r_repr rg hh0 (S.index (V.as_seq hh0 rv) k)); + assert (forall (k:nat{k <> U32.v i && k < U32.v (V.size_of rv)}). + Rgl?.r_repr rg hh1 (S.index (V.as_seq hh1 rv) k) == + Rgl?.r_repr rg hh0 (S.index (V.as_seq hh0 rv) k)); + assert (forall (k:nat{k <> U32.v i && k < U32.v (V.size_of rv)}). 
+ S.index (as_seq_seq rg hh1 (V.as_seq hh1 rv) + 0 (U32.v (V.size_of rv))) k == + S.index (as_seq_seq rg hh0 (V.as_seq hh0 rv) + 0 (U32.v (V.size_of rv))) k) + +val free_elems: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + idx:uint32_t{idx < V.size_of rv} -> + HST.ST unit + (requires (fun h0 -> + V.live h0 rv /\ + rv_elems_inv h0 rv 0ul (idx + 1ul) /\ + rv_elems_reg h0 rv 0ul (idx + 1ul))) + (ensures (fun h0 _ h1 -> + modifies (rv_loc_elems h0 rv 0ul (idx + 1ul)) h0 h1)) +let rec free_elems #a #rst #rg rv idx = + let hh0 = HST.get () in + rg_free rg (V.index rv idx); + + let hh1 = HST.get () in + rs_loc_elems_elem_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) + 0 (U32.v idx + 1) 0 (U32.v idx) (U32.v idx); + rv_elems_inv_preserved + rv 0ul idx (rs_loc_elem rg (V.as_seq hh0 rv) (U32.v idx)) hh0 hh1; + + if idx <> 0ul then + free_elems rv (idx - 1ul) + +val flush: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + rv:rvector rg -> i:uint32_t{i <= V.size_of rv} -> + HST.ST (rvector rg) + (requires (fun h0 -> rv_inv h0 rv)) + (ensures (fun h0 frv h1 -> + V.size_of frv = V.size_of rv - i /\ + V.frameOf rv = V.frameOf frv /\ + modifies (loc_rvector rv) h0 h1 /\ + rv_inv h1 frv /\ + S.equal (as_seq h1 frv) + (S.slice (as_seq h0 rv) (U32.v i) (U32.v (V.size_of rv))))) +#reset-options "--z3rlimit 40" +let flush #a #rst #rg rv i = + let hh0 = HST.get () in + (if i = 0ul then () else free_elems rv (i - 1ul)); + rv_loc_elems_included hh0 rv 0ul i; + + let hh1 = HST.get () in + assert (modifies (rs_loc_elems rg (V.as_seq hh0 rv) 0 (U32.v i)) hh0 hh1); + let frv = V.flush rv (rg_dummy rg) i in + + let hh2 = HST.get () in + assert (modifies (loc_region_only false (V.frameOf rv)) hh1 hh2); + + // Safety + rs_loc_elems_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) 0 (U32.v (V.size_of rv)) + 0 (U32.v i) (U32.v i) (U32.v (V.size_of rv)); + rs_loc_elems_parent_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) + (U32.v i) (U32.v (V.size_of rv)); + rs_elems_inv_preserved + rg (V.as_seq hh0 rv) (U32.v i) (U32.v (V.size_of rv)) + (loc_union (rs_loc_elems rg (V.as_seq hh0 rv) 0 (U32.v i)) + (loc_region_only false (V.frameOf rv))) + hh0 hh2; + assert (rv_inv #a #rst #rg hh2 frv); + + // Correctness + as_seq_seq_preserved + rg (V.as_seq hh0 rv) (U32.v i) (U32.v (V.size_of rv)) + (loc_union (rs_loc_elems rg (V.as_seq hh0 rv) 0 (U32.v i)) + (loc_region_only false (V.frameOf rv))) + hh0 hh2; + as_seq_seq_slice + rg hh0 (V.as_seq hh0 rv) 0 (U32.v (V.size_of rv)) + (U32.v i) (U32.v (V.size_of rv)); + assert (S.equal (S.slice (as_seq hh0 rv) (U32.v i) (U32.v (V.size_of rv))) + (as_seq_seq rg hh2 (V.as_seq hh0 rv) + (U32.v i) (U32.v (V.size_of rv)))); + as_seq_seq_eq + rg hh2 (V.as_seq hh0 rv) (V.as_seq hh2 frv) + (U32.v i) (U32.v (V.size_of rv)) 0 (U32.v (V.size_of frv)); + assert (S.equal (as_seq_seq rg hh2 (V.as_seq hh2 frv) + 0 (U32.v (V.size_of frv))) + (as_seq_seq rg hh2 (V.as_seq hh0 rv) + (U32.v i) (U32.v (V.size_of rv)))); + assert (S.equal (S.slice (as_seq hh0 rv) (U32.v i) (U32.v (V.size_of rv))) + (as_seq hh2 frv)); + frv + +val free_elems_from: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + idx:uint32_t{idx < V.size_of rv} -> + HST.ST unit + (requires (fun h0 -> + V.live h0 rv /\ + rv_elems_inv h0 rv idx (V.size_of rv) /\ + rv_elems_reg h0 rv idx (V.size_of rv))) + (ensures (fun h0 _ h1 -> + modifies (rv_loc_elems h0 rv idx (V.size_of rv)) h0 h1)) +let rec free_elems_from #a #rst #rg rv idx = + let hh0 = HST.get () in + rs_loc_elems_elem_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) + 
(U32.v idx) (U32.v (V.size_of rv)) + (U32.v idx+1) (U32.v (V.size_of rv)) + (U32.v idx); + + rg_free rg (V.index rv idx); + + let hh1 = HST.get () in + rv_elems_inv_preserved + rv (idx+1ul) (V.size_of rv) + (rv_loc_elem hh0 rv idx) hh0 hh1; + + if idx + 1ul < V.size_of rv then + begin + free_elems_from rv (idx + 1ul); + rs_loc_elems_rec_inverse rg (V.as_seq hh0 rv) (U32.v idx) (U32.v (V.size_of rv)) + end + +val shrink: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> + rv:rvector rg -> new_size:uint32_t{new_size <= V.size_of rv} -> + HST.ST (rvector rg) + (requires (fun h0 -> rv_inv h0 rv)) + (ensures (fun h0 frv h1 -> + V.size_of frv = new_size /\ + V.frameOf rv = V.frameOf frv /\ + modifies (loc_rvector rv) h0 h1 /\ + rv_inv h1 frv /\ + S.equal (as_seq h1 frv) + (S.slice (as_seq h0 rv) 0 (U32.v new_size)))) +#reset-options "--z3rlimit 40" +let shrink #a #rst #rg rv new_size = + let size = V.size_of rv in + [@@inline_let] let sz = U32.v size in + [@@inline_let] let nsz = U32.v new_size in + let hh0 = HST.get () in + if new_size >= size then rv else + begin + free_elems_from rv new_size; + rv_loc_elems_included hh0 rv new_size size; + let hh1 = HST.get () in + assert (modifies (rs_loc_elems rg (V.as_seq hh0 rv) nsz sz) hh0 hh1); + let frv = V.shrink rv new_size in + + let hh2 = HST.get () in + assert (modifies (loc_region_only false (V.frameOf rv)) hh1 hh2); + + // Safety + rs_loc_elems_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) 0 sz + 0 nsz nsz sz; + rs_loc_elems_parent_disj + rg (V.as_seq hh0 rv) (V.frameOf rv) 0 nsz; + rs_elems_inv_preserved + rg (V.as_seq hh0 rv) 0 nsz + (loc_union (rs_loc_elems rg (V.as_seq hh0 rv) nsz sz) + (loc_region_only false (V.frameOf rv))) + hh0 hh2; + assert (rv_inv #a #rst #rg hh2 frv); + + // Correctness + as_seq_seq_preserved + rg (V.as_seq hh0 rv) 0 nsz + (loc_union (rs_loc_elems rg (V.as_seq hh0 rv) nsz sz) + (loc_region_only false (V.frameOf rv))) + hh0 hh2; + as_seq_seq_slice + rg hh0 (V.as_seq hh0 rv) 0 sz 0 nsz; + assert (S.equal (S.slice (as_seq hh0 rv) 0 nsz) + (as_seq_seq rg hh2 (V.as_seq hh0 rv) 0 nsz)); + as_seq_seq_eq + rg hh2 (V.as_seq hh0 rv) (V.as_seq hh2 frv) 0 nsz 0 nsz; + assert (S.equal (as_seq_seq rg hh2 (V.as_seq hh2 frv) 0 nsz) + (as_seq_seq rg hh2 (V.as_seq hh0 rv) 0 nsz)); + assert (S.equal (S.slice (as_seq hh0 rv) 0 nsz) + (as_seq hh2 frv)); + frv +end + +val free: + #a:Type0 -> #rst:Type -> #rg:regional rst a -> rv:rvector rg -> + HST.ST unit + (requires (fun h0 -> rv_inv h0 rv)) + (ensures (fun h0 _ h1 -> modifies (loc_rvector rv) h0 h1)) +let free #a #rst #rg rv = + let hh0 = HST.get () in + (if V.size_of rv = 0ul then () + else free_elems rv (V.size_of rv - 1ul)); + let hh1 = HST.get () in + rv_loc_elems_included hh0 rv 0ul (V.size_of rv); + V.free rv diff --git a/stage0/ulib/LowStar.Regional.Instances.fst b/stage0/ulib/LowStar.Regional.Instances.fst new file mode 100644 index 00000000000..fdea7459d95 --- /dev/null +++ b/stage0/ulib/LowStar.Regional.Instances.fst @@ -0,0 +1,277 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*) + +module LowStar.Regional.Instances + +open FStar.Integers +open LowStar.Buffer +open LowStar.Regional +open LowStar.RVector + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module S = FStar.Seq +module B = LowStar.Buffer +module V = LowStar.Vector +module RV = LowStar.RVector + +/// `LowStar.Buffer` is regional + +val buffer_region_of: + #a:Type -> v:B.buffer a -> GTot HS.rid +let buffer_region_of #a v = + B.frameOf v + +val buffer_dummy: a:Type -> Tot (B.buffer a) +let buffer_dummy _ = B.null + +let nonzero = len:UInt32.t{len > 0ul} + +val buffer_r_inv: + #a:Type -> len:nonzero -> + h:HS.mem -> v:B.buffer a -> GTot Type0 +let buffer_r_inv #a len h v = + B.live h v /\ B.freeable v /\ + B.len v == len + +val buffer_r_inv_reg: + #a:Type -> len:nonzero -> + h:HS.mem -> v:B.buffer a -> + Lemma (requires (buffer_r_inv len h v)) + (ensures (HS.live_region h (buffer_region_of v))) +let buffer_r_inv_reg #a len h v = () + +val buffer_repr: a:Type0 -> len:nat{len > 0} -> Type0 +let buffer_repr a len = s:S.seq a{S.length s = len} + +val buffer_r_repr: + #a:Type -> len:UInt32.t{len > 0ul} -> + h:HS.mem -> v:B.buffer a{buffer_r_inv len h v} -> + GTot (buffer_repr a (UInt32.v len)) +let buffer_r_repr #a len h v = B.as_seq h v + +val buffer_r_sep: + #a:Type -> len:UInt32.t{len > 0ul} -> + v:B.buffer a -> p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (buffer_r_inv len h0 v /\ + loc_disjoint + (loc_all_regions_from false + (buffer_region_of v)) p /\ + modifies p h0 h1)) + (ensures (buffer_r_inv len h1 v /\ + buffer_r_repr len h0 v == buffer_r_repr len h1 v)) +let buffer_r_sep #a len v p h0 h1 = + assert (loc_includes (loc_all_regions_from false (buffer_region_of v)) + (loc_buffer v)); + B.modifies_buffer_elim v p h0 h1 + +val buffer_irepr: + #a:Type0 -> ia:a -> len:UInt32.t{len > 0ul} -> + Ghost.erased (buffer_repr a (UInt32.v len)) +let buffer_irepr #a ia len = + Ghost.hide (S.create (UInt32.v len) ia) + +val buffer_r_alloc_p: + #a:Type0 -> v:B.buffer a -> GTot Type0 +let buffer_r_alloc_p #a v = + True + +/// This is the key example here that illustrates how to efficiently do +/// closure-conversion by hand: we have at run-time a function that takes +/// ``arg`` (an actual parameter) that contains the closure state. However, if +/// the function only takes ``arg``, it will have a type that is too +/// polymorphic, i.e. it'll have type ``forall arg. arg -> ...``. Therefore, we +/// add ``arg'`` which is an erased, type-only index which, once instantiated, +/// restricts the domain of the function to operate on the sole value being +/// captured. 
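+(* An illustrative sketch by the editor (the names `demo_const` and `demo_hello`
+   are hypothetical, not part of this module): the same hand-rolled
+   closure-conversion on a toy function. The erased index `#k'` pins the
+   runtime argument `k` to the captured constant, so the partial application
+   below specializes the function without building a closure at extraction. *)
+private let demo_const (#k':Ghost.erased string) (k:string{k == Ghost.reveal k'}) (_:unit)
+  : Tot string = k
+private let demo_hello : unit -> Tot string = demo_const #(Ghost.hide "hello") "hello"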
+val buffer_r_alloc: + #a:Type -> #arg':Ghost.erased (a & nonzero) -> arg:(a & nonzero) { arg == Ghost.reveal arg' } -> r:HST.erid -> + HST.ST (B.buffer a) + (requires (fun h0 -> true)) + (ensures (fun h0 v h1 -> + let ia = fst arg in + let len = snd arg in + Set.subset (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + modifies loc_none h0 h1 /\ + fresh_loc (B.loc_buffer v) h0 h1 /\ + buffer_r_alloc_p v /\ + buffer_r_inv len h1 v /\ + buffer_region_of v = r /\ + buffer_r_repr len h1 v == Ghost.reveal (buffer_irepr ia len))) +let buffer_r_alloc #a #_ (ia, len) r = + B.malloc r ia len + +val buffer_r_free: + #a:Type -> + #arg':Ghost.erased (a & nonzero) -> + arg:(a & nonzero) { arg == Ghost.reveal arg' } -> + v:B.buffer a -> + HST.ST unit + (requires (fun h0 -> + let ia = fst arg in + let len = snd arg in + buffer_r_inv len h0 v)) + (ensures (fun h0 _ h1 -> + modifies (loc_all_regions_from false (buffer_region_of v)) h0 h1)) +let buffer_r_free #a len v = + B.free v + +val buffer_copy: + #a:Type -> + #arg':Ghost.erased (a & nonzero) -> + arg:(a & nonzero){ arg == Ghost.reveal arg' } -> + src:B.buffer a -> dst:B.buffer a -> + HST.ST unit + (requires (fun h0 -> + let len = snd arg in + buffer_r_inv len h0 src /\ buffer_r_inv len h0 dst /\ + HS.disjoint (buffer_region_of src) (buffer_region_of dst))) + (ensures (fun h0 _ h1 -> + let len = snd arg in + modifies (loc_all_regions_from false (buffer_region_of dst)) h0 h1 /\ + buffer_r_inv len h1 dst /\ + buffer_r_repr len h1 dst == buffer_r_repr len h0 src)) +let buffer_copy #a #_ (ia, len) src dst = + B.blit src 0ul dst 0ul len + +#set-options "--print_implicits" +val buffer_regional: + #a:Type -> ia:a -> len:nonzero -> + regional (a & nonzero) (B.buffer a) +let buffer_regional #a ia len = + Rgl (ia, len) + (buffer_region_of #a) + B.loc_buffer + (buffer_dummy a) + (buffer_r_inv #a len) + (buffer_r_inv_reg #a len) + (buffer_repr a (UInt32.v len)) + (buffer_r_repr #a len) + (buffer_r_sep #a len) + (buffer_irepr #a ia len) + (buffer_r_alloc_p #a) + // This is key: there is no partial application here, meaning this extracts to C. + (buffer_r_alloc #a #(ia, len)) + (buffer_r_free #a #(ia, len)) + + + +val buffer_copyable: + #a:Type -> ia:a -> len:nonzero -> + copyable #(a & nonzero) (B.buffer a) (buffer_regional ia len) +let buffer_copyable #a ia len = + Cpy (buffer_copy #_ #(ia, len)) + +/// If `a` is regional, then `vector a` is also regional. +/// +/// We keep a pointer at run-time to the parent type-class. 
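+(* An illustrative sketch (the name `u8_32_rg` and the element type are
+   assumptions of this sketch): the instances compose. 32-byte buffers of
+   `UInt8.t` get a regional instance from `buffer_regional` above, and
+   `vector_regional` (defined below) can then be applied to `u8_32_rg` to
+   obtain an instance for vectors of such buffers. *)
+private let u8_32_rg : regional (UInt8.t & nonzero) (B.buffer UInt8.t) =
+  buffer_regional 0uy 32ul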
+ +val vector_region_of: + #a:Type0 -> #rst:Type -> rg:regional rst a -> v:rvector rg -> GTot HS.rid +let vector_region_of #a #rst rg v = V.frameOf v + +val vector_dummy: + #a:Type0 -> #rst:Type -> rg:Ghost.erased (regional rst a) -> Tot (rvector rg) +let vector_dummy #a #_ _ = V.alloc_empty a + +val vector_r_inv: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> v:rvector rg -> GTot Type0 +let vector_r_inv #a #rst rg h v = RV.rv_inv h v + +val vector_r_inv_reg: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> v:rvector rg -> + Lemma (requires (vector_r_inv rg h v)) + (ensures (HS.live_region h (vector_region_of rg v))) +let vector_r_inv_reg #a #rst rg h v = () + +val vector_repr: #a:Type0 -> #rst:Type -> rg:regional rst a -> Tot Type0 +let vector_repr #a #rst rg = S.seq (Rgl?.repr rg) + +val vector_r_repr: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + h:HS.mem -> v:rvector rg{vector_r_inv rg h v} -> + GTot (vector_repr rg) +let vector_r_repr #a #rst rg h v = RV.as_seq h v + +val vector_r_sep: + #a:Type0 -> #rst:Type -> rg:regional rst a -> + v:rvector rg -> p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (vector_r_inv rg h0 v /\ + loc_disjoint + (loc_all_regions_from false (vector_region_of rg v)) + p /\ + modifies p h0 h1)) + (ensures (vector_r_inv rg h1 v /\ + vector_r_repr rg h0 v == vector_r_repr rg h1 v)) +let vector_r_sep #a #rst rg v p h0 h1 = + RV.rv_inv_preserved v p h0 h1; + RV.as_seq_preserved v p h0 h1 + +val vector_irepr: + #a:Type0 -> #rst:Type -> rg:regional rst a -> Ghost.erased (vector_repr rg) +let vector_irepr #a #rst rg = + Ghost.hide S.empty + +val vector_r_alloc_p: + #a:Type0 -> #rst:Type -> rg:regional rst a -> v:rvector rg -> GTot Type0 +let vector_r_alloc_p #a #rst rg v = + V.size_of v = 0ul + +val vector_r_alloc: + #a:Type0 -> #rst:Type -> rg:regional rst a -> r:HST.erid -> + HST.ST (rvector rg) + (requires (fun h0 -> true)) + (ensures (fun h0 v h1 -> + Set.subset (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + modifies loc_none h0 h1 /\ + fresh_loc (V.loc_vector v) h0 h1 /\ + vector_r_alloc_p rg v /\ + vector_r_inv rg h1 v /\ + vector_region_of rg v = r /\ + vector_r_repr rg h1 v == Ghost.reveal (vector_irepr rg))) +let vector_r_alloc #a #rst rg r = + let nrid = HST.new_region r in + V.alloc_reserve 1ul (rg_dummy rg) r + +val vector_r_free: + #a:Type0 -> #rst:Type -> #rg:Ghost.erased (regional rst a) -> (s:regional rst a{s == Ghost.reveal rg}) -> v:rvector rg -> + HST.ST unit + (requires (fun h0 -> vector_r_inv rg h0 v)) + (ensures (fun h0 _ h1 -> + modifies (loc_all_regions_from false (vector_region_of rg v)) h0 h1)) +let vector_r_free #_ #_ _ v = + V.free v + +val vector_regional: + #a:Type0 -> #rst:Type -> rg:regional rst a -> regional (regional rst a) (rvector rg) +let vector_regional #a #rst rg = + Rgl rg + (vector_region_of #a #rst rg) + V.loc_vector + (vector_dummy #a #rst rg) + (vector_r_inv #a #rst rg) + (vector_r_inv_reg #a #rst rg) + (vector_repr #a rg) + (vector_r_repr #a #rst rg) + (vector_r_sep #a #rst rg) + (vector_irepr #a rg) + (vector_r_alloc_p #a #rst rg) + (vector_r_alloc #a #rst) + (vector_r_free #a #rst #rg) diff --git a/stage0/ulib/LowStar.Regional.fst b/stage0/ulib/LowStar.Regional.fst new file mode 100644 index 00000000000..bd3c3006fbb --- /dev/null +++ b/stage0/ulib/LowStar.Regional.fst @@ -0,0 +1,165 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*)
+
+module LowStar.Regional
+
+(**
+ * This module defines what is conceptually a typeclass called
+ * `regional` (although it is not syntactically marked as a `class`
+ * yet).
+ *
+ * `regional a` is the class of types whose values have explicit
+ * memory allocations confined spatially within a single heap region
+ * in the hyperstack memory model.
+ *
+ * Being confined to a region, values of regional types support a
+ * natural framing principle: state mutations that do not overlap
+ * with a regional value's region are noninterfering.
+ *
+ * Instances of regional types are given for buffers and vectors:
+ * See LowStar.Regional.Instances, LowStar.RVector for samples.
+ *
+ *)
+
+open LowStar.Modifies
+
+module HS = FStar.HyperStack
+module HST = FStar.HyperStack.ST
+
+/// Regionality
+
+/// Motivation: we want to ensure that all stateful operations for a value of
+/// type `a` are within the `region_of` the value.
+///
+/// Furthermore, we would like regional to be parameterized over another type class
+/// that elements can have. However, we are also trying to extract to C, meaning
+/// that we can't incur any run-time lookups and indirections. In essence, we'd
+/// like for members of a regional to potentially be partial applications where
+/// the first argument may be an agility parameter, an extra type class for the
+/// elements, etc. etc. except that partial applications are not allowed in C.
+///
+/// We therefore add an "st" type, which is a piece of (pure) state (hence more
+/// like a parameter) that functions are allowed to capture. Currently, only
+/// ``r_alloc`` needs that extra parameter. The parameter is stored within the
+/// type class, so that clients are not required to manage that piece of state
+/// themselves. This is, in effect, closure-conversion for ``r_alloc`` where the
+/// closure state is lifted and stored in the regional itself. As such, the only
+/// piece of state that ``r_alloc`` may receive is the exact value that was
+/// captured.
+///
+/// Several alternative designs exist, e.g. making ``a`` have type ``st -> Type0``
+/// (won't extract); instantiating ``st`` as a singleton type and dropping the
+/// refinement (also works, but doesn't make the intent of closure-conversion
+/// explicit); dropping the refinement and leaving it up to the user to store
+/// the refinement in ``r_inv`` (which would then take ``state`` as an
+/// argument)...
+noeq type regional (st:Type) (a:Type0) =
+| Rgl:
+  // This is not really a piece of state, but more like a parameter.
+  state: st ->
+
+  // The target type should have a region where it belongs.
+  region_of: (a -> GTot HS.rid) ->
+
+  //loc_of for the underlying a
+  loc_of: (a -> GTot loc) ->
+
+  // A parameterless value of type `a`.
+  // It does not have to satisfy the invariant `r_inv` described below.
+  dummy: a ->
+
+  // An invariant we want to maintain for each operation.
+  // For example, it may include `live` and `freeable` properties
+  // for related objects.
+ r_inv: (HS.mem -> a -> GTot Type0) -> + r_inv_reg: + (h:HS.mem -> v:a -> + Lemma (requires (r_inv h v)) + (ensures (HS.live_region h (region_of v)))) -> + + // A representation type of `a` and a corresponding conversion function + repr: Type0 -> + r_repr: (h:HS.mem -> v:a{r_inv h v} -> GTot repr) -> + + // A core separation lemma, saying that the invariant and representation are + // preserved when an orthogonal state transition happens. + r_sep: + (v:a -> p:loc -> h:HS.mem -> h':HS.mem -> + Lemma (requires (r_inv h v /\ + loc_disjoint (loc_all_regions_from false (region_of v)) p /\ + modifies p h h')) + (ensures (r_inv h' v /\ r_repr h v == r_repr h' v))) -> + + /// Allocation + // The representation for the initial value of type `a` + irepr: Ghost.erased repr -> + + // A property that should hold for all initial values of type `a`. + r_alloc_p: (a -> GTot Type0) -> + + // An allocation operation. We might have several ways of initializing a + // given target type `a`; then multiple typeclass instances should be + // defined, and each of them can be used properly. + r_alloc: ((s:st { s == state }) -> r:HST.erid -> + HST.ST a + (requires (fun h0 -> True)) + (ensures (fun h0 v h1 -> + Set.subset (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + modifies loc_none h0 h1 /\ + fresh_loc (loc_of v) h0 h1 /\ //the underlying loc is fresh + r_alloc_p v /\ r_inv h1 v /\ region_of v == r /\ + r_repr h1 v == Ghost.reveal irepr))) -> + + // Destruction: note that it allows to `modify` all the regions, including + // its subregions. It is fair when we want to `free` a vector and its + // elements as well, assuming the elements belong to subregions. + r_free: ((s:st { s == state }) -> v:a -> + HST.ST unit + (requires (fun h0 -> r_inv h0 v)) + (ensures (fun h0 _ h1 -> + modifies (loc_all_regions_from false (region_of v)) h0 h1))) -> + + regional st a + +let rg_inv #a #rst (rg: regional rst a) = + Rgl?.r_inv rg + +inline_for_extraction +let rg_dummy #a #rst (rg:regional rst a) +: Tot a += Rgl?.dummy rg + +inline_for_extraction +let rg_alloc #a #rst (rg:regional rst a) (r:HST.erid) +: HST.ST a + (requires (fun h0 -> True)) + (ensures (fun h0 v h1 -> + Set.subset (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + modifies loc_none h0 h1 /\ + fresh_loc (Rgl?.loc_of rg v) h0 h1 /\ + (Rgl?.r_alloc_p rg) v /\ rg_inv rg h1 v /\ (Rgl?.region_of rg) v == r /\ + (Rgl?.r_repr rg) h1 v == Ghost.reveal (Rgl?.irepr rg))) += Rgl?.r_alloc rg (Rgl?.state rg) r + +inline_for_extraction +let rg_free #a #rst (rg:regional rst a) (v:a) +: HST.ST unit + (requires (fun h0 -> rg_inv rg h0 v)) + (ensures (fun h0 _ h1 -> + modifies (loc_all_regions_from false (Rgl?.region_of rg v)) h0 h1)) += (Rgl?.r_free rg) (Rgl?.state rg) v diff --git a/stage0/ulib/LowStar.UninitializedBuffer.fst b/stage0/ulib/LowStar.UninitializedBuffer.fst new file mode 100644 index 00000000000..e6082579d97 --- /dev/null +++ b/stage0/ulib/LowStar.UninitializedBuffer.fst @@ -0,0 +1,213 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*) +module LowStar.UninitializedBuffer + +include LowStar.Monotonic.Buffer + +module P = FStar.Preorder +module G = FStar.Ghost +module U32 = FStar.UInt32 +module Seq = FStar.Seq + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(* + * Uninitialized buffers + * + * Modeled as: seq (option a) with a preorder that an index once set remains set + *) +private let initialization_preorder (a:Type0) :srel (option a) = + fun s1 s2 -> Seq.length s1 == Seq.length s2 /\ + (forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i)) + +type ubuffer (a:Type0) = + mbuffer (option a) (initialization_preorder a) (initialization_preorder a) + +unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a) + +unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) + +unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a) + +inline_for_extraction +type pointer (a:Type0) = b:ubuffer a{length b == 1} + +inline_for_extraction +type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1} + +inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) + +inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) + + +(****** main stateful API *****) + +(* + * b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b + *) +private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i) +let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i) + +(* + * Clients need to prove that b is initialized_at i + *) +let uindex (#a:Type0) (b:ubuffer a) (i:U32.t) + :HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i))) + (ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in + Some? 
y_opt /\ y == Some?.v y_opt /\ h0 == h1)) + = let y_opt = index b i in + recall_p b (ipred (U32.v i)); + Some?.v y_opt + +(* + * b `initialized_at` i is a postcondition + *) +let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a) + :HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b)) + (ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\ + live h1 b /\ + as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\ + b `initialized_at` (U32.v i))) + = upd b i (Some v); + witness_p b (ipred (U32.v i)) + +unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len} + +unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) = + b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} + +(* + * No initializer + *) +let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t) + :HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b}) + (requires (fun h0 -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) + = mgcmalloc r None len + +inline_for_extraction +let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t) + :HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b}) + (requires (fun h0 -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) + = mgcmalloc r None len + +let umalloc (#a:Type0) (r:HS.rid) (len:U32.t) + :HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) + = mmalloc r None len + +inline_for_extraction +let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t) + :HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b}) + (requires (fun _ -> malloc_pre r len)) + (ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) + = mmalloc r None len + +let ualloca (#a:Type0) (len:U32.t) + :HST.StackInline (lubuffer a (U32.v len)) + (requires (fun _ -> alloca_pre len)) + (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\ + frameOf b == HS.get_tip h0)) + = malloca None len + +(* + * blit functionality, where src is a regular buffer + *) +[@@"opaque_to_smt"] +unfold let valid_j_for_blit + (#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t) + (dst:ubuffer a) (idx_dst:U32.t) (j:U32.t) + = U32.v idx_src + U32.v j <= length src /\ + U32.v idx_dst + U32.v j <= length dst + +(* + * postcondition of blit + *) +[@@"opaque_to_smt"] +unfold private let ublit_post_j + (#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t) + (dst:ubuffer a) (idx_dst:U32.t) (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j}) + (h0 h1:HS.mem) + = modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\ + (forall (i:nat).{:pattern (Seq.index (as_seq h1 dst) i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j ==> + Seq.index (as_seq h1 dst) i == + Some (Seq.index (as_seq h0 src) (U32.v idx_src + i - U32.v idx_dst))) + ) /\ + Seq.slice (as_seq h1 dst) 0 (U32.v idx_dst) == Seq.slice (as_seq h0 dst) 0 (U32.v idx_dst) /\ + Seq.slice (as_seq h1 dst) (U32.v idx_dst + U32.v j) (length dst) == Seq.slice (as_seq h0 dst) (U32.v idx_dst + U32.v j) (length dst) /\ + (forall (i:nat).{:pattern (dst `initialized_at` i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j) ==> + dst `initialized_at` i) + +let ublit (#a:Type0) (#rrel #rel:srel a) + (src:mbuffer a rrel rel) (idx_src:U32.t) + (dst:ubuffer a{disjoint 
src dst}) (idx_dst:U32.t) + (len:U32.t{valid_j_for_blit src idx_src dst idx_dst len}) + :HST.Stack unit (requires (fun h0 -> live h0 src /\ live h0 dst)) + (ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1)) + = let rec aux (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j}) + :HST.Stack unit + (requires (fun h0 -> live h0 src /\ live h0 dst /\ ublit_post_j src idx_src dst idx_dst j h0 h0)) + (ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1)) + = let open FStar.UInt32 in + if j = len then () + else if j <^ len then begin + uupd dst (idx_dst +^ j) (index src (idx_src +^ j)); + aux (j +^ 1ul) + end + in + aux 0ul + +let witness_initialized (#a:Type0) (b:ubuffer a) (i:nat) + :HST.ST unit (fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i)) + (fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i) + = witness_p b (ipred i) + +let recall_initialized (#a:Type0) (b:ubuffer a) (i:nat) + :HST.ST unit (fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i) + (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ (i < length b ==> Some? (Seq.index (as_seq h0 b) i))) + = recall_p b (ipred i) + +let buffer_immutable_buffer_disjoint + (#ti:Type) (#t:Type0) + (bi:LowStar.ImmutableBuffer.ibuffer ti) + (b:ubuffer t) + (h: HS.mem) +: Lemma + (requires ( + live h b /\ + live h bi /\ + (exists (x:t). True ) // If the type is not inhabited, the initialization and immutable preorders are effectively identical + )) + (ensures ( + disjoint b bi + )) += if length b = 0 + then empty_disjoint b bi + else if length bi = 0 + then empty_disjoint bi b + else begin + let open LowStar.ImmutableBuffer in + let s = as_seq h b in + let s0 = Seq.upd s 0 None in + let s1 = Seq.upd s 0 (Some (FStar.IndefiniteDescription.indefinite_description_ghost t (fun _ -> True))) in + assert(initialization_preorder _ s0 s1 /\ + Seq.index s0 0 =!= Seq.index s1 0 /\ + ~( immutable_preorder _ s0 s1 <==> initialization_preorder _ s0 s1)); + live_same_addresses_equal_types_and_preorders b bi h + end diff --git a/stage0/ulib/LowStar.Vector.fst b/stage0/ulib/LowStar.Vector.fst new file mode 100644 index 00000000000..802c6c4e92a --- /dev/null +++ b/stage0/ulib/LowStar.Vector.fst @@ -0,0 +1,718 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module LowStar.Vector + + +(** + * This module provides support for mutable, partially filled arrays + * whose contents may grow up to some fixed capacity determined + * during initialization. + * + * Vectors support an insertion operation that may, if the capacity + * has been reached, involve copying its contents to a new + * vector of twice the capacity (so long as the capacity has not + * already reached max_uint32). + * + * Conversely, an operation called `flush` is also provided to + * shrink a vector to some prefix of its current contents. + * + * Other operations are fairly standard, and involve reading, + * writing, and iterating over a vector. 
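+ *
+ * A small usage sketch (illustrative only; the element type, sizes, and
+ * local names are assumptions of this sketch):
+ *
+ *   let v = alloc 8ul 0ul in    // a vector of eight 0ul elements
+ *   let v = insert v 42ul in    // append; reallocates if at capacity
+ *   let x = index v 8ul in      // reads back 42ul
+ *   free v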
+ * + *) + +open FStar.Integers +open LowStar.Modifies + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module B = LowStar.Buffer +module S = FStar.Seq + +type uint32_t = UInt32.t + +val max_uint32: uint32_t +let max_uint32 = 4294967295ul // UInt32.uint_to_t (UInt.max_int UInt32.n) + +module U32 = FStar.UInt32 + +/// Abstract vector type + +inline_for_extraction noeq type vector_str a = +| Vec: sz:uint32_t -> + cap:uint32_t{cap >= sz} -> + vs:B.buffer a{B.len vs = cap} -> + vector_str a + +val vector (a: Type0): Tot Type0 +let vector a = vector_str a + +/// Specification + +val as_seq: + HS.mem -> #a:Type -> vec:vector a -> + GTot (s:S.seq a{S.length s = U32.v (Vec?.sz vec)}) +let as_seq h #a vec = + B.as_seq h (B.gsub (Vec?.vs vec) 0ul (Vec?.sz vec)) + +/// Capacity + +inline_for_extraction val size_of: #a:Type -> vec:vector a -> Tot uint32_t +inline_for_extraction let size_of #a vec = + Vec?.sz vec + +inline_for_extraction val capacity_of: #a:Type -> vec:vector a -> Tot uint32_t +inline_for_extraction let capacity_of #a vec = + Vec?.cap vec + +val is_empty: #a:Type -> vec:vector a -> Tot bool +let is_empty #a vec = + size_of vec = 0ul + +val is_full: #a:Type -> vstr:vector_str a -> GTot bool +let is_full #a vstr = + Vec?.sz vstr >= max_uint32 + +/// Memory-related + +val live: #a:Type -> HS.mem -> vector a -> GTot Type0 +let live #a h vec = + B.live h (Vec?.vs vec) + +val freeable: #a:Type -> vector a -> GTot Type0 +let freeable #a vec = + B.freeable (Vec?.vs vec) + +val loc_vector: #a:Type -> vector a -> GTot loc +let loc_vector #a vec = + B.loc_buffer (Vec?.vs vec) + +val loc_addr_of_vector: #a:Type -> vector a -> GTot loc +let loc_addr_of_vector #a vec = + B.loc_addr_of_buffer (Vec?.vs vec) + +val loc_vector_within: + #a:Type -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + GTot loc (decreases (U32.v (j - i))) +let rec loc_vector_within #a vec i j = + if i = j then loc_none + else loc_union (B.loc_buffer (B.gsub (Vec?.vs vec) i 1ul)) + (loc_vector_within vec (i + 1ul) j) + +val loc_vector_within_includes_: + #a:Type -> vec:vector a -> + i:uint32_t -> + j1:uint32_t{i <= j1 && j1 <= size_of vec} -> + j2:uint32_t{i <= j2 && j2 <= j1} -> + Lemma (requires True) + (ensures (loc_includes (loc_vector_within vec i j1) + (loc_vector_within vec i j2))) + (decreases (U32.v (j1 - i))) +let rec loc_vector_within_includes_ #a vec i j1 j2 = + if i = j1 then () + else if i = j2 then () + else begin + loc_vector_within_includes_ vec (i + 1ul) j1 j2; + loc_includes_union_l (B.loc_buffer (B.gsub (Vec?.vs vec) i 1ul)) + (loc_vector_within vec (i + 1ul) j1) + (loc_vector_within vec (i + 1ul) j2); + loc_includes_union_r (loc_vector_within vec i j1) + (B.loc_buffer (B.gsub (Vec?.vs vec) i 1ul)) + (loc_vector_within vec (i + 1ul) j2) + end + +val loc_vector_within_includes: + #a:Type -> vec:vector a -> + i1:uint32_t -> j1:uint32_t{i1 <= j1 && j1 <= size_of vec} -> + i2:uint32_t{i1 <= i2} -> j2:uint32_t{i2 <= j2 && j2 <= j1} -> + Lemma (requires True) + (ensures (loc_includes (loc_vector_within vec i1 j1) + (loc_vector_within vec i2 j2))) + (decreases (U32.v (j1 - i1))) +let rec loc_vector_within_includes #a vec i1 j1 i2 j2 = + if i1 = j1 then () + else if i1 = i2 then loc_vector_within_includes_ vec i1 j1 j2 + else begin + loc_vector_within_includes vec (i1 + 1ul) j1 i2 j2; + loc_includes_union_l (B.loc_buffer (B.gsub (Vec?.vs vec) i1 1ul)) + (loc_vector_within vec (i1 + 1ul) j1) + (loc_vector_within vec i2 j2) + end + +val loc_vector_within_included: + #a:Type 
-> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + Lemma (requires True) + (ensures (loc_includes (loc_vector vec) + (loc_vector_within vec i j))) + (decreases (U32.v (j - i))) +let rec loc_vector_within_included #a vec i j = + if i = j then () + else loc_vector_within_included vec (i + 1ul) j + +val loc_vector_within_disjoint_: + #a:Type -> vec:vector a -> + i1:uint32_t -> + i2:uint32_t{i1 < i2} -> + j2:uint32_t{i2 <= j2 && j2 <= size_of vec} -> + Lemma (requires True) + (ensures (loc_disjoint (B.loc_buffer (B.gsub (Vec?.vs vec) i1 1ul)) + (loc_vector_within vec i2 j2))) + (decreases (U32.v (j2 - i2))) +let rec loc_vector_within_disjoint_ #a vec i1 i2 j2 = + if i2 = j2 then () + else loc_vector_within_disjoint_ vec i1 (i2 + 1ul) j2 + +val loc_vector_within_disjoint: + #a:Type -> vec:vector a -> + i1:uint32_t -> j1:uint32_t{i1 <= j1 && j1 <= size_of vec} -> + i2:uint32_t{j1 <= i2} -> j2:uint32_t{i2 <= j2 && j2 <= size_of vec} -> + Lemma (requires True) + (ensures (loc_disjoint (loc_vector_within vec i1 j1) + (loc_vector_within vec i2 j2))) + (decreases (U32.v (j1 - i1))) +let rec loc_vector_within_disjoint #a vec i1 j1 i2 j2 = + if i1 = j1 then () + else (loc_vector_within_disjoint_ vec i1 i2 j2; + loc_vector_within_disjoint vec (i1 + 1ul) j1 i2 j2) + +val loc_vector_within_union_rev: + #a:Type -> vec:vector a -> + i:uint32_t -> j:uint32_t{i < j && j <= size_of vec} -> + Lemma (requires True) + (ensures (loc_vector_within vec i j == + loc_union (loc_vector_within vec i (j - 1ul)) + (loc_vector_within vec (j - 1ul) j))) + (decreases (U32.v (j - i))) +let rec loc_vector_within_union_rev #a vec i j = + if i = j - 1ul then () + else begin + loc_vector_within_union_rev vec (i + 1ul) j; + loc_union_assoc (B.loc_buffer (B.gsub (Vec?.vs vec) i 1ul)) + (loc_vector_within vec (i + 1ul) (j - 1ul)) + (loc_vector_within vec (j - 1ul) j) + end + +unfold val frameOf: #a:Type -> vector a -> Tot HS.rid +unfold let frameOf #a vec = + B.frameOf (Vec?.vs vec) + +unfold val hmap_dom_eq: h0:HS.mem -> h1:HS.mem -> GTot Type0 +unfold let hmap_dom_eq h0 h1 = + Set.equal (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) + +val modifies_as_seq: + #a:Type -> vec:vector a -> dloc:loc -> + h0:HS.mem -> h1:HS.mem -> + Lemma (requires (live h0 vec /\ + loc_disjoint (loc_vector vec) dloc /\ + modifies dloc h0 h1)) + (ensures (live h1 vec /\ + as_seq h0 vec == as_seq h1 vec)) + [SMTPat (live h0 vec); + SMTPat (loc_disjoint (loc_vector vec) dloc); + SMTPat (modifies dloc h0 h1)] +let modifies_as_seq #a vec dloc h0 h1 = + B.modifies_buffer_elim (Vec?.vs vec) dloc h0 h1 + +val modifies_as_seq_within: + #a:Type -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + dloc:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (live h0 vec /\ + loc_disjoint (loc_vector_within vec i j) dloc /\ + modifies dloc h0 h1)) + (ensures (S.slice (as_seq h0 vec) (U32.v i) (U32.v j) == + S.slice (as_seq h1 vec) (U32.v i) (U32.v j))) + (decreases (U32.v (j - i))) + [SMTPat (live h0 vec); + SMTPat (loc_disjoint (loc_vector_within vec i j) dloc); + SMTPat (modifies dloc h0 h1)] +let rec modifies_as_seq_within #a vec i j dloc h0 h1 = + if i = j then () + else begin + B.modifies_buffer_elim (B.gsub (Vec?.vs vec) i 1ul) dloc h0 h1; + modifies_as_seq_within vec (i + 1ul) j dloc h0 h1; + assert (S.equal (S.slice (as_seq h0 vec) (U32.v i) (U32.v j)) + (S.append (S.slice (as_seq h0 vec) (U32.v i) (U32.v i + 1)) + (S.slice (as_seq h0 vec) (U32.v i + 1) (U32.v j)))); + assert (S.equal (S.slice 
(as_seq h1 vec) (U32.v i) (U32.v j)) + (S.append (S.slice (as_seq h1 vec) (U32.v i) (U32.v i + 1)) + (S.slice (as_seq h1 vec) (U32.v i + 1) (U32.v j)))) + end + +/// Construction + +inline_for_extraction val alloc_empty: + a:Type -> Tot (vec:vector a{size_of vec = 0ul}) +inline_for_extraction let alloc_empty a = + Vec 0ul 0ul B.null + +val alloc_empty_as_seq_empty: + a:Type -> h:HS.mem -> + Lemma (S.equal (as_seq h (alloc_empty a)) S.empty) + [SMTPat (as_seq h (alloc_empty a))] +let alloc_empty_as_seq_empty a h = () + +val alloc_rid: + #a:Type -> len:uint32_t{len > 0ul} -> v:a -> + rid:HS.rid{HST.is_eternal_region rid} -> + HST.ST (vector a) + (requires (fun h0 -> true)) + (ensures (fun h0 vec h1 -> + frameOf vec = rid /\ + live h1 vec /\ freeable vec /\ + modifies loc_none h0 h1 /\ + Set.equal (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + size_of vec = len /\ + S.equal (as_seq h1 vec) (S.create (U32.v len) v) /\ + B.fresh_loc (loc_vector vec) h0 h1)) +let alloc_rid #a len v rid = + Vec len len (B.malloc rid v len) + +// Allocate a vector with the length `len`, filled with the initial value `v`. +// Note that the vector is allocated in the root region. +val alloc: + #a:Type -> len:uint32_t{len > 0ul} -> v:a -> + HST.ST (vector a) + (requires (fun h0 -> true)) + (ensures (fun h0 vec h1 -> + frameOf vec = HS.root /\ + live h1 vec /\ freeable vec /\ + modifies loc_none h0 h1 /\ + Set.equal (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + size_of vec = len /\ + S.equal (as_seq h1 vec) (S.create (U32.v len) v) /\ + B.fresh_loc (loc_vector vec) h0 h1)) +let alloc #a len v = + alloc_rid len v HS.root + +// Allocate a vector with the _capacity_ `len`; we still need to provide an +// initial value `ia` in order to allocate space. +val alloc_reserve: + #a:Type -> len:uint32_t{len > 0ul} -> ia:a -> + rid:HS.rid{HST.is_eternal_region rid} -> + HST.ST (vector a) + (requires (fun h0 -> true)) + (ensures (fun h0 vec h1 -> + frameOf vec = rid /\ live h1 vec /\ freeable vec /\ + modifies loc_none h0 h1 /\ + B.fresh_loc (loc_vector vec) h0 h1 /\ + Set.equal (Map.domain (HS.get_hmap h0)) + (Map.domain (HS.get_hmap h1)) /\ + size_of vec = 0ul /\ + S.equal (as_seq h1 vec) S.empty)) +let alloc_reserve #a len ia rid = + Vec 0ul len (B.malloc rid ia len) + +// Allocate a vector with a given buffer with the length `len`. +// Note that it does not copy the buffer content; instead it directly uses the +// buffer pointer. 
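+// For example (illustrative only): if `b : B.buffer a` with `B.len b = 4ul` is
+// live, then `alloc_by_buffer 4ul b` returns a size-4 vector whose footprint is
+// exactly `B.loc_buffer b`, so the vector and `b` alias the same memory.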
+val alloc_by_buffer: + #a:Type -> len:uint32_t{len > 0ul} -> + buf:B.buffer a{B.len buf = len} -> + HST.ST (vector a) + (requires (fun h0 -> B.live h0 buf)) + (ensures (fun h0 vec h1 -> + frameOf vec = B.frameOf buf /\ loc_vector vec == B.loc_buffer buf /\ + live h1 vec /\ h0 == h1 /\ + size_of vec = len /\ + S.equal (as_seq h1 vec) (B.as_seq h0 buf))) +let alloc_by_buffer #a len buf = + Vec len len buf + +/// Destruction + +val free: + #a:Type -> vec:vector a -> + HST.ST unit + (requires (fun h0 -> live h0 vec /\ freeable vec)) + (ensures (fun h0 _ h1 -> modifies (loc_addr_of_vector vec) h0 h1)) +let free #a vec = + B.free (Vec?.vs vec) + +/// Element access + +val get: + #a:Type -> h:HS.mem -> vec:vector a -> + i:uint32_t{i < size_of vec} -> GTot a +let get #a h vec i = + S.index (as_seq h vec) (U32.v i) + +val index: + #a:Type -> vec:vector a -> i:uint32_t -> + HST.ST a + (requires (fun h0 -> live h0 vec /\ i < size_of vec)) + (ensures (fun h0 v h1 -> + h0 == h1 /\ S.index (as_seq h1 vec) (U32.v i) == v)) +let index #a vec i = + B.index (Vec?.vs vec) i + +val front: + #a:Type -> vec:vector a{size_of vec > 0ul} -> + HST.ST a + (requires (fun h0 -> live h0 vec)) + (ensures (fun h0 v h1 -> + h0 == h1 /\ S.index (as_seq h1 vec) 0 == v)) +let front #a vec = + B.index (Vec?.vs vec) 0ul + +val back: + #a:Type -> vec:vector a{size_of vec > 0ul} -> + HST.ST a + (requires (fun h0 -> live h0 vec)) + (ensures (fun h0 v h1 -> + h0 == h1 /\ S.index (as_seq h1 vec) (U32.v (size_of vec) - 1) == v)) +let back #a vec = + B.index (Vec?.vs vec) (size_of vec - 1ul) + +/// Operations + +val clear: + #a:Type -> vec:vector a -> + Tot (cvec:vector a{size_of cvec = 0ul}) +let clear #a vec = + Vec 0ul (Vec?.cap vec) (Vec?.vs vec) + +val clear_as_seq_empty: + #a:Type -> h:HS.mem -> vec:vector a -> + Lemma (S.equal (as_seq h (clear vec)) S.empty) + [SMTPat (as_seq h (clear vec))] +let clear_as_seq_empty #a h vec = () + +private val slice_append: + #a:Type -> s:S.seq a -> + i:nat -> j:nat{i <= j} -> k:nat{j <= k && k <= S.length s} -> + Lemma (S.equal (S.slice s i k) + (S.append (S.slice s i j) (S.slice s j k))) +private let slice_append #a s i j k = () + +val assign: + #a:Type -> vec:vector a -> + i:uint32_t -> v:a -> + HST.ST unit + (requires (fun h0 -> live h0 vec /\ i < size_of vec)) + (ensures (fun h0 _ h1 -> + hmap_dom_eq h0 h1 /\ + modifies (loc_vector_within #a vec i (i + 1ul)) h0 h1 /\ + get h1 vec i == v /\ + S.equal (as_seq h1 vec) (S.upd (as_seq h0 vec) (U32.v i) v) /\ + live h1 vec)) +#reset-options "--z3rlimit 10" +let assign #a vec i v = + let hh0 = HST.get () in + // NOTE: `B.upd (Vec?.vs vec) i v` makes more sense, + // but the `modifies` postcondition is coarse-grained. 
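+  // (updating the whole buffer would only justify `modifies (loc_vector vec)`,
+  //  while writing through the one-element sub-buffer below supports the finer
+  //  `loc_vector_within vec i (i + 1ul)` footprint stated in the postcondition)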
+ B.upd (B.sub (Vec?.vs vec) i 1ul) 0ul v; + let hh1 = HST.get () in + loc_vector_within_disjoint vec 0ul i i (i + 1ul); + modifies_as_seq_within + vec 0ul i (loc_vector_within #a vec i (i + 1ul)) hh0 hh1; + loc_vector_within_disjoint vec i (i + 1ul) (i + 1ul) (size_of vec); + modifies_as_seq_within + vec (i + 1ul) (size_of vec) (loc_vector_within #a vec i (i + 1ul)) hh0 hh1; + slice_append (as_seq hh1 vec) 0 (U32.v i) (U32.v i + 1); + slice_append (as_seq hh1 vec) 0 (U32.v i + 1) (U32.v (size_of vec)); + slice_append (S.upd (as_seq hh0 vec) (U32.v i) v) 0 (U32.v i) (U32.v i + 1); + slice_append (S.upd (as_seq hh0 vec) (U32.v i) v) 0 (U32.v i + 1) (U32.v (size_of vec)) + +private val resize_ratio: uint32_t +private let resize_ratio = 2ul + +private val new_capacity: cap:uint32_t -> Tot uint32_t +private let new_capacity cap = + if cap >= max_uint32 / resize_ratio then max_uint32 + else if cap = 0ul then 1ul + else cap * resize_ratio + +val insert: + #a:Type -> vec:vector a -> v:a -> + HST.ST (vector a) + (requires (fun h0 -> + live h0 vec /\ freeable vec /\ not (is_full vec) /\ + HST.is_eternal_region (frameOf vec))) + (ensures (fun h0 nvec h1 -> + frameOf vec = frameOf nvec /\ + hmap_dom_eq h0 h1 /\ + live h1 nvec /\ freeable nvec /\ + modifies (loc_union (loc_addr_of_vector vec) + (loc_vector nvec)) h0 h1 /\ + size_of nvec = size_of vec + 1ul /\ + get h1 nvec (size_of vec) == v /\ + S.equal (as_seq h1 nvec) (S.snoc (as_seq h0 vec) v))) +#reset-options "--z3rlimit 20" +let insert #a vec v = + let sz = Vec?.sz vec in + let cap = Vec?.cap vec in + let vs = Vec?.vs vec in + if sz = cap + then (let ncap = new_capacity cap in + let nvs = B.malloc (B.frameOf vs) v ncap in + B.blit vs 0ul nvs 0ul sz; + B.upd nvs sz v; + B.free vs; + Vec (sz + 1ul) ncap nvs) + else + (B.upd vs sz v; + Vec (sz + 1ul) cap vs) + +// Flush elements in the vector until the index `i`. +// It also frees the original allocation and reallocates a smaller space for +// remaining elements. 
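+// For instance (illustrative only): if `v` currently holds [a; b; c; d], then
+// `flush v ia 2ul` frees the old allocation and returns a fresh vector holding
+// [c; d]; the initial value `ia` is only used to allocate the new space.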
+val flush: + #a:Type -> vec:vector a -> ia:a -> + i:uint32_t{i <= size_of vec} -> + HST.ST (vector a) + (requires (fun h0 -> + live h0 vec /\ freeable vec /\ + HST.is_eternal_region (frameOf vec))) + (ensures (fun h0 fvec h1 -> + frameOf vec = frameOf fvec /\ + hmap_dom_eq h0 h1 /\ + live h1 fvec /\ freeable fvec /\ + modifies (loc_union (loc_addr_of_vector vec) + (loc_vector fvec)) h0 h1 /\ + size_of fvec = size_of vec - i /\ + S.equal (as_seq h1 fvec) + (S.slice (as_seq h0 vec) (U32.v i) (U32.v (size_of vec))))) +let flush #a vec ia i = + let fsz = Vec?.sz vec - i in + let asz = if Vec?.sz vec = i then 1ul else fsz in + let vs = Vec?.vs vec in + let fvs = B.malloc (B.frameOf vs) ia asz in + B.blit vs i fvs 0ul fsz; + B.free vs; + Vec fsz asz fvs + +val shrink: + #a:Type -> vec:vector a -> + new_size:uint32_t{new_size <= size_of vec} -> + Tot (vector a) +let shrink #a vec new_size = + Vec new_size (Vec?.cap vec) (Vec?.vs vec) + + +/// Iteration + +val fold_left_seq: + #a:Type -> #b:Type0 -> seq:S.seq a -> + f:(b -> a -> GTot b) -> ib:b -> + GTot b (decreases (S.length seq)) +let rec fold_left_seq #a #b seq f ib = + if S.length seq = 0 then ib + else fold_left_seq (S.tail seq) f (f ib (S.head seq)) + +val fold_left_buffer: + #a:Type -> #b:Type0 -> len:uint32_t -> + buf:B.buffer a{B.len buf = len} -> + f:(b -> a -> Tot b) -> ib:b -> + HST.ST b + (requires (fun h0 -> B.live h0 buf)) + (ensures (fun h0 v h1 -> + h0 == h1 /\ + v == fold_left_seq (B.as_seq h0 buf) f ib)) + (decreases (B.length buf)) +let rec fold_left_buffer #a #b len buf f ib = + if len = 0ul then ib + else (fold_left_buffer (len - 1ul) (B.sub buf 1ul (len - 1ul)) + f (f ib (B.index buf 0ul))) + +val fold_left: + #a:Type -> #b:Type0 -> vec:vector a -> + f:(b -> a -> Tot b) -> ib:b -> + HST.ST b + (requires (fun h0 -> live h0 vec)) + (ensures (fun h0 v h1 -> + h0 == h1 /\ + v == fold_left_seq (as_seq h0 vec) f ib)) +let fold_left #a #b vec f ib = + fold_left_buffer (Vec?.sz vec) (B.sub (Vec?.vs vec) 0ul (Vec?.sz vec)) f ib + +val forall_seq: + #a:Type -> seq:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length seq} -> + p:(a -> GTot Type0) -> GTot Type0 +let forall_seq #a seq i j p = + forall (idx:nat{i <= idx && idx < j}). + p (S.index seq idx) + +val forall_buffer: + #a:Type -> h:HS.mem -> buf:B.buffer a -> + i:nat -> j:nat{i <= j && j <= B.length buf} -> + p:(a -> GTot Type0) -> GTot Type0 +let forall_buffer #a h buf i j p = + forall_seq (B.as_seq h buf) i j p + +val forall_: + #a:Type -> h:HS.mem -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + p:(a -> Tot Type0) -> GTot Type0 +let forall_ #a h vec i j p = + forall_seq (as_seq h vec) (U32.v i) (U32.v j) p + +val forall_all: + #a:Type -> h:HS.mem -> vec:vector a -> + p:(a -> Tot Type0) -> GTot Type0 +let forall_all #a h vec p = + forall_ h vec 0ul (size_of vec) p + +val forall2_seq: + #a:Type -> seq:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length seq} -> + p:(a -> a -> GTot Type0) -> GTot Type0 +let forall2_seq #a seq i j p = + forall (k:nat{i <= k && k < j}) (l:nat{i <= l && l < j && k <> l}). 
+ p (S.index seq k) (S.index seq l) + +val forall2_buffer: + #a:Type -> h:HS.mem -> buf:B.buffer a -> + i:nat -> j:nat{i <= j && j <= B.length buf} -> + p:(a -> a -> GTot Type0) -> GTot Type0 +let forall2_buffer #a h buf i j p = + forall2_seq (B.as_seq h buf) i j p + +val forall2: + #a:Type -> h:HS.mem -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + p:(a -> a -> GTot Type0) -> GTot Type0 +let forall2 #a h vec i j p = + forall2_seq (as_seq h vec) (U32.v i) (U32.v j) p + +val forall2_all: + #a:Type -> h:HS.mem -> vec:vector a -> + p:(a -> a -> GTot Type0) -> GTot Type0 +let forall2_all #a h vec p = + forall2 h vec 0ul (size_of vec) p + +/// Facts + +val get_as_seq_index: + #a:Type -> h:HS.mem -> buf:B.buffer a -> i:uint32_t{i < B.len buf} -> + Lemma (B.get h buf (U32.v i) == S.index (B.as_seq h (B.gsub buf i 1ul)) 0) +let get_as_seq_index #a h buf i = () + +val get_preserved: + #a:Type -> vec:vector a -> + i:uint32_t{i < size_of vec} -> + p:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (live h0 vec /\ + loc_disjoint p (loc_vector_within vec i (i + 1ul)) /\ + modifies p h0 h1)) + (ensures (get h0 vec i == get h1 vec i)) +let get_preserved #a vec i p h0 h1 = + get_as_seq_index h0 (Vec?.vs vec) i; + get_as_seq_index h1 (Vec?.vs vec) i + +private val get_preserved_within: + #a:Type -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + k:uint32_t{(k < i || j <= k) && k < size_of vec} -> + h0:HS.mem -> h1:HS.mem -> + Lemma (requires (live h0 vec /\ + modifies (loc_vector_within vec i j) h0 h1)) + (ensures (get h0 vec k == get h1 vec k)) + [SMTPat (live h0 vec); + SMTPat (modifies (loc_vector_within vec i j) h0 h1); + SMTPat (get h0 vec k)] +let get_preserved_within #a vec i j k h0 h1 = + if k < i then begin + loc_vector_within_disjoint vec k (k + 1ul) i j; + get_preserved vec k (loc_vector_within vec i j) h0 h1 + end + else begin + loc_vector_within_disjoint vec i j k (k + 1ul); + get_preserved vec k (loc_vector_within vec i j) h0 h1 + end + +val forall_seq_ok: + #a:Type -> seq:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length seq} -> + k:nat{i <= k && k < j} -> + p:(a -> GTot Type0) -> + Lemma (requires (forall_seq seq i j p)) + (ensures (p (S.index seq k))) +let forall_seq_ok #a seq i j k p = () + +val forall2_seq_ok: + #a:Type -> seq:S.seq a -> + i:nat -> j:nat{i <= j && j <= S.length seq} -> + k:nat{i <= k && k < j} -> l:nat{i <= l && l < j && k <> l} -> + p:(a -> a -> GTot Type0) -> + Lemma (requires (forall2_seq seq i j p)) + (ensures (p (S.index seq k) (S.index seq l))) +let forall2_seq_ok #a seq i j k l p = () + +val forall_as_seq: + #a:Type -> s0:S.seq a -> s1:S.seq a{S.length s0 = S.length s1} -> + i:nat -> j:nat{i <= j && j <= S.length s0} -> + k:nat{i <= k && k < j} -> + p:(a -> Tot Type0) -> + Lemma (requires (p (S.index s0 k) /\ S.slice s0 i j == S.slice s1 i j)) + (ensures (p (S.index s1 k))) + [SMTPat (p (S.index s0 k)); + SMTPat (S.slice s0 i j); + SMTPat (S.slice s1 i j)] +let forall_as_seq #a s0 s1 i j k p = + assert (S.index (S.slice s0 i j) (k - i) == + S.index (S.slice s1 i j) (k - i)) + +val forall_preserved: + #a:Type -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + p:(a -> Tot Type0) -> + dloc:loc -> h0:HS.mem -> h1:HS.mem -> + Lemma (requires (live h0 vec /\ + loc_disjoint (loc_vector_within vec i j) dloc /\ + forall_ h0 vec i j p /\ + modifies dloc h0 h1)) + (ensures (forall_ h1 vec i j p)) +let forall_preserved #a vec i j p dloc h0 h1 = + modifies_as_seq_within vec i j dloc h0 
h1; + assert (S.slice (as_seq h0 vec) (U32.v i) (U32.v j) == + S.slice (as_seq h1 vec) (U32.v i) (U32.v j)) + +val forall2_extend: + #a:Type -> h:HS.mem -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j < size_of vec} -> + p:(a -> a -> Tot Type0) -> + Lemma (requires (forall2 h vec i j p /\ + forall_ h vec i j + (fun a -> p a (get h vec j) /\ p (get h vec j) a))) + (ensures (forall2 h vec i (j + 1ul) p)) +let forall2_extend #a h vec i j p = () + +val forall2_forall_left: + #a:Type -> h:HS.mem -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + k:uint32_t{i <= k && k < j} -> + p:(a -> a -> Tot Type0) -> + Lemma (requires (forall2 h vec i j p)) + (ensures (forall_ h vec i k (p (get h vec k)))) +let forall2_forall_left #a h vec i j k p = () + +val forall2_forall_right: + #a:Type -> h:HS.mem -> vec:vector a -> + i:uint32_t -> j:uint32_t{i <= j && j <= size_of vec} -> + k:uint32_t{i <= k && k < j} -> + p:(a -> a -> Tot Type0) -> + Lemma (requires (forall2 h vec i j p)) + (ensures (forall_ h vec (k + 1ul) j (p (get h vec k)))) +let forall2_forall_right #a h vec i j k p = () diff --git a/ulib/Makefile b/stage0/ulib/Makefile similarity index 100% rename from ulib/Makefile rename to stage0/ulib/Makefile diff --git a/ulib/Makefile.extract b/stage0/ulib/Makefile.extract similarity index 100% rename from ulib/Makefile.extract rename to stage0/ulib/Makefile.extract diff --git a/ulib/Makefile.extract.fsharp b/stage0/ulib/Makefile.extract.fsharp similarity index 100% rename from ulib/Makefile.extract.fsharp rename to stage0/ulib/Makefile.extract.fsharp diff --git a/ulib/Makefile.verify b/stage0/ulib/Makefile.verify similarity index 100% rename from ulib/Makefile.verify rename to stage0/ulib/Makefile.verify diff --git a/stage0/ulib/Prims.fst b/stage0/ulib/Prims.fst new file mode 100644 index 00000000000..f25def5bf9c --- /dev/null +++ b/stage0/ulib/Prims.fst @@ -0,0 +1,732 @@ +(* + Copyright 2008-2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module Prims + +/// This module is implicitly opened in the scope of all other modules. +/// +/// It provides the very basic primitives on which F* is +/// built, including the definition of total functions, the basic +/// logical connectives, the PURE and GHOST effects and the like. +/// +/// While some of the primitives have logical significance, others are +/// define various conveniences in the language, e.g., type of +/// attributes. + + +(***** Begin trusted primitives *****) + +(** Primitives up to the definition of the GTot effect are trusted + Beyond that all definitions are fully verified *) + + +(** Type of attributes *) +assume new +type attribute : Type0 + +(** An attribute indicating that some definition must be processed by the + Dijkstra monads for free construction *) +assume +val cps:attribute + +(** This attribute marks definitions for logical connectives that should + not be unfolded during tactics. 
*) +assume +val tac_opaque : attribute + +(** This attribute can be used on type binders to make unifier attempt + to unrefine them before instantiating them. This is useful in polymorphic + definitions where the type does not change the result type, for example + eq2 below. Using the attribute, an equality between two nats will happen + at type int, which is more canonical. + + This feature is experimental and only enabled with "--ext __unrefine" *) +assume +val unrefine : attribute + +(** This attribute can be attached to a type definition to partly counter the + behavior of the `unrefine` attribute. It will cause the definition marked + `do_not_unrefine` to not be unfolded during the unrefining process. *) +assume +val do_not_unrefine : attribute + +(** A predicate to express when a type supports decidable equality + The type-checker emits axioms for [hasEq] for each inductive type *) +assume +type hasEq : Type -> GTot Type0 + +(** A convenient abbreviation, [eqtype] is the type of types in + universe 0 which support decidable equality *) +type eqtype = a: Type0{hasEq a} + +(** [bool] is a two element type with elements [true] and [false]. We + assume it is primitive, for convenient interop with other + languages, although it could easily be defined as an inductive type + with two cases, [BTrue | BFalse] *) +assume new +type bool : eqtype + +(** [empty] is the empty inductive type. The type with no + inhabitants represents logical falsehood. Note, [empty] is + seldom used directly in F*. We instead use its "squashed" variant, + [False], see below. *) +type empty = + +(** [trivial] is the singleton inductive type---it is trivially + inhabited. Like [empty], [trivial] is seldom used. We instead use + its "squashed" variants, [True] *) +type trivial = | T + +(** [unit]: another singleton type, with its only inhabitant written [()] + we assume it is primitive, for convenient interop with other languages *) +assume new +type unit : eqtype + +(** [squash p] is a central type in F*---[squash p] is the proof + irrelevant analog of [p] and is represented as a unit + refinement. Squashed proofs are typically discharged using an SMT + solver, without any proof terms explicitly reconstructed. As + such, one way to think of [squash p] is as the type of properties + proven using classical axioms without building proof terms. + + Note, [squash p] is just a unit refinement, it resides in universe + 0, lowering the universe of [p]. From this perspective, one may + also see [squash] as a coercion down to universe 0. + + The type is marked [tac_opaque] to indicate to Meta-F* that + instances of [squash] should not be unfolded when evaluating + tactics (since many optimizations in F*'s SMT encoding rely + specifically on occurrences of [squash]. + + See FStar.Squash for various ways of manipulating squashed + types. *) +[@@ tac_opaque] +type squash (p: Type) : Type0 = x: unit{p} + +(** [auto_squash] is equivalent to [squash]. However, F* will + automatically insert `auto_squash` when simplifying terms, + converting terms of the form `p /\ True` to `auto_squash p`. + + We distinguish these automatically inserted squashes from explicit, + user-written squashes. + + A user should not have to manipulate [auto_squash] at all, except + in rare circumstances when writing tactics to process proofs that + have already been partially simplified by F*'s simplifier. +*) +let auto_squash (p: Type) = squash p + +(** The [logical] type is transitionary. 
It is just an abbreviation + for [Type0], but is used to classify uses of the basic squashed + logical connectives that follow. Some day, we plan to remove the + [logical] type, replacing it with [prop] (also defined below). + + The type is marked [private] to intentionally prevent user code + from referencing this type, hopefully easing the removal of + [logical] in the future. *) +private +type logical = Type0 + +(** An attribute indicating that a symbol is an smt theory symbol and + hence may not be used in smt patterns. The typechecker warns if + such symbols are used in patterns *) +assume +val smt_theory_symbol:attribute + +(** [l_True] has a special bit of syntactic sugar. It is written just + as "True" and rendered in the ide as [True]. It is a squashed version + of constructive truth, [trivial]. *) +[@@ tac_opaque; smt_theory_symbol] +let l_True:logical = squash trivial + +(** [l_False] has a special bit of syntactic sugar. It is written just + as "False" and rendered in the ide as [Falsee]. It is a squashed version + of constructive falsehood, the empty type. *) +[@@ tac_opaque; smt_theory_symbol] +let l_False:logical = squash empty + +(** The type of provable equalities, defined as the usual inductive + type with a single constructor for reflexivity. As with the other + connectives, we often work instead with the squashed version of + equality, below. *) +type equals (#a: Type) (x: a) : a -> Type = | Refl : equals x x + +(** [eq2] is the squashed version of [equals]. It's a proof + irrelevant, homogeneous equality in Type#0 and is written with + an infix binary [==]. + + TODO: instead of hard-wiring the == syntax, + we should just rename eq2 to op_Equals_Equals +*) +[@@ tac_opaque; smt_theory_symbol] +type eq2 (#[@@@unrefine] a: Type) (x: a) (y: a) : logical = squash (equals x y) + +(** bool-to-type coercion: This is often automatically inserted type, + when using a boolean in context expecting a type. But, + occasionally, one may have to write [b2t] explicitly *) +type b2t (b: bool) : logical = (b == true) + +(** constructive conjunction *) +type pair (p: Type) (q: Type) = | Pair : _1:p -> _2:q -> pair p q + +(** squashed conjunction, specialized to [Type0], written with an + infix binary [/\] *) +[@@ tac_opaque; smt_theory_symbol] +type l_and (p: logical) (q: logical) : logical = squash (pair p q) + +(** constructive disjunction *) +type sum (p: Type) (q: Type) = + | Left : v:p -> sum p q + | Right : v:q -> sum p q + +(** squashed disjunction, specialized to [Type0], written with an + infix binary [\/] *) +[@@ tac_opaque; smt_theory_symbol] +type l_or (p: logical) (q: logical) : logical = squash (sum p q) + +(** squashed (non-dependent) implication, specialized to [Type0], + written with an infix binary [==>]. Note, [==>] binds weaker than + [/\] and [\/] *) +[@@ tac_opaque; smt_theory_symbol] +type l_imp (p: logical) (q: logical) : logical = squash (p -> GTot q) +(* ^^^ NB: The GTot effect is primitive; *) +(* elaborated using GHOST a few lines below *) + +(** squashed double implication, infix binary [<==>] *) +[@@ smt_theory_symbol] +type l_iff (p: logical) (q: logical) : logical = (p ==> q) /\ (q ==> p) + +(** squashed negation, prefix unary [~] *) +[@@ smt_theory_symbol] +type l_not (p: logical) : logical = l_imp p False + +(** l_ITE is a weak form of if-then-else at the level of + logical formulae. It's not much used. 
+ + TODO: Can we remove it *) +unfold +type l_ITE (p: logical) (q: logical) (r: logical) : logical = (p ==> q) /\ (~p ==> r) + + +(** One of the main axioms provided by prims is [precedes], a + built-in well-founded partial order over all terms. It's typically + written with an infix binary [<<]. + + The [<<] order includes: + * The [<] ordering on natural numbers + * The subterm ordering on inductive types + * [f x << D f] for data constructors D of an inductive t whose + arguments include a ghost or total function returning a t *) + +assume +type precedes : #a: Type -> #b: Type -> a -> b -> Type0 + +(** The type of primitive strings of characters; See FStar.String *) +assume new +type string : eqtype + +(** This attribute can be added to the declaration or definition of + any top-level symbol. It causes F* to report a warning on any + use of that symbol, printing the [msg] argument. + + This is used, for instance to: + + - tag every escape hatch, e.g., [assume], [admit], etc + + Reports for uses of symbols tagged with this attribute + are controlled using the `--report_assumes` option + and warning number 334. + + See tests/micro-benchmarks/WarnOnUse.fst + *) +assume +val warn_on_use (msg: string) : Tot unit + +(** The [deprecated "s"] attribute: "s" is an alternative function + that should be printed in the warning it can be omitted if the use + case has no such function *) +assume +val deprecated (s: string) : Tot unit + + +(** Within the SMT encoding, we have a relation [(HasType e t)] + asserting that (the encoding of) [e] has a type corresponding to + (the encoding of) [t]. + + It is sometimes convenient, e.g., when writing triggers for + quantifiers, to have access to this relation at the source + level. The [has_type] predicate below reflects the SMT encodings + [HasType] relation. We also use it to define the type [prop] or + proof irrelevant propositions, below. + + Note, unless you have a really good reason, you probably don't + want to use this [has_type] predicate. F*'s type theory certainly + does not internalize its own typing judgment *) +[@@deprecated "'has_type' is intended for internal use and debugging purposes only; \ + do not rely on it for your proofs"] +assume +type has_type : #a: Type -> a -> Type -> Type0 + +(** Squashed universal quantification, or dependent products, written + [forall (x:a). p x], specialized to Type0 *) +[@@ tac_opaque; smt_theory_symbol] +type l_Forall (#a: Type) (p: (a -> GTot Type0)) : logical = squash (x: a -> GTot (p x)) + +#push-options "--warn_error -288" +(** [p1 `subtype_of` p2] when every element of [p1] is also an element + of [p2]. *) +let subtype_of (p1 p2: Type) = forall (x: p1). has_type x p2 +#pop-options + +(** The type of squashed types. + + Note, the [prop] type is a work in progress in F*. In particular, + we would like in the future to more systematically use [prop] for + proof-irrelevant propositions throughout the libraries. However, + we still use [Type0] in many places. + + See https://github.com/FStarLang/FStar/issues/1048 for more + details and the current status of the work. + *) +type prop = a: Type0{a `subtype_of` unit} + +(**** The PURE effect *) + +(** The type of pure preconditions *) +let pure_pre = Type0 + +(** Pure postconditions, predicates on [a], on which the precondition + [pre] is also valid. This provides a way for postcondition formula + to be typed in a context where they can assume the validity of the + precondition. 
This is discussed extensively in Issue #57 *) +let pure_post' (a pre: Type) = _: a{pre} -> GTot Type0 +let pure_post (a: Type) = pure_post' a True + +(** A pure weakest precondition transforms postconditions on [a]-typed + results to pure preconditions + + We require the weakest preconditions to satisfy the monotonicity + property over the postconditions + To enforce it, we first define a vanilla wp type, + and then refine it with the monotonicity condition *) +let pure_wp' (a: Type) = pure_post a -> GTot pure_pre + +(** The monotonicity predicate is marked opaque_to_smt, + meaning that its definition is hidden from the SMT solver, + and if required, will need to be explicitly revealed + This has the advantage that clients that do not need to work with it + directly, don't have the (quantified) definition in their solver context *) + +let pure_wp_monotonic0 (a:Type) (wp:pure_wp' a) = + forall (p q:pure_post a). (forall (x:a). p x ==> q x) ==> (wp p ==> wp q) + +[@@ "opaque_to_smt"] +let pure_wp_monotonic = pure_wp_monotonic0 + +let pure_wp (a: Type) = wp:pure_wp' a{pure_wp_monotonic a wp} + +(** This predicate is an internal detail, used to optimize the + encoding of some quantifiers to SMT by omitting their typing + guards. This is safe to use only when the quantifier serves to + introduce a local macro---use with caution. *) +assume +type guard_free : Type0 -> Type0 + +(** The return combinator for the PURE effect requires + proving the postcondition only on [x] + + Clients should not use it directly, + instead use FStar.Pervasives.pure_return *) +unfold +let pure_return0 (a: Type) (x: a) : pure_wp a = + fun (p: pure_post a) -> + forall (return_val: a). return_val == x ==> p return_val + +(** Sequential composition for the PURE effect + + Clients should not use it directly, + instead use FStar.Pervasives.pure_bind_wp *) +unfold +let pure_bind_wp0 + (a b: Type) + (wp1: pure_wp a) + (wp2: (a -> GTot (pure_wp b))) + : pure_wp b + = fun (p: pure_post b) -> + wp1 (fun (bind_result_1: a) -> wp2 bind_result_1 p) + +(** Conditional composition for the PURE effect + + The combinator is optimized to make use of how the typechecker generates VC + for conditionals. + + The more intuitive form of the combinator would have been: + [(p ==> wp_then post) /\ (~p ==> wp_else post)] + + However, the way the typechecker constructs the VC, [wp_then] is already + weakened with [p]. + + Hence, here we only weaken [wp_else] + + Clients should not use it directly, + instead use FStar.Pervasives.pure_if_then_else *) +unfold +let pure_if_then_else0 (a p: Type) (wp_then wp_else: pure_wp a) : pure_wp a = + fun (post: pure_post a) -> + wp_then post /\ (~p ==> wp_else post) + +(** Conditional composition for the PURE effect, while trying to avoid + duplicating the postcondition by giving it a local name [k]. + + Note the use of [guard_free] here: [k] is just meant to be a macro + for [post]. + + Clients should not use it directly, + instead use FStar.Pervasives.pure_ite_wp *) +unfold +let pure_ite_wp0 (a: Type) (wp: pure_wp a) : pure_wp a = + fun (post: pure_post a) -> + forall (k: pure_post a). (forall (x: a). {:pattern (guard_free (k x))} post x ==> k x) ==> wp k + +(** Subsumption for the PURE effect *) +unfold +let pure_stronger (a: Type) (wp1 wp2: pure_wp a) = forall (p: pure_post a). 
wp1 p ==> wp2 p
+
+(** Closing a PURE WP under a binder for [b]
+
+    Clients should not use it directly,
+    instead use FStar.Pervasives.pure_close_wp *)
+unfold
+let pure_close_wp0 (a b: Type) (wp: (b -> GTot (pure_wp a))) : pure_wp a = fun (p: pure_post a) -> forall (b: b). wp b p
+
+(** Trivial WP for PURE: Prove the WP with the trivial postcondition *)
+unfold
+let pure_trivial (a: Type) (wp: pure_wp a) = wp (fun (trivial_result: a) -> True)
+
+(** Introduces the PURE effect.
+    The definition of the PURE effect is fixed.
+    NO USER SHOULD EVER CHANGE THIS. *)
+total
+new_effect {
+  PURE : a: Type -> wp: pure_wp a -> Effect
+  with
+    return_wp = pure_return0
+  ; bind_wp = pure_bind_wp0
+  ; if_then_else = pure_if_then_else0
+  ; ite_wp = pure_ite_wp0
+  ; stronger = pure_stronger
+  ; close_wp = pure_close_wp0
+  ; trivial = pure_trivial
+}
+
+(** [Pure] is a Hoare-style counterpart of [PURE]
+
+    Note the type of post, which allows to assume the precondition
+    for the well-formedness of the postcondition. c.f. #57 *)
+effect Pure (a: Type) (pre: pure_pre) (post: pure_post' a pre) =
+  PURE a
+    (fun (p: pure_post a) -> pre /\ (forall (pure_result: a). post pure_result ==> p pure_result))
+
+(** [Admit] is an effect abbreviation for a computation that
+    disregards the verification condition of its continuation *)
+effect Admit (a: Type) = PURE a (fun (p: pure_post a) -> True)
+
+(** The primitive effect [Tot] is definitionally equal to an instance of [PURE] *)
+
+(** Clients should not use it directly, instead use FStar.Pervasives.pure_null_wp *)
+unfold
+let pure_null_wp0 (a: Type) : pure_wp a = fun (p: pure_post a) -> forall (any_result: a). p any_result
+
+(** [Tot]: From here on, we have [Tot] as a defined symbol in F*. *)
+effect Tot (a: Type) = PURE a (pure_null_wp0 a)
+
+(** Clients should not use it directly, instead use FStar.Pervasives.pure_assert_wp *)
+[@@ "opaque_to_smt"]
+unfold
+let pure_assert_wp0 (p: Type) : pure_wp unit = fun (post: pure_post unit) -> p /\ post ()
+
+(** Clients should not use it directly, instead use FStar.Pervasives.pure_assume_wp *)
+[@@ "opaque_to_smt"]
+unfold
+let pure_assume_wp0 (p: Type) : pure_wp unit = fun (post: pure_post unit) -> p ==> post ()
+
+(**** The [GHOST] effect *)
+
+(** [GHOST] is logically equivalent to [PURE], but distinguished from
+    it nominally so that specific, computationally irrelevant
+    operations, are provided only in [GHOST] and are erased during
+    extraction *)
+total
+new_effect GHOST = PURE
+
+unfold
+let purewp_id (a: Type) (wp: pure_wp a) = wp
+
+(** [PURE] computations can be lifted to the [GHOST] effect (but not
+    vice versa) using just the identity lifting on pure wps *)
+sub_effect PURE ~> GHOST { lift_wp = purewp_id }
+
+(** [Ghost] is the Hoare-style counterpart of [GHOST] *)
+effect Ghost (a: Type) (pre: Type) (post: pure_post' a pre) =
+  GHOST a
+    (fun (p: pure_post a) -> pre /\ (forall (ghost_result: a). post ghost_result ==> p ghost_result)
+    )
+
+(** As with [Tot], the primitive effect [GTot] is definitionally equal
+    to an instance of GHOST *)
+effect GTot (a: Type) = GHOST a (pure_null_wp0 a)
+
+
+(***** End trusted primitives *****)
+
+(** From this point onward, F* fully verifies all the definitions *)
+
+(** [===] heterogeneous equality *)
+let ( === ) (#a #b: Type) (x: a) (y: b) : logical = a == b /\ x == y
+
+(** Dependent pairs [dtuple2] are written [x:a & b x] in concrete syntax.
+ Its values can be constructed with the concrete syntax [(| x, y |)] *) +unopteq +type dtuple2 (a: Type) (b: (a -> GTot Type)) = + | Mkdtuple2 : _1: a -> _2: b _1 -> dtuple2 a b + +(** Squashed existential quantification, or dependent sums, + are written [exists (x:a). p x] : specialized to Type0 *) +[@@ tac_opaque; smt_theory_symbol] +type l_Exists (#a: Type) (p: (a -> GTot Type0)) : logical = squash (x: a & p x) + +(** Primitive type of mathematical integers, mapped to zarith in OCaml + extraction and to the SMT sort of integers *) +assume new +type int : eqtype + +(**** Basic operators on booleans and integers *) + +(** [&&] boolean conjunction *) + +[@@ smt_theory_symbol] +assume +val op_AmpAmp: bool -> bool -> Tot bool + +(** [||] boolean disjunction *) + +[@@ smt_theory_symbol] +assume +val op_BarBar: bool -> bool -> Tot bool + +(** [not] boolean negation *) + +[@@ smt_theory_symbol] +assume +val op_Negation: bool -> Tot bool + +(** Integer multiplication, no special symbol. See FStar.Mul *) + +[@@ smt_theory_symbol] +assume +val op_Multiply: int -> int -> Tot int + +(** [-] integer subtraction *) + +[@@ smt_theory_symbol] +assume +val op_Subtraction: int -> int -> Tot int + +(** [+] integer addition *) + +[@@ smt_theory_symbol] +assume +val op_Addition: int -> int -> Tot int + +(** [-] prefix unary integer negation *) + +[@@ smt_theory_symbol] +assume +val op_Minus: int -> Tot int + +(** [<=] integer comparison *) + +[@@ smt_theory_symbol] +assume +val op_LessThanOrEqual: int -> int -> Tot bool + +(** [>] integer comparison *) + +[@@ smt_theory_symbol] +assume +val op_GreaterThan: int -> int -> Tot bool + +(** [>=] integer comparison *) + +[@@ smt_theory_symbol] +assume +val op_GreaterThanOrEqual: int -> int -> Tot bool + +(** [<] integer comparison *) + +[@@ smt_theory_symbol] +assume +val op_LessThan: int -> int -> Tot bool + +(** [=] decidable equality on [eqtype] *) + +[@@ smt_theory_symbol] +assume +val op_Equality: #[@@@unrefine]a: eqtype -> a -> a -> Tot bool + +(** [<>] decidable dis-equality on [eqtype] *) + +[@@ smt_theory_symbol] +assume +val op_disEquality: #[@@@unrefine]a: eqtype -> a -> a -> Tot bool + +(** The extensible open inductive type of exceptions *) +assume new +type exn : Type0 + +(** [array]: TODO: should be removed. + See FStar.Seq, LowStar.Buffer, etc. *) +assume new +type array : Type -> Type0 + + +(** String concatenation and its abbreviation as [^]. TODO, both + should be removed in favor of what is present in FStar.String *) +assume +val strcat: string -> string -> Tot string +inline_for_extraction unfold +let op_Hat s1 s2 = strcat s1 s2 + +(** The inductive type of polymorphic lists *) +type list (a: Type) = + | Nil : list a + | Cons : hd: a -> tl: list a -> list a + +(** The [M] marker is interpreted by the Dijkstra Monads for Free + construction. It has a "double meaning", either as an alias for + reasoning about the direct definitions, or as a marker for places + where a CPS transformation should happen. *) +effect M (a: Type) = Tot a (attributes cps) + +(** Returning a value into the [M] effect *) +let returnM (a: Type) (x: a) : M a = x + +(** [as_requires] turns a WP into a precondition, by applying it to + a trivial postcondition *) +unfold +let as_requires (#a: Type) (wp: pure_wp a) : pure_pre = wp (fun x -> True) + +(** [as_ensures] turns a WP into a postcondition, relying on a kind of + double negation translation. 
*)
+unfold
+let as_ensures (#a: Type) (wp: pure_wp a) : pure_post a = fun (x:a) -> ~(wp (fun y -> (y =!= x)))
+
+(** The term-level keyword [assume] is desugared to [_assume].
+    It explicitly provides an escape hatch to assume a given property
+    [p]. *)
+[@@ warn_on_use "Uses an axiom"]
+assume
+val _assume (p: Type) : Pure unit (requires (True)) (ensures (fun x -> p))
+
+(** [admit] is another escape hatch: It discards the continuation and
+    returns a value of any type *)
+[@@ warn_on_use "Uses an axiom"]
+assume
+val admit: #a: Type -> unit -> Admit a
+
+(** [magic] is another escape hatch: It retains the continuation but
+    returns a value of any type *)
+[@@ warn_on_use "Uses an axiom"]
+assume
+val magic: #a: Type -> unit -> Tot a
+
+(** [unsafe_coerce] is another escape hatch: It coerces an [a] to a
+    [b]. *)
+[@@ warn_on_use "Uses an axiom"]
+irreducible
+let unsafe_coerce (#a #b: Type) (x: a) : b =
+  admit ();
+  x
+
+(** [admitP]: TODO: Unused ... remove? *)
+[@@ warn_on_use "Uses an axiom"]
+assume
+val admitP (p: Type) : Pure unit True (fun x -> p)
+
+(** The term-level keyword [assert] is desugared to [_assert].
+    It forces a proof of a property [p], and then assumes [p] for the
+    continuation. *)
+val _assert (p: Type) : Pure unit (requires p) (ensures (fun x -> p))
+let _assert p = ()
+
+(** Logically equivalent to assert; TODO remove? *)
+val cut (p: Type) : Pure unit (requires p) (fun x -> p)
+let cut p = ()
+
+(** The type of non-negative integers *)
+type nat = i: int{i >= 0}
+
+(** The type of positive integers *)
+type pos = i: int{i > 0}
+
+(** The type of non-zero integers *)
+type nonzero = i: int{i <> 0}
+
+/// Arbitrary precision ints are compiled to zarith (big_ints) in
+/// OCaml and to .NET BigInteger in F#. Both the modulus and division
+/// operations are Euclidean and are mapped to the corresponding
+/// theory symbols in the SMT encoding
+
+(** Euclidean modulus *)
+
+[@@ smt_theory_symbol]
+assume
+val op_Modulus: int -> nonzero -> Tot int
+
+(** Euclidean division, written [/] *)
+
+[@@ smt_theory_symbol]
+assume
+val op_Division: int -> nonzero -> Tot int
+
+(** [pow2 x] is [2^x]:
+
+    TODO: maybe move this to FStar.Int *)
+let rec pow2 (x: nat) : Tot pos =
+  match x with
+  | 0 -> 1
+  | _ -> 2 `op_Multiply` (pow2 (x - 1))
+
+(** [min] computes the minimum of two [int]s *)
+let min x y = if x <= y then x else y
+
+(** [abs] computes the absolute value of an [int] *)
+let abs (x: int) : Tot int = if x >= 0 then x else - x
+
+(** A primitive printer for booleans:
+
+    TODO: unnecessary, this could easily be defined *)
+assume
+val string_of_bool: bool -> Tot string
+
+(** A primitive printer for [int] *)
+assume
+val string_of_int: int -> Tot string
+
+(** THIS IS MEANT TO BE KEPT IN SYNC WITH FStar.CheckedFiles.fs
+    Incrementing this forces all .checked files to be invalidated *)
+irreducible
+let __cache_version_number__ = 72
diff --git a/ulib/default.nix b/stage0/ulib/default.nix
similarity index 100%
rename from ulib/default.nix
rename to stage0/ulib/default.nix
diff --git a/stage0/ulib/experimental/FStar.ConstantTime.Integers.fst b/stage0/ulib/experimental/FStar.ConstantTime.Integers.fst
new file mode 100644
index 00000000000..c6f890d4b0d
--- /dev/null
+++ b/stage0/ulib/experimental/FStar.ConstantTime.Integers.fst
@@ -0,0 +1,84 @@
+(*
+   Copyright 2008-2018 Microsoft Research
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.ConstantTime.Integers + +(** + This module provides a refinement of FStar.IFC providing an + interface restricted only to constant-time operations on integers. + + In contrast, FStar.IFC provides a general monadic information-flow + control framework, which need not be restricted to constant-time + operations. +*) + +open FStar.IFC +open FStar.Integers + +/// A `secret_int l s` is a machine-integer at secrecy level `l` and +/// signedness/width `s`. +let secret_int (#sl:sl) + (l:lattice_element sl) + (s:sw) : Type0 = + protected l (int_t s) + +/// A `secret_int l s` can be seen as an int in spec +let reveal (#sl:sl) + (#l:lattice_element sl) + (#s:sw) + (x:secret_int l s) + : GTot (y:int{within_bounds s y}) + = v (reveal x) + +/// `hide` is the inverse of `reveal`, proving that `secret_int` is injective +let hide (#sl:sl) (#l:lattice_element sl) (#s:sw) (x:int{within_bounds s x}) + : GTot (secret_int l s) + = return l (u x) + +let reveal_hide #sl #l #s x = () +let hide_reveal #sl #l #s x = () + +let promote #sl #l0 #s x l1 = + join (return #_ #(secret_int l0 s) l1 x) + +////////////////////////////////////////////////////////////////////////////////////////// +/// The remainder of this module provides liftings of specific integers operations +/// to work on secret integers, i.e., only those that respect the constant time guarantees +/// and do not break confidentiality. +/// +/// Note, with our choice of representation, it is impossible to +/// implement functions that break basic IFC guarantees, e.g., we +/// cannot implement a boolean comparison function on secret_ints +noextract +inline_for_extraction +let addition #sl (#l:lattice_element sl) #s + (x : secret_int l s) + (y : secret_int l s {ok ( + ) (m x) (m y)}) + : Tot (z:secret_int l s{m z == m x + m y}) + = let>> a = x in + let>> b = y in + return l (a + b) + +noextract +inline_for_extraction +let addition_mod (#sl:sl) + (#l:lattice_element sl) + (#sw: _ {Unsigned? sw /\ width_of_sw sw <> W128}) + (x : secret_int l sw) + (y : secret_int l sw) + : Tot (z:secret_int l sw { m z == m x +% m y } ) + = let>> a = x in + let>> b = y in + return l (a +% b) diff --git a/stage0/ulib/experimental/FStar.ConstantTime.Integers.fsti b/stage0/ulib/experimental/FStar.ConstantTime.Integers.fsti new file mode 100644 index 00000000000..11d83600a42 --- /dev/null +++ b/stage0/ulib/experimental/FStar.ConstantTime.Integers.fsti @@ -0,0 +1,193 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.ConstantTime.Integers + +/// This module provides a refinement of FStar.IFC providing an +/// interface restricted only to constant-time operations on integers. +/// +/// In contrast, FStar.IFC provides a general monadic information-flow +/// control framework, which need not be restricted to constant-time +/// operations. + +open FStar.IFC +open FStar.Integers + + +(** `sw`: signedness and width of machine integers excluding + FStar.[U]Int128, which does not provide constant-time + operations. *) +let sw = s:signed_width{width_of_sw s <> Winfinite + /\ width_of_sw s <> W128} + +(** A `secret_int l s` is a machine-integer at secrecy level `l` and + signedness/width `s`. *) +val secret_int (#sl:sl u#c) + (l:lattice_element sl) + (s:sw) : Type0 + +(** A `secret_int l s` can be seen as an int in spec *) +val reveal (#sl:sl) + (#l:lattice_element sl) + (#s:sw) + (x:secret_int l s) + : GTot (y:int{within_bounds s y}) + +(** A `secret_int l s` can be also be seen as an machine integer in spec *) +let m #sl (#t:lattice_element sl) #s (x:secret_int t s) + : GTot (int_t s) + = u (reveal x) + +(** `hide` is the inverse of `reveal`, proving that `secret_int` is injective *) +val hide (#sl:sl) + (#l:lattice_element sl) + (#s:sw) + (x:int{within_bounds s x}) + : GTot (secret_int l s) + +val reveal_hide (#sl:sl) + (#l:lattice_element sl) + (#s:sw) + (x:int{within_bounds s x}) + : Lemma (reveal (hide #sl #l #s x) == x) + +val hide_reveal (#sl:sl) + (#l:lattice_element sl) + (#s:sw) + (x:secret_int l s) + : Lemma (hide (reveal x) == x) + [SMTPat (reveal x)] + +(** `promote x l` allows increasing the confidentiality classification of `x` + This can easily be programmed using the FStar.IFC interface *) +val promote (#sl:sl) + (#l0:lattice_element sl) + (#s:sw) + (x:secret_int l0 s) + (l1:lattice_element sl) + : Tot (y:secret_int (l1 `lub` l0) s{reveal y == reveal x}) + +/// The remainder of this module provides liftings of specific +/// integers operations to work on secret integers, i.e., only those +/// that respect the constant time guarantees and do not break +/// confidentiality. +/// +/// Note, with our choice of representation, it is impossible to +/// implement functions that break basic IFC guarantees, e.g., we +/// cannot implement a boolean comparison function on secret_ints + +(** Bounds-respecting addition *) +noextract +inline_for_extraction +val addition (#sl:sl) + (#l:lattice_element sl) + (#s:sw) + (x : secret_int l s) + (y : secret_int l s {ok ( + ) (m x) (m y)}) + : Tot (z:secret_int l s{m z == m x + m y}) + +(** Addition modulo *) +noextract +inline_for_extraction +val addition_mod (#sl:sl) + (#l:lattice_element sl) + (#sw: _ {Unsigned? sw /\ width_of_sw sw <> W128}) + (x : secret_int l sw) + (y : secret_int l sw) + : Tot (z:secret_int l sw { m z == m x +% m y } ) + +/// If we like this style, I will proceed to implement a lifting of +/// the rest of the constant-time integers over secret integers + + +/// Now, a multiplexing layer to overload operators over int_t and secret_int + +(** A type of qualifiers, distinguishing secret and public integers *) +noeq +type qual = + | Secret: #sl:sl + -> l:lattice_element sl + -> sw:sw + -> qual + | Public: sw:signed_width + -> qual + +(** The signedness and width of a qualifier *) +[@@mark_for_norm] +unfold +let sw_qual = function + | Secret _ sw -> sw + | Public sw -> sw + +(** The lattice element of a secret qualifier *) +[@@mark_for_norm] +unfold +let label_qual (q:qual{Secret? 
q}) : lattice_element (Secret?.sl q) = + match q with + | Secret l _ -> l + +(** The type corresponding to a qualifier, either an integer or a secret integer *) +[@@mark_for_norm] +unfold +let t (q:qual) = + match q with + | Secret l s -> secret_int l s + | Public s -> int_t s + +[@@mark_for_norm] +unfold +let i (#q:qual) (x:t q) : GTot (int_t (sw_qual q)) = + match q with + | Public s -> x + | Secret l s -> m (x <: secret_int l s) + +[@@mark_for_norm] +unfold +let as_secret (#q:qual{Secret? q}) (x:t q) + : secret_int (label_qual q) (sw_qual q) + = x + +[@@mark_for_norm] +unfold +let as_public (#q:qual{Public? q}) (x:t q) + : int_t (sw_qual q) + = x + +(** Lifting addition to work over both secret and public integers *) +[@@mark_for_norm] +unfold +noextract +inline_for_extraction +let ( + ) (#q:qual) (x:t q) (y:t q{ok (+) (i x) (i y)}) + : Tot (t q) + = match q with + | Public s -> as_public x + as_public y + | Secret l s -> as_secret x `addition` as_secret y + +(** Lifting addition modulo to work over both secret and public integers *) +[@@mark_for_norm] +unfold +noextract +inline_for_extraction +let ( +% ) (#q:qual{norm (Unsigned? (sw_qual q) /\ width_of_sw (sw_qual q) <> W128)}) + (x:t q) + (y:t q) + : Tot (t q) + = match q with + | Public s -> as_public x +% as_public y + | Secret l s -> as_secret x `addition_mod` as_secret y + +(**** See src/tests/microbenchmarks/Test.ConstantTimeIntegers.fst for some unit tests *) + diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.Base.fst b/stage0/ulib/experimental/FStar.InteractiveHelpers.Base.fst new file mode 100644 index 00000000000..ee4baabf472 --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.Base.fst @@ -0,0 +1,633 @@ +module FStar.InteractiveHelpers.Base + +open FStar.List.Tot +open FStar.Tactics +open FStar.Mul + +#set-options "--z3rlimit 15 --fuel 0 --ifuel 1" + +(*** Utilities *) +val bv_eq : bv -> bv -> Tot bool +let bv_eq (bv1 bv2 : bv) = + let bvv1 = inspect_bv bv1 in + let bvv2 = inspect_bv bv2 in + (* We don't check for type equality: the fact that no two different binders + * have the same name and index is an invariant which must be enforced - + * and actually we could limit ourselves to checking the index *) + bvv1.bv_index = bvv2.bv_index + +val fv_eq : fv -> fv -> Tot bool +let fv_eq fv1 fv2 = + let n1 = inspect_fv fv1 in + let n2 = inspect_fv fv2 in + n1 = n2 + +// TODO: use everywhere +val fv_eq_name : fv -> name -> Tot bool +let fv_eq_name fv n = + let fvn = inspect_fv fv in + fvn = n + +// TODO: use more +val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b) +let opt_apply #a #b f x = + match x with + | None -> None + | Some x' -> Some (f x') + +val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b) +let opt_tapply #a #b f x = + match x with + | None -> None + | Some x' -> Some (f x') + +val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string +let option_to_string #a f x = + match x with + | None -> "None" + | Some x' -> "Some (" ^ f x' ^ ")" + + +let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) = + match opt_x with + | Some x -> x :: ls + | None -> ls + +val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string +let list_to_string #a f ls = + (Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]" + + +/// Apply a term to a list of parameters, normalize the result to make sure +/// all the abstractions are simplified +val mk_app_norm : env -> term -> list term 
-> Tac term +let mk_app_norm e t params = + let t1 = mk_e_app t params in + let t2 = norm_term_env e [] t1 in + t2 + +val opt_mk_app_norm : env -> option term -> list term -> Tac (option term) +let opt_mk_app_norm e opt_t params = + opt_tapply (fun t -> mk_app_norm e t params) opt_t + +// TODO: remove +let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) = + match l with + | [] -> ([],[]) + | (hd1,hd2)::tl -> + let (tl1,tl2) = unzip tl in + (hd1::tl1,hd2::tl2) + +/// Alternative ``bv_to_string`` function where we print the index of the bv. +/// It can be very useful for debugging. +let abv_to_string bv : Tac string = + let bvv = inspect_bv bv in + name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")" + +let print_binder_info (full : bool) (b : binder) : Tac unit = + let open inspect_binder b <: binder_view in + let qual_str = match binder_qual with + | Q_Implicit -> "Implicit" + | Q_Explicit -> "Explicit" + | Q_Meta t -> "Meta: " ^ term_to_string t + in + let bview = inspect_bv binder_bv in + if full then + print ( + "> print_binder_info:" ^ + "\n- name: " ^ (name_of_binder b) ^ + "\n- as string: " ^ (binder_to_string b) ^ + "\n- aqual: " ^ qual_str ^ + "\n- ppname: " ^ name_of_bv binder_bv ^ + "\n- index: " ^ string_of_int bview.bv_index ^ + "\n- sort: " ^ term_to_string binder_sort + ) + else print (binder_to_string b) + +let print_binders_info (full : bool) (e:env) : Tac unit = + iter (print_binder_info full) (binders_of_env e) + +let acomp_to_string (c:comp) : Tac string = + match inspect_comp c with + | C_Total ret -> + "C_Total (" ^ term_to_string ret ^ ")" + | C_GTotal ret -> + "C_GTotal (" ^ term_to_string ret ^ ")" + | C_Lemma pre post patterns -> + "C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")" + | C_Eff us eff_name result eff_args _ -> + let eff_arg_to_string (a : term) : Tac string = + " (" ^ term_to_string a ^ ")" + in + let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in + let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in + "C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str + +exception MetaAnalysis of error_message +let mfail_doc m = + raise (MetaAnalysis m) +let mfail str = + raise (MetaAnalysis (mkmsg str)) + +(*** Debugging *) +/// Some debugging facilities +val print_dbg : bool -> string -> Tac unit +let print_dbg debug s = + if debug then print s + +/// Return the qualifier of a term as a string +val term_view_construct (t : term_view) : Tac string + +let term_view_construct (t : term_view) : Tac string = + match t with + | Tv_Var _ -> "Tv_Var" + | Tv_BVar _ -> "Tv_BVar" + | Tv_FVar _ -> "Tv_FVar" + | Tv_App _ _ -> "Tv_App" + | Tv_Abs _ _ -> "Tv_Abs" + | Tv_Arrow _ _ -> "Tv_Arrow" + | Tv_Type _ -> "Tv_Type" + | Tv_Refine _ _ _ -> "Tv_Refine" + | Tv_Const _ -> "Tv_Const" + | Tv_Uvar _ _ -> "Tv_Uvar" + | Tv_Let _ _ _ _ _ _ -> "Tv_Let" + | Tv_Match _ _ _ -> "Tv_Match" + | Tv_AscribedT _ _ _ _ -> "Tv_AscribedT" + | Tv_AscribedC _ _ _ _ -> "Tv_AScribedC" + | _ -> "Tv_Unknown" + +val term_construct (t : term) : Tac string + +let term_construct (t : term) : Tac string = + term_view_construct (inspect t) + +(*** Pretty printing *) +/// There are many issues linked to terms (pretty) printing. +/// The first issue is that when parsing terms, F* automatically inserts +/// ascriptions, which then clutter the terms printed to the user. The current +/// workaround is to filter those ascriptions in the terms before exploiting them. 
+/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b] +/// become [l_and a b]... + +val filter_ascriptions : bool -> term -> Tac term + +let filter_ascriptions dbg t = + print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t ); + visit_tm (fun t -> + match inspect t with + | Tv_AscribedT e _ _ _ + | Tv_AscribedC e _ _ _ -> e + | _ -> t) t + +/// Our prettification function. Apply it to all the terms which might be printed +/// back to the user. Note that the time at which the function is applied is +/// important: we can't apply it on all the assertions we export to the user, just +/// before exporting, because we may have inserted ascriptions on purpose, which +/// would then be filtered away. +val prettify_term : bool -> term -> Tac term +let prettify_term dbg t = filter_ascriptions dbg t + +(*** Environments *) +/// We need a way to handle environments with variable bindings +/// and name shadowing, to properly display the context to the user. + +/// A map linking variables to terms. For now we use a list to define it, because +/// there shouldn't be too many bindings. +type bind_map (a : Type) = list (bv & a) + +let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m + +let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) = + match m with + | [] -> None + | (b', x)::m' -> + if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b + +let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) : + Tac (option (bv & a)) = + match m with + | [] -> None + | (b', x)::m' -> + let b'v = inspect_bv b' in + if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name + +noeq type genv = +{ + env : env; + (* Whenever we evaluate a let binding, we keep track of the relation between + * the binder and its definition. + * The boolean indicates whether or not the variable is considered abstract. We + * often need to introduce variables which don't appear in the user context, for + * example when we need to deal with a postcondition for Stack or ST, which handles + * the previous and new memory states, and which may not be available in the user + * context, or where we don't always know which variable to use. + * In this case, whenever we output the term, we write its content as an + * abstraction applied to those missing parameters. For instance, in the + * case of the assertion introduced for a post-condition: + * [> assert((fun h1 h2 -> post) h1 h2); + * Besides the informative goal, the user can replace those parameters (h1 + * and h2 above) by the proper ones then normalize the assertion by using + * the appropriate command to get a valid assertion. *) + bmap : bind_map (typ & bool & term); + (* Whenever we introduce a new variable, we check whether it shadows another + * variable because it has the same name, and put it in the below + * list. 
Of course, for the F* internals such shadowing is not an issue, because + * the index of every variable should be different, but this is very important + * when generating code for the user *) + svars : list (bv & typ); +} + +let get_env (e:genv) : env = e.env +let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap +let mk_genv env bmap svars : genv = Mkgenv env bmap svars +let mk_init_genv env : genv = mk_genv env [] [] + +val genv_to_string : genv -> Tac string +let genv_to_string ge = + let binder_to_string (b : binder) : Tac string = + abv_to_string (bv_of_binder b) ^ "\n" + in + let binders_str = map binder_to_string (binders_of_env ge.env) in + let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string = + let bv, (_sort, abs, t) = e in + "(" ^ abv_to_string bv ^" -> (" ^ + string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n" + in + let bmap_str = map bmap_elem_to_string ge.bmap in + let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in + let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in + "> env:\n" ^ flatten binders_str ^ + "> bmap:\n" ^ flatten bmap_str ^ + "> svars:\n" ^ flatten svars_str + +let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) = + bind_map_get ge.bmap b + +let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) = + (* tweak return a bit to include sort *) + match bind_map_get_from_name ge.bmap name with + | None -> None + | Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x)) + +/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the +/// term the binder is bound to (in a `let _ = _ in` construct for example). +let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv = + let br = mk_binder b sort in + let sv = genv_get_from_name ge (name_of_bv b) in + let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in + let e' = push_binder ge.env br in + let tm = if Some? t then Some?.v t else pack (Tv_Var b) in + let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in + mk_genv e' bmap' svars' + +let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv = + genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t + +/// Check if a binder is shadowed by another more recent binder +let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool = + List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars + +let binder_is_shadowed (ge : genv) (b : binder) : Tot bool = + bv_is_shadowed ge (bv_of_binder b) + +let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) = + List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl + +let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) = + List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl + +val bv_is_abstract : genv -> bv -> Tot bool +let bv_is_abstract ge bv = + match genv_get ge bv with + | None -> false + | Some (_, abs, _) -> abs + +val binder_is_abstract : genv -> binder -> Tot bool +let binder_is_abstract ge b = + bv_is_abstract ge (bv_of_binder b) + +val genv_abstract_bvs : genv -> Tot (list (bv & typ)) +let genv_abstract_bvs ge = + List.Tot.concatMap + (fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap + +/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library +/// We make sure the name is fresh because we need to be able to generate valid code +/// (it is thus not enough to have a fresh integer). 
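(* Editorial illustration (not part of this module): the freshness scheme used
   below tries the candidates "x0", "x1", ... for basename "x" and keeps the
   first one that is not already bound in the environment. [name_is_taken] is a
   hypothetical helper, shown only to make one probe explicit; the actual loop
   is [_fresh_bv] just below. *)
let name_is_taken (binder_names : list string) (basename : string) (i : nat) : Tot bool =
  (* A candidate is rejected when "basename ^ i" already names a binder in scope *)
  List.Tot.mem (basename ^ string_of_int i) binder_names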
+let rec _fresh_bv binder_names basename i : Tac bv = + let name = basename ^ string_of_int i in + (* In worst case the performance is quadratic in the number of binders. + * TODO: fix that, it actually probably happens for anonymous variables ('_') *) + if List.Tot.mem name binder_names then _fresh_bv binder_names basename (i+1) + else fresh_bv_named name + +let fresh_bv (e : env) (basename : string) : Tac bv = + let binders = binders_of_env e in + let binder_names = Tactics.map name_of_binder binders in + _fresh_bv binder_names basename 0 + +let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder = + let bv = fresh_bv e basename in + mk_binder bv ty + +let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) = + let b = fresh_binder ge.env basename ty in + (* TODO: we can have a shortcircuit push (which performs less checks) *) + let ge' = genv_push_binder ge b true None in + ge', b + +// TODO: actually we should use push_fresh_bv more +let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) = + let b = fresh_binder e basename ty in + let e' = push_binder e b in + e', b + +let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) = + let ge', b = genv_push_fresh_binder ge basename ty in + ge', bv_of_binder b + +val push_fresh_var : env -> string -> typ -> Tac (term & binder & env) +let push_fresh_var e0 basename ty = + let e1, b1 = push_fresh_binder e0 basename ty in + let v1 = pack (Tv_Var (bv_of_binder b1)) in + v1, b1, e1 + +val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv) +let genv_push_fresh_var ge0 basename ty = + let ge1, b1 = genv_push_fresh_binder ge0 basename ty in + let v1 = pack (Tv_Var (bv_of_binder b1)) in + v1, b1, ge1 + +val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env) +let push_two_fresh_vars e0 basename ty = + let e1, b1 = push_fresh_binder e0 basename ty in + let e2, b2 = push_fresh_binder e1 basename ty in + let v1 = pack (Tv_Var (bv_of_binder b1)) in + let v2 = pack (Tv_Var (bv_of_binder b2)) in + v1, b1, v2, b2, e2 + +val genv_push_two_fresh_vars : genv -> string -> typ -> Tac (term & binder & term & binder & genv) +let genv_push_two_fresh_vars ge0 basename ty = + let ge1, b1 = genv_push_fresh_binder ge0 basename ty in + let ge2, b2 = genv_push_fresh_binder ge1 basename ty in + let v1 = pack (Tv_Var (bv_of_binder b1)) in + let v2 = pack (Tv_Var (bv_of_binder b2)) in + v1, b1, v2, b2, ge2 + + + +(*** Substitutions *) +/// Substitutions + +/// Custom substitutions using the normalizer. This is the easiest and safest +/// way to perform a substitution: if you want to substitute [v] with [t] in [exp], +/// just normalize [(fun v -> exp) t]. Note that this may be computationally expensive. 
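(* Editorial illustration (not part of this module): the normalizer-based
   substitution described above, specialized to a single variable.
   [norm_subst_one] is a hypothetical name; it builds the redex
   [(fun v -> exp) t] and normalizes it, exactly as [norm_apply_subst] below
   does for a whole list of bindings. *)
let norm_subst_one (e : env) (v : bv) (ty : typ) (t exp : term) : Tac term =
  (* (fun (v:ty) -> exp) applied to t, then reduced by the normalizer *)
  let redex = mk_e_app (mk_abs [mk_binder v ty] exp) [t] in
  norm_term_env e [] redex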
+val norm_apply_subst : env -> term -> list ((bv & typ) & term) -> Tac term +val norm_apply_subst_in_comp : env -> comp -> list ((bv & typ) & term) -> Tac comp + +let norm_apply_subst e t subst = + let bl, vl = unzip subst in + let bl = List.Tot.map (fun (bv,ty) -> mk_binder bv ty) bl in + let t1 = mk_abs bl t in + let t2 = mk_e_app t1 vl in + norm_term_env e [] t2 + +let norm_apply_subst_in_comp e c subst = + let subst = (fun x -> norm_apply_subst e x subst) in + let subst_in_aqualv a : Tac aqualv = + match a with + | Q_Implicit + | Q_Explicit -> a + | Q_Meta t -> Q_Meta (subst t) + in + match inspect_comp c with + | C_Total ret -> + let ret = subst ret in + pack_comp (C_Total ret) + | C_GTotal ret -> + let ret = subst ret in + pack_comp (C_GTotal ret) + | C_Lemma pre post patterns -> + let pre = subst pre in + let post = subst post in + let patterns = subst patterns in + pack_comp (C_Lemma pre post patterns) + | C_Eff us eff_name result eff_args decrs -> + let result = subst result in + let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in + let decrs = map subst decrs in + pack_comp (C_Eff us eff_name result eff_args decrs) + +/// As substitution with normalization is very expensive, we implemented another +/// technique which works by exploring terms. This is super fast, but the terms +/// seem not to be reconstructed in the same way, which has a big impact on pretty printing. +/// For example, terms like [A /\ B] get printed as [Prims.l_and A B]. +val deep_apply_subst : env -> term -> list (bv & term) -> Tac term +// Whenever we encounter a construction which introduces a binder, we need to apply +// the substitution in the binder type. Note that this gives a new binder, with +// which we need to replace the old one in what follows. +// Also note that it should be possible to rewrite [deep_apply_subst] in terms of [visit_tm], +// but [deep_apply_subst] seems to be a bit more precise with regard to type replacements (not +// sure it is really important, though). 
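(* Editorial illustration (not part of this module): the substitution argument
   threaded through [deep_apply_subst] is just a [bind_map term] from the
   Environments section above, so resolving one variable occurrence is a plain
   lookup, leaving the variable unchanged when it is unbound.
   [subst_var_occurrence] is a hypothetical name used only for this sketch. *)
let subst_var_occurrence (subst : list (bv & term)) (b : bv) : Tac term =
  match bind_map_get subst b with
  | None -> pack (Tv_Var b)   (* not substituted: keep the variable *)
  | Some t' -> t'             (* substituted: use the mapped term *)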
+val deep_apply_subst_in_bv : env -> bv -> list (bv & term) -> Tac (bv & list (bv & term)) +val deep_apply_subst_in_binder : env -> binder -> list (bv & term) -> Tac (binder & list (bv & term)) +val deep_apply_subst_in_comp : env -> comp -> list (bv & term) -> Tac comp +val deep_apply_subst_in_pattern : env -> pattern -> list (bv & term) -> Tac (pattern & list (bv & term)) + +let rec deep_apply_subst e t subst = + match inspect t with + | Tv_Var b -> + begin match bind_map_get subst b with + | None -> t + | Some t' -> t' + end + | Tv_BVar b -> + (* Note: Tv_BVar shouldn't happen *) + begin match bind_map_get subst b with + | None -> t + | Some t' -> t' + end + | Tv_FVar _ -> t + | Tv_App hd (a,qual) -> + let hd = deep_apply_subst e hd subst in + let a = deep_apply_subst e a subst in + pack (Tv_App hd (a, qual)) + | Tv_Abs br body -> + let body = deep_apply_subst e body subst in + pack (Tv_Abs br body) + | Tv_Arrow br c -> + let br, subst = deep_apply_subst_in_binder e br subst in + let c = deep_apply_subst_in_comp e c subst in + pack (Tv_Arrow br c) + | Tv_Type _ -> t + | Tv_Refine bv sort ref -> + let sort = deep_apply_subst e sort subst in + let bv, subst = deep_apply_subst_in_bv e bv subst in + let ref = deep_apply_subst e ref subst in + pack (Tv_Refine bv sort ref) + | Tv_Const _ -> t + | Tv_Uvar _ _ -> t + | Tv_Let recf attrs bv ty def body -> + (* No need to substitute in the attributes - that we filter for safety *) + let ty = deep_apply_subst e ty subst in + let def = deep_apply_subst e def subst in + let bv, subst = deep_apply_subst_in_bv e bv subst in + let body = deep_apply_subst e body subst in + pack (Tv_Let recf [] bv ty def body) + | Tv_Match scrutinee ret_opt branches -> (* TODO: type of pattern variables *) + let scrutinee = deep_apply_subst e scrutinee subst in + let ret_opt = map_opt (fun (b, asc) -> + let b, subst = deep_apply_subst_in_binder e b subst in + let asc = + match asc with + | Inl t, tacopt, use_eq -> + Inl (deep_apply_subst e t subst), + map_opt (fun tac -> deep_apply_subst e tac subst) tacopt, + use_eq + | Inr c, tacopt, use_eq -> + Inr (deep_apply_subst_in_comp e c subst), + map_opt (fun tac -> deep_apply_subst e tac subst) tacopt, + use_eq in + b, asc) ret_opt in + (* For the branches: we don't need to explore the patterns *) + let deep_apply_subst_in_branch branch = + let pat, tm = branch in + let pat, subst = deep_apply_subst_in_pattern e pat subst in + let tm = deep_apply_subst e tm subst in + pat, tm + in + let branches = map deep_apply_subst_in_branch branches in + pack (Tv_Match scrutinee ret_opt branches) + | Tv_AscribedT exp ty tac use_eq -> + let exp = deep_apply_subst e exp subst in + let ty = deep_apply_subst e ty subst in + (* no need to apply it on the tactic - that we filter for safety *) + pack (Tv_AscribedT exp ty None use_eq) + | Tv_AscribedC exp c tac use_eq -> + let exp = deep_apply_subst e exp subst in + let c = deep_apply_subst_in_comp e c subst in + (* no need to apply it on the tactic - that we filter for safety *) + pack (Tv_AscribedC exp c None use_eq) + | _ -> + (* Unknown *) + t + +and deep_apply_subst_in_bv e bv subst = + (* No substitution needs to happen for variables + (there is no longer a sort). But, shift the substitution. *) + bv, (bv, pack (Tv_Var bv))::subst + +(* + * AR: TODO: should apply subst in attrs? 
+ *) +and deep_apply_subst_in_binder e br subst = + let open inspect_binder br <: binder_view in + let binder_sort = deep_apply_subst e binder_sort subst in + let binder_bv, subst = deep_apply_subst_in_bv e binder_bv subst in + pack_binder { + binder_bv=binder_bv; + binder_qual=binder_qual; + binder_attrs=binder_attrs; + binder_sort=binder_sort; + }, subst + +and deep_apply_subst_in_comp e c subst = + let subst = (fun x -> deep_apply_subst e x subst) in + let subst_in_aqualv a : Tac aqualv = + match a with + | Q_Implicit + | Q_Explicit -> a + | Q_Meta t -> Q_Meta (subst t) + in + match inspect_comp c with + | C_Total ret -> + let ret = subst ret in + pack_comp (C_Total ret) + | C_GTotal ret -> + let ret = subst ret in + pack_comp (C_GTotal ret) + | C_Lemma pre post patterns -> + let pre = subst pre in + let post = subst post in + let patterns = subst patterns in + pack_comp (C_Lemma pre post patterns) + | C_Eff us eff_name result eff_args decrs -> + let result = subst result in + let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in + let decrs = map subst decrs in + pack_comp (C_Eff us eff_name result eff_args decrs) + +and deep_apply_subst_in_pattern e pat subst = + match pat with + | Pat_Constant _ -> pat, subst + | Pat_Cons fv us patterns -> + (* The types of the variables in the patterns should be independent of each + * other: we use fold_left only to incrementally update the substitution *) + let patterns, subst = + fold_right (fun (pat, b) (pats, subst) -> + let pat, subst = deep_apply_subst_in_pattern e pat subst in + ((pat, b) :: pats, subst)) patterns ([], subst) + in + Pat_Cons fv us patterns, subst + | Pat_Var bv st -> + let st = Sealed.seal (deep_apply_subst e (unseal st) subst) in + let bv, subst = deep_apply_subst_in_bv e bv subst in + Pat_Var bv st, subst + | Pat_Dot_Term eopt -> + Pat_Dot_Term (map_opt (fun t -> deep_apply_subst e t subst) eopt), subst + +/// The substitution functions actually used in the rest of the meta F* functions. +/// For now, we use normalization because even though it is sometimes slow it +/// gives prettier terms, and readability is the priority. In order to mitigate +/// the performance issue, we try to minimize the number of calls to those functions, +/// by doing lazy instantiations for example (rather than incrementally apply +/// substitutions in a term, accumulate the substitutions and perform them all at once). +/// TODO: would it be good to have a native substitution function in F* +let apply_subst = norm_apply_subst +let apply_subst_in_comp = norm_apply_subst_in_comp + +val opt_apply_subst : env -> option term -> list ((bv & typ) & term) -> Tac (option term) +let opt_apply_subst e opt_t subst = + match opt_t with + | None -> None + | Some t -> Some (apply_subst e t subst) + +(*** Variable shadowing *) +/// Introduce fresh variables to generate a substitution for the variables shadowed +/// in the current environment. +val generate_shadowed_subst : genv -> Tac (genv & list (bv & typ & bv)) + +/// In order to introduce variables with coherent types (the variables' types +/// may be dependent) and make things simpler, we build one big term: +/// [> (fun x1 x2 ... xn -> ()) +/// Then, for each variable, we introduce a fresh variable with the same type as +/// the outermost abstraction, apply the above term to this new variable and +/// normalize to "apply" the substitution and reveal the next binding. 
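(* Editorial illustration (not part of this module): a single step of the
   scheme described above. Given the abstraction [t = (fun (x:ty) ... -> ())]
   and a fresh variable [fresh], applying [t] to [fresh] and normalizing
   reveals the next binding, with [x] replaced by [fresh].
   [shadowed_subst_step] is a hypothetical name; the recursive definition
   below iterates this step over all the shadowed variables. *)
let shadowed_subst_step (e : env) (t : term) (fresh : bv) : Tac term =
  norm_term_env e [] (mk_e_app t [pack (Tv_Var fresh)])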
+ +let rec _generate_shadowed_subst (ge:genv) (t:term) (bvl : list (bv & typ)) : + Tac (genv & list (bv & typ & bv)) = + match bvl with + | [] -> ge, [] + | old_bv :: bvl' -> + match inspect t with + | Tv_Abs b _ -> + (* Introduce the new binder *) + let bv = (inspect_binder b).binder_bv in + let bvv = inspect_bv bv in + let ty = binder_sort b in + let name = unseal bvv.bv_ppname in + let ge1, fresh = genv_push_fresh_bv ge ("__" ^ name) ty in + let t1 = mk_e_app t [pack (Tv_Var fresh)] in + let t2 = norm_term_env ge1.env [] t1 in + (* Recursion *) + let ge2, nbvl = _generate_shadowed_subst ge1 t2 bvl' in + (* Return *) + ge2, (fst old_bv, ty, fresh) :: nbvl + | _ -> mfail "_subst_with_fresh_vars: not a Tv_Abs" + +let generate_shadowed_subst ge = + (* We need to replace the variables starting with the oldest *) + let bvl = List.Tot.rev ge.svars in + let bl = List.Tot.map (fun (bv, sort) -> mk_binder bv sort) bvl in + let dummy = mk_abs bl (`()) in + _generate_shadowed_subst ge dummy bvl diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.Effectful.fst b/stage0/ulib/experimental/FStar.InteractiveHelpers.Effectful.fst new file mode 100644 index 00000000000..301763e6ad6 --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.Effectful.fst @@ -0,0 +1,962 @@ +module FStar.InteractiveHelpers.Effectful + +module HS = FStar.HyperStack + +open FStar.List.Tot +open FStar.Tactics +open FStar.Mul +open FStar.InteractiveHelpers.Base +open FStar.InteractiveHelpers.ExploreTerm +open FStar.InteractiveHelpers.Propositions + +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +/// Effectful term analysis: retrieve information about an effectful term, including +/// its return type, its arguments, its correctly instantiated pre/postcondition, etc. 
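(* Editorial illustration (not part of this module): for a call such as [f x]
   where [f : a -> ST b (requires pre) (ensures post)], the analysis below is
   meant to recover the head [f]; the parameter [x] together with its inferred
   and expected types; the return type [b]; and the pre/postcondition
   instantiated on the actual arguments, roughly [pre h0] and [post h0 res h1].
   The names [f], [x], [pre] and [post] are hypothetical and used only for
   this sketch. *)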
+ +#set-options "--z3rlimit 15 --fuel 0 --ifuel 1" + + +(*** Effectful term analysis *) +/// Cast information +noeq type cast_info = { + term : term; + p_ty : option type_info; // The type of the term + exp_ty : option type_info; // The type of the expected parameter +} + +let mk_cast_info t p_ty exp_ty : cast_info = + Mkcast_info t p_ty exp_ty + +val cast_info_to_string : cast_info -> Tac string +let cast_info_to_string info = + "Mkcast_info (" ^ term_to_string info.term ^ ") (" ^ + option_to_string type_info_to_string info.p_ty ^ ") (" ^ + option_to_string type_info_to_string info.exp_ty ^ ")" + +/// Extracts the effectful information from a computation +noeq type effect_info = { + ei_type : effect_type; + ei_ret_type : type_info; + ei_pre : option term; + ei_post : option term; +} + +let mk_effect_info = Mkeffect_info + +/// Convert a ``typ_or_comp`` to cast information +val effect_info_to_string : effect_info -> Tac string +let effect_info_to_string c = + "Mkeffect_info " ^ + effect_type_to_string c.ei_type ^ " (" ^ + option_to_string term_to_string c.ei_pre ^ ") (" ^ + type_info_to_string c.ei_ret_type ^ ") (" ^ + option_to_string term_to_string c.ei_post ^ ")" + +/// Effectful term information +noeq type eterm_info = { + einfo : effect_info; + (* Head and parameters of the decomposition of the term into a function application *) + head : term; + parameters : list cast_info; +} + +val eterm_info_to_string : eterm_info -> Tac string +let eterm_info_to_string info = + let params = map (fun x -> "(" ^ cast_info_to_string x ^ "); \n") info.parameters in + let params_str = List.Tot.fold_left (fun x y -> x ^ y) "" params in + "Mketerm_info (" ^ + effect_info_to_string info.einfo ^ ") (" ^ + term_to_string info.head ^ ")\n[" ^ + params_str ^ "]" + +let mk_eterm_info = Mketerm_info + + +(**** Step 1 *) +/// Decompose a function application between its body and parameters +val decompose_application : env -> term -> Tac (term & list cast_info) + +#push-options "--ifuel 1" +let rec decompose_application_aux (e : env) (t : term) : + Tac (term & list cast_info) = + match inspect t with + | Tv_App hd (a, qualif) -> + let hd0, params = decompose_application_aux e hd in + (* Parameter type *) + let a_type = get_type_info e a in + (* Type expected by the function *) + let hd_ty = safe_tc e hd in + let param_type = + match hd_ty with + | None -> None + | Some hd_ty' -> + match inspect hd_ty' with + | Tv_Arrow b c -> + Some (get_type_info_from_type (binder_sort b)) + | _ -> None + in + let cast_info = mk_cast_info a a_type param_type in + hd0, cast_info :: params + | _ -> t, [] +#pop-options + +let decompose_application e t = + let hd, params = decompose_application_aux e t in + hd, List.Tot.rev params + +/// Computes an effect type, its return type and its (optional) pre and post +val comp_view_to_effect_info : dbg:bool -> comp_view -> Tac (option effect_info) + +let comp_view_to_effect_info dbg cv = + match cv with + | C_Total ret_ty -> + let ret_type_info = get_type_info_from_type ret_ty in + Some (mk_effect_info E_Total ret_type_info None None) + | C_GTotal ret_ty -> + let ret_type_info = get_type_info_from_type ret_ty in + Some (mk_effect_info E_Total ret_type_info None None) + | C_Lemma pre post patterns -> + (* We use unit as the return type information *) + let pre = prettify_term dbg pre in + let post = prettify_term dbg post in + Some (mk_effect_info E_Lemma unit_type_info (Some pre) (Some post)) + | C_Eff univs eff_name ret_ty eff_args _ -> + print_dbg dbg ("comp_view_to_effect_info: C_Eff " 
^ flatten_name eff_name); + let ret_type_info = get_type_info_from_type ret_ty in + let etype = effect_name_to_type eff_name in + let mk_res = mk_effect_info etype ret_type_info in + let eff_args = map (fun (x,a) -> (prettify_term dbg x, a)) eff_args in + begin match etype, eff_args with + | E_PURE, [(pre, _)] -> Some (mk_res (Some pre) None) + | E_Pure, [(pre, _); (post, _)] + | E_Stack, [(pre, _); (post, _)] + | E_ST, [(pre, _); (post, _)] -> Some (mk_res (Some pre) (Some post)) + (* If the effect is unknown and there are two parameters or less, we make the + * guess that the first one is a pre-condition and the second one is a + * post-condition *) + | E_Unknown, [] -> Some (mk_res None None) + | E_Unknown, [(pre, _)] -> Some (mk_res (Some pre) None) + | E_Unknown, [(pre, _); (post, _)] -> Some (mk_res (Some pre) (Some post)) + | _ -> None + end + +val comp_to_effect_info : dbg:bool -> comp -> Tac (option effect_info) + +let comp_to_effect_info dbg c = + let cv : comp_view = inspect_comp c in + comp_view_to_effect_info dbg cv + +val compute_effect_info : dbg:bool -> env -> term -> Tac (option effect_info) + +let compute_effect_info dbg e tm = + match safe_tcc e tm with + | Some c -> comp_to_effect_info dbg c + | None -> None + +/// Converts a ``typ_or_comp`` to an ``effect_info`` by flushing the instantiations +/// stored in the ``typ_or_comp``. +let typ_or_comp_to_effect_info (dbg : bool) (ge : genv) (c : typ_or_comp) : + Tac effect_info = +(* match c with + | TC_Typ ty pl num_unflushed -> + let tinfo = get_type_info_from_type ty in + mk_effect_info E_Total tinfo None None + | TC_Comp cv pl num_unflushed -> + let opt_einfo = comp_to_effect_info dbg cv in + match opt_einfo with + | None -> mfail ("typ_or_comp_to_effect_info failed on: " ^ acomp_to_string cv) + | Some einfo -> einfo *) + let c = flush_typ_or_comp dbg ge.env c in + match c with + | TC_Typ ty _ _ -> + let tinfo = get_type_info_from_type ty in + mk_effect_info E_Total tinfo None None + | TC_Comp cv _ _ -> + let opt_einfo = comp_to_effect_info dbg cv in + match opt_einfo with + | None -> mfail ("typ_or_comp_to_effect_info failed on: " ^ acomp_to_string cv) + | Some einfo -> einfo + + +/// ``tcc`` often returns a lifted effect which is not what we want (ex.: a +/// lemma called inside a Stack function may have been lifted to Stack, but +/// when studying this term effect, we want to retrieve its non-lifted effect). +/// The workaround is to decompose the term if it is an application, then retrieve +/// the effect of the head, and reconstruct it. +/// Note: I tried inspecting then repacking the term before calling ``tcc`` to +/// see if it allows to "forget" the context: it doesn't work. 
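(* Editorial illustration (not part of this module): the workaround described
   above, reduced to its core. [head_comp] is a hypothetical helper that
   typechecks only the head of an application, so the reported effect is the
   head's own effect rather than its lift into the caller's effect;
   [tcc_no_lift] below additionally re-instantiates the resulting computation
   type with the actual arguments. *)
let head_comp (e : env) (t : term) : Tac comp =
  let hd, _ = collect_app t in
  tcc e hd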
+val tcc_no_lift : env -> term -> Tac comp + +let tcc_no_lift e t = + match inspect t with + | Tv_App _ _ -> + let hd, args = collect_app t in + let c = tcc e hd in + inst_comp e c (List.Tot.map fst args) + | _ -> + (* Fall back to ``tcc`` *) + tcc e t + +/// Returns the effectful information about a term +val compute_eterm_info : dbg:bool -> env -> term -> Tac eterm_info + +#push-options "--ifuel 2" +let compute_eterm_info (dbg : bool) (e : env) (t : term) = + (* Decompose the term if it is a function application *) + let hd, parameters = decompose_application e t in + try + begin + let c : comp = tcc_no_lift e t in + let opt_einfo = comp_to_effect_info dbg c in + match opt_einfo with + | None -> mfail ("compute_eterm_info: failed on: " ^ term_to_string t) + | Some einfo -> + mk_eterm_info einfo hd parameters + end + with + | TacticFailure (msg, _) -> + mfail_doc ([text "compute_eterm_info: failure"] @ msg) + | e -> raise e +#pop-options + +(***** Types, casts and refinements *) + +(* TODO: those are not needed anymore *) +let has_refinement (ty:type_info) : Tot bool = + Some? ty.refin + +let get_refinement (ty:type_info{Some? ty.refin}) : Tot term = + Some?.v ty.refin + +let get_opt_refinment (ty:type_info) : Tot (option term) = + ty.refin + +let get_rawest_type (ty:type_info) : Tot typ = + ty.ty + +/// Compare the type of a parameter and its expected type +type type_comparison = | Refines | Same_raw_type | Unknown + +#push-options "--ifuel 1" +let type_comparison_to_string c = + match c with + | Refines -> "Refines" + | Same_raw_type -> "Same_raw_type" + | Unknown -> "Unknown" +#pop-options + +// TODO: without debugging information generation, is Tot +let compare_types (dbg : bool) (info1 info2 : type_info) : + Tac (c:type_comparison{c = Same_raw_type ==> has_refinement info2}) = + print_dbg dbg "[> compare_types"; + if term_eq info1.ty info2.ty then + let _ = print_dbg dbg "-> types are equal" in + if has_refinement info2 then + let _ = print_dbg dbg "-> 2nd type has refinement" in + // This doesn't work like in C: we need to check if info1 has a + // refinement, then we can compare the refinements inside ANOTHER if + if has_refinement info1 then + let _ = print_dbg dbg "-> 1st type has refinement" in + if term_eq (get_refinement info1) (get_refinement info2) then + let _ = print_dbg dbg "-> Refines" in + Refines + else + let _ = print_dbg dbg "-> Same_raw_type" in + Same_raw_type // Same raw type but can't say anything about the expected refinement + else + let _ = print_dbg dbg "-> 1st type has no refinement" in + let _ = print_dbg dbg "-> Same_raw_type" in + Same_raw_type // Same raw type but can't say anything about the expected refinement + else + let _ = print_dbg dbg "-> 2nd type has no refinement: Refines" in + Refines // The first type is more precise than the second type + else + let _ = print_dbg dbg "types are not equal" in + Unknown + +let compare_cast_types (dbg : bool) (p:cast_info) : + Tac (c:type_comparison{ + ((c = Refines \/ c = Same_raw_type) ==> (Some? p.p_ty /\ Some? p.exp_ty)) /\ + (c = Same_raw_type ==> has_refinement (Some?.v p.exp_ty))}) = + print_dbg dbg "[> compare_cast_types"; + match p.p_ty, p.exp_ty with + | Some info1, Some info2 -> + compare_types dbg info1 info2 + | _ -> Unknown + +(*/// Retrieve the list of types from the parameters stored in ``typ_or_comp``. 
+val typ_or_comp_to_param_types : typ_or_comp -> Tot (list typ) + +let typ_or_comp_to_param_types c = + let pl = params_of_typ_or_comp c in + List.Tot.map type_of_binder pl *) + +(**** Step 2 *) +/// The retrieved type refinements and post-conditions are not instantiated (they +/// are lambda functions): instantiate them to get propositions. + + +/// Generate a term of the form [has_type x ty] +val mk_has_type : term -> typ -> Tac term +let mk_has_type t ty = + let params = [(ty, Q_Implicit); (t, Q_Explicit); (ty, Q_Explicit)] in + mk_app (`has_type) params + + +// TODO: I don't understand why I need ifuel 2 here +#push-options "--ifuel 2" +/// Generate the propositions equivalent to a correct type cast. +/// Note that the type refinements need to be instantiated. +val cast_info_to_propositions : bool -> genv -> cast_info -> Tac (list proposition) +let cast_info_to_propositions dbg ge ci = + print_dbg dbg ("[> cast_info_to_propositions:\n" ^ cast_info_to_string ci); + match compare_cast_types dbg ci with + | Refines -> + print_dbg dbg ("-> Comparison result: Refines"); + [] + | Same_raw_type -> + print_dbg dbg ("-> Comparison result: Same_raw_type"); + let refin = get_refinement (Some?.v ci.exp_ty) in + let inst_refin = mk_app_norm ge.env refin [ci.term] in + [inst_refin] + | Unknown -> + print_dbg dbg ("-> Comparison result: Unknown"); + match ci.p_ty, ci.exp_ty with + | Some p_ty, Some e_ty -> + let p_rty = get_rawest_type p_ty in + let e_rty = get_rawest_type e_ty in + (* For the type cast, we generate an assertion of the form: + * [> has_type (p <: type_of_p) expected_type + * The reason is that we want the user to know which parameter is + * concerned (hence the ``has_type``), and which types should be + * related (hence the ascription). + *) + let ascr_term = pack (Tv_AscribedT ci.term p_rty None false) in + let has_type_params = [(p_rty, Q_Implicit); (ascr_term, Q_Explicit); (e_rty, Q_Explicit)] in + let type_cast = mk_app (`has_type) has_type_params in + (* Expected type's refinement *) + let inst_opt_refin = opt_mk_app_norm ge.env e_ty.refin [ci.term] in + opt_cons inst_opt_refin [type_cast] + | _ -> [] +#pop-options + +/// Generates a list of propositions from a list of ``cast_info``. Note that +/// the user should revert the list before printing the propositions. +val cast_info_list_to_propositions : bool -> genv -> list cast_info -> Tac (list proposition) +let cast_info_list_to_propositions dbg ge ls = + let lsl = map (cast_info_to_propositions dbg ge) ls in + flatten lsl + +/// When dealing with unknown effects, we try to guess what the pre and post-conditions +/// are. 
The effects are indeed very likely to have a pre and a post-condition, +/// and to be parameterized with an internal effect state, so that the pre and posts +/// have the following shapes: +/// - pre : STATE -> Type0 +/// - post : STATE -> RET -> STATE -> Type0 +/// Or (no state/pure): +/// - pre : Type0 +/// - post : RET -> Type0 +/// We try to detect that with the following functions: +noeq type pre_post_type = +| PP_Unknown | PP_Pure +| PP_State : (state_type:term) -> pre_post_type + +val compute_pre_type : bool -> env -> term -> Tac pre_post_type +let compute_pre_type dbg e pre = + print_dbg dbg "[> compute_pre_type"; + match safe_tc e pre with + | None -> + print_dbg dbg "safe_tc failed"; + PP_Unknown + | Some ty -> + print_dbg dbg "safe_tc succeeded"; + let brs, c = collect_arr_bs ty in + print_dbg dbg ("num binders: " ^ string_of_int (List.Tot.length brs)); + match brs, is_total_or_gtotal c with + | [], true -> + print_dbg dbg "PP_Pure"; + PP_Pure + | [b], true -> + print_dbg dbg ("PP_State " ^ (term_to_string (type_of_binder b))); + PP_State (type_of_binder b) + | _ -> + print_dbg dbg "PP_Unknown"; + PP_Unknown + +val opt_remove_refin : typ -> Tac typ +let opt_remove_refin ty = + match inspect ty with + | Tv_Refine _ sort _ -> sort + | _ -> ty + +val compute_post_type : bool -> env -> term -> term -> Tac pre_post_type +let compute_post_type dbg e ret_type post = + print_dbg dbg "[> compute_post_type"; + let get_tot_ret_type c : Tac (option term_view) = + match get_total_or_gtotal_ret_type c with + | Some ret_ty -> Some (inspect ret_ty) + | None -> None + in + match safe_tc e post with + | None -> + print_dbg dbg "safe_tc failed"; + PP_Unknown + | Some ty -> + print_dbg dbg "safe_tc succeeded"; + let brs, c = collect_arr_bs ty in + print_dbg dbg ("num binders: " ^ string_of_int (List.Tot.length brs)); + match brs, is_total_or_gtotal c with + | [r], true -> + (* Pure *) + print_dbg dbg "PP_Pure"; + PP_Pure + | [s1; r; s2], true -> + (* Stateful: check that the state types are coherent *) + let r_ty = type_of_binder r in + let s1_ty = type_of_binder s1 in + (* After testing with Stack: the final state seems to have a refinement + * (which gives the postcondition) so we need to remove it to match + * it against the initial state *) + let s2_ty = opt_remove_refin (type_of_binder s2) in + let ret_type_eq = term_eq ret_type r_ty in + let state_type_eq = term_eq s1_ty s2_ty in + print_dbg dbg ("- ret type:\n-- target: " ^ term_to_string ret_type ^ + "\n-- binder: " ^ term_to_string r_ty); + print_dbg dbg ("- state types:\n-- binder1: " ^ term_to_string s1_ty ^ + "\n-- binder2: " ^ term_to_string s2_ty); + print_dbg dbg ("- ret type equality: " ^ string_of_bool ret_type_eq); + print_dbg dbg ("- state types equality: " ^ string_of_bool state_type_eq); + if ret_type_eq && state_type_eq + then + begin + print_dbg dbg ("PP_State" ^ term_to_string (type_of_binder s1)); + PP_State (type_of_binder s1) + end + else + begin + print_dbg dbg "PP_Unknown"; + PP_Unknown + end + | _ -> + print_dbg dbg "PP_Unknown"; + PP_Unknown + +val check_pre_post_type : bool -> env -> term -> term -> term -> Tac pre_post_type +let check_pre_post_type dbg e pre ret_type post = + print_dbg dbg "[> check_pre_post_type"; + match compute_pre_type dbg e pre, compute_post_type dbg e ret_type post with + | PP_Pure, PP_Pure -> + print_dbg dbg "PP_Pure, PP_Pure"; + PP_Pure + | PP_State ty1, PP_State ty2 -> + print_dbg dbg "PP_State, PP_State"; + if term_eq ty1 ty2 then PP_State ty1 else PP_Unknown + | _ -> + print_dbg dbg "_, 
_"; + PP_Unknown + +val check_opt_pre_post_type : bool -> env -> option term -> term -> option term -> Tac (option pre_post_type) +let check_opt_pre_post_type dbg e opt_pre ret_type opt_post = + print_dbg dbg "[> check_opt_pre_post_type"; + match opt_pre, opt_post with + | Some pre, Some post -> + print_dbg dbg "Some pre, Some post"; + Some (check_pre_post_type dbg e pre ret_type post) + | Some pre, None -> + print_dbg dbg "Some pre, None"; + Some (compute_pre_type dbg e pre) + | None, Some post -> + print_dbg dbg "None, Some post"; + Some (compute_post_type dbg e ret_type post) + | None, None -> + print_dbg dbg "None, None"; + None + +val _introduce_variables_for_abs : genv -> typ -> Tac (list term & list binder & genv) +let rec _introduce_variables_for_abs ge ty = + match inspect ty with + | Tv_Arrow b c -> + let ge1, b1 = genv_push_fresh_binder ge ("__" ^ name_of_binder b) (type_of_binder b) in + let bv1 = bv_of_binder b1 in + let v1 = pack (Tv_Var bv1) in + begin match get_total_or_gtotal_ret_type c with + | Some ty1 -> + let vl, bl, ge2 = _introduce_variables_for_abs ge1 ty1 in + v1 :: vl, b1 :: bl, ge2 + | None -> [v1], [b1], ge1 + end + | _ -> [], [], ge + +val introduce_variables_for_abs : genv -> term -> Tac (list term & list binder & genv) +let introduce_variables_for_abs ge tm = + match safe_tc ge.env tm with + | Some ty -> _introduce_variables_for_abs ge ty + | None -> [], [], ge + +val introduce_variables_for_opt_abs : genv -> option term -> Tac (list term & list binder & genv) +let introduce_variables_for_opt_abs ge opt_tm = + match opt_tm with + | Some tm -> introduce_variables_for_abs ge tm + | None -> [], [], ge + + +val effect_type_is_stateful : effect_type -> Tot bool +let effect_type_is_stateful etype = + match etype with + | E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> false + | E_Stack | E_ST | E_Unknown -> true + +let is_st_get dbg t : Tac bool = + print_dbg dbg ("[> is_st_get:\n" ^ term_to_string t); + match inspect t with + | Tv_App hd (a, qual) -> + print_dbg dbg "-> Is Tv_App"; + begin match inspect hd with + | Tv_FVar fv -> + print_dbg dbg ("-> Head is Tv_FVar: " ^ fv_to_string fv); + fv_eq_name fv ["FStar"; "HyperStack"; "ST"; "get"] + | _ -> + print_dbg dbg "-> Head is not Tv_FVar"; + false + end + | _ -> + print_dbg dbg "-> Is not Tv_App"; + false + +let is_let_st_get dbg (t : term_view) = + print_dbg dbg ("[> is_let_st_get:\n" ^ term_to_string t); + match t with + | Tv_Let recf attrs bv ty def body -> + print_dbg dbg "The term is a let expression"; + if is_st_get dbg def then Some (bv, ty) else None + | _ -> + print_dbg dbg "The term is not a let expression"; + None + +// TODO: Define relation between parents and children in and use it in explore_term +// app: head or arg +// let : bv or def or child +// match: scrutinee or branch +// ascribed: e or ty + +/// Check if a term's computation is effectful. The return type is option +/// because we may not be able to retrieve the term computation. +val term_has_effectful_comp : bool -> env -> term -> Tac (option bool) +let term_has_effectful_comp dbg e tm = + print_dbg dbg "[> term_has_effectful_comp"; + let einfo_opt = compute_effect_info dbg e tm in + match einfo_opt with + | Some einfo -> + print_dbg dbg ("Effect type: " ^ effect_type_to_string einfo.ei_type); + Some (not (effect_type_is_pure einfo.ei_type)) + | None -> + print_dbg dbg "Could not compute effect info"; + None + +/// Check if a related term is effectful. 
This is used to look for instances of +/// ``HS.mem`` to instantiate pre/postconditions, which means that the term should +/// be a parent/child term of the term under study, as generated by ``explore_term`` +/// (otherwise the way we check that a term is effectful doesn't make sense). +/// The computation is an overapproximation: it may happen that, for instance, we +/// can't compute a term computation. In this case, we consider that the term is +/// effectful. There are also situations in which we may not be sure which term to +/// consider. +let related_term_is_effectul dbg ge tv : Tac bool = + let is_effectful tm = + term_has_effectful_comp dbg ge.env tm <> Some false + in + match tv with + | Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> false + | Tv_App hd (a, qual) -> + (* The term under focus should be the app itself or an argument *) + false + | Tv_Abs br body -> false + | Tv_Arrow br c0 -> false + | Tv_Type _ -> false + | Tv_Refine bv sort ref -> + false + | Tv_Const _ -> false + | Tv_Uvar _ _ -> false + | Tv_Let recf attrs bv ty def body -> is_effectful def + | Tv_Match scrutinee _ret_opt branches -> + (* TODO: we need to keep track of the relation between parents and children *) + (* We assume the term under focus is in one the branches of the match - this + * assumption is safe: in the worst case, we won't be able to find a mem to use. + * Besides, in practice it is uncommon (impossible?) to use an effectful term + * as the scrutinee of a match *) + is_effectful scrutinee + | Tv_AscribedT e ty tac _ -> false (* The focused term should be inside the ascription *) + | Tv_AscribedC e c tac _ -> false (* The focused term should be inside the ascription *) + | _ -> (* Unknown: keep things safe *) true + +/// Look for a term of the form ``let h = ST.get ()`` in a list of parent/children terms +/// and return the let-bound bv. Abort the search if we find a non-effectful term. +/// The typical usages of this function are the following: +/// - look for a state variable to instantiate the precondition of the term under focus +/// - look for state variables for the pre/postconditions of a term defined before +/// the term under focus. +val find_mem_in_related: + dbg:bool + -> ge:genv + -> tms:list term_view + -> Tac (option (bv & typ)) + +let rec find_mem_in_related dbg ge tms = + match tms with + | [] -> None + | tv :: tms' -> + print_dbg dbg ("[> find_mem_in_related:\n" ^ term_to_string tv); + match is_let_st_get dbg tv with + | Some bvt -> + print_dbg dbg "Term is of the form `let x = FStar.HyperStack.ST.get ()`: success"; + Some bvt + | None -> + print_dbg dbg "Term is not of the form `let x = FStar.HyperStack.ST.get ()`: continuing"; + if related_term_is_effectul dbg ge tv + then + begin + print_dbg dbg "Term is effectful: stopping here"; + None + end + else + begin + print_dbg dbg "Term is not effectful: continuing"; + find_mem_in_related dbg ge tms' + end + +// TODO: not used for now +/// Look for a term of the form ``let h = ST.get ()`` in a child term (the +/// focused term is supposed to be a subterm of the definition of a let-construct). 
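+/// Illustrative sketch of the terms this matches: in a child term of the shape
+/// [> let x = 3 in
+/// [> let h = ST.get () in
+/// [> ...
+/// the pure binding for ``x`` is traversed and the binding for ``h`` is
+/// returned; the search stops (returning nothing) as soon as it reaches a
+/// let-definition which may be effectful, or a term which is not a let-binding.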
+val find_mem_in_children: + dbg:bool + -> ge:genv + -> child:term + -> Tac (genv & option bv) + +let rec find_mem_in_children dbg ge child = + (* We stop whenever we find an expression which is not a let-binding *) + match inspect child with + | Tv_Let recf attrs bv ty def body -> + if is_st_get dbg def then ge, Some bv + else if term_has_effectful_comp dbg ge.env def <> Some false then ge, None + else + let ge1 = genv_push_bv ge bv ty false None in + find_mem_in_children dbg ge1 body + | _ -> ge, None + +/// Instantiates optional pre and post conditions +val pre_post_to_propositions : + dbg:bool + -> ge:genv + -> etype:effect_type + -> ret_value:term + -> ret_abs_binder:option binder + -> ret_type:type_info + -> opt_pre:option term + -> opt_post:option term + -> parents:list term_view (* to look for state variables for the pre *) + -> children:list term_view (* to look for state variables for the pre and post *) + -> Tac (genv & option proposition & option proposition) + +let pre_post_to_propositions dbg ge0 etype v ret_abs_binder ret_type opt_pre opt_post + parents children = + print_dbg dbg "[> pre_post_to_propositions: begin"; + print_dbg dbg ("- uninstantiated pre: " ^ option_to_string term_to_string opt_pre); + print_dbg dbg ("- uninstantiated post: " ^ option_to_string term_to_string opt_post); + let brs = match ret_abs_binder with | None -> [] | Some b -> [b] in + (* Analyze the pre and the postcondition and introduce the necessary variables *) + let ge3, (pre_values, pre_binders), (post_values, post_binders) = + match etype with + | E_Lemma -> + print_dbg dbg "E_Lemma"; + ge0, ([], []), ([(`())], []) + | E_Total | E_GTotal -> + print_dbg dbg "E_Total/E_GTotal"; + ge0, ([], []), ([], []) + | E_PURE | E_Pure -> + print_dbg dbg "E_PURE/E_Pure"; + ge0, ([], []), ([v], brs) + | E_Stack | E_ST -> + print_dbg dbg "E_Stack/E_ST"; + (* Look for state variables in the context *) + print_dbg dbg "Looking for the initial state in the context"; + let b1_opt = find_mem_in_related dbg ge0 parents in + print_dbg dbg "Looking for the final state in the context"; + let b2_opt = find_mem_in_related dbg ge0 children in + (* Introduce state variables if necessary *) + let opt_push_fresh_state opt_bvt basename ge : Tac (term & binder & genv) = + match opt_bvt with + | Some (bv, ty) -> pack (Tv_Var bv), mk_binder bv ty, ge + | None -> genv_push_fresh_var ge basename (`HS.mem) + in + let h1, b1, ge1 = opt_push_fresh_state b1_opt "__h0_" ge0 in + let h2, b2, ge2 = opt_push_fresh_state b2_opt "__h1_" ge1 in + ge2, ([h1], [b1]), ([h1; v; h2], List.Tot.flatten ([b1]::brs::[[b2]])) + | E_Unknown -> + (* We don't know what the effect is and the current pre and post-conditions + * are currently guesses. Introduce any necessary variable abstracted by + * those parameters *) + (* The pre and post-conditions are likely to have the same shape as + * one of Pure or Stack (depending on whether we use or not an internal + * state). 
We try to check that and to instantiate them accordingly *) + let pp_type = check_opt_pre_post_type dbg ge0.env opt_pre ret_type.ty opt_post in + begin match pp_type with + | Some PP_Pure -> + print_dbg dbg "PP_Pure"; + (* We only need the return value *) + ge0, ([], []), ([v], brs) + | Some (PP_State state_type) -> + print_dbg dbg "PP_State"; + (* Introduce variables for the states *) + let s1, b1, s2, b2, ge1 = genv_push_two_fresh_vars ge0 "__s" state_type in + ge1, ([s1], [b1]), ([s1; v; s2], List.Tot.flatten ([b1]::brs::[[b2]])) + | Some PP_Unknown -> + print_dbg dbg "PP_Unknown"; + (* Introduce variables for all the values, for the pre and the post *) + let pre_values, pre_binders, ge1 = introduce_variables_for_opt_abs ge0 opt_pre in + let post_values, post_binders, ge1 = introduce_variables_for_opt_abs ge1 opt_post in + ge1, (pre_values, pre_binders), (post_values, post_binders) + | _ -> + print_dbg dbg "No pre and no post"; + (* No pre and no post *) + ge0, ([], []), ([], []) + end + in + (* Generate the propositions: *) + (* - from the precondition *) + let pre_prop = opt_mk_app_norm ge3.env opt_pre pre_values in + (* - from the postcondition - note that in the case of a global post-condition + * we might try to instantiate the return variable with a variable whose + * type is not correct, leading to an error. We thus catch errors below and + * drop the post if there is a problem *) + let post_prop = + try opt_mk_app_norm ge3.env opt_post post_values + with + | _ -> + print_dbg dbg "Dropping a postcondition because of incoherent typing"; + None + in + (* return *) + print_dbg dbg "[> pre_post_to_propositions: end"; + ge3, pre_prop, post_prop + +/// Convert effectful type information to a list of propositions. May have to +/// introduce additional binders for the preconditions/postconditions/goal (hence +/// the environment in the return type). +/// The ``bind_var`` parameter is a variable if the studied term was bound in a let +/// expression. +val eterm_info_to_assertions : + dbg:bool + -> with_gpre:bool + -> with_gpost:bool + -> ge:genv + -> t:term + -> is_let:bool (* the term is the bound expression in a let binding *) + -> is_assert:bool (* the term is an assert - in which case we only output the precondition *) + -> info:eterm_info + -> opt_bind_var:option term (* if let binding: the bound var *) + -> opt_c:option typ_or_comp + -> parents:list term_view + -> children:list term_view -> + Tac (genv & assertions) + +let eterm_info_to_assertions dbg with_gpre with_gpost ge t is_let is_assert info bind_var opt_c + parents children = + print_dbg dbg "[> eterm_info_to_assertions"; + (* Introduce additional variables to instantiate the return type refinement, + * the precondition, the postcondition and the goal *) + (* First, the return value: returns an updated env, the value to use for + * the return type and a list of abstract binders *) + let einfo = info.einfo in + let ge0, (v : term), (opt_b : option binder) = + match bind_var with + | Some v -> ge, v, None + | None -> + (* If the studied term is stateless, we can use it directly in the + * propositions. If the return value is of type unit, we can just use (). + * Otherwise we need to introduce a variable. + * For the reason why we do this: remember that the studied term might be + * a return value: it is not necessarily bound in a let. 
*) + if effect_type_is_stateful einfo.ei_type then + if is_unit_type einfo.ei_ret_type.ty then + ge, `(), None + else + let b = fresh_binder ge.env "__ret" einfo.ei_ret_type.ty in + let bv = bv_of_binder b in + let tm = pack (Tv_Var bv) in + genv_push_binder ge b true None, tm, Some b + else ge, t, None + in + (* Generate propositions from the pre and the post-conditions *) + (**) print_dbg dbg "> Instantiating local pre/post"; + let ge1, pre_prop, post_prop = + pre_post_to_propositions dbg ge0 einfo.ei_type v opt_b einfo.ei_ret_type + einfo.ei_pre einfo.ei_post parents children in + print_dbg dbg ("- pre prop: " ^ option_to_string term_to_string pre_prop); + print_dbg dbg ("- post prop: " ^ option_to_string term_to_string post_prop); + (* If the term is an assertion/assumption, only output the postcondition - + * note that in the case of an assertion, the pre and the post are the same, + * but in the case of an assumption, only the post is interesting *) + if is_assert then + begin + print_dbg dbg "The term is an assert: only keep the postcondition"; + ge1, { pres = opt_cons post_prop []; posts = [] } + end + else begin + (* Generate propositions from the target computation (pre, post, type cast) *) + let ge2, gparams_props, gpre_prop, gcast_props, gpost_prop = + (* Check if we do the computation (which can be expensive) - note that + * computing the global postcondition makes sense only if the focused + * term is the return value and thus not a let-binding *) + let with_goal : bool = with_gpre || ((not is_let) && with_gpost) in + begin match opt_c, with_goal with + | Some c, true -> + let ei = typ_or_comp_to_effect_info dbg ge1 c in + print_dbg dbg ("- target effect: " ^ effect_info_to_string ei); + print_dbg dbg ("- global unfilt. pre: " ^ option_to_string term_to_string ei.ei_pre); + print_dbg dbg ("- global unfilt. post: " ^ option_to_string term_to_string ei.ei_post); + (* The parameters' type information. 
To be used only if the variables are not + * shadowed (the parameters themselves, but also the variables inside the refinements) *) + let gparams_props = + begin + if with_gpre then + begin + print_dbg dbg "Generating assertions from the global parameters' types"; + print_dbg dbg ("Current genv:\n" ^ genv_to_string ge1); + (* Retrieve the types and pair them with the parameters - note that + * we need to reverse the list of parameters (the outer parameter was + * added first in the list and is thus last) *) + let params = + rev (List.Tot.map (fun x -> (x, type_of_binder x)) (params_of_typ_or_comp c)) in + iteri (fun i (b, _) -> print_dbg dbg ("Global parameter " ^ string_of_int i ^ + ": " ^ binder_to_string b)) params; + (* Filter the shadowed parameters *) + let params = filter (fun (b, _)-> not (binder_is_shadowed ge1 b)) params in + (* Generate the propositions *) + let param_to_props (x : (binder & typ)) : Tac (list term) = + let b, ty = x in + let bv = bv_of_binder b in + print_dbg dbg ("Generating assertions from global parameter: " ^ binder_to_string b); + let tinfo = get_type_info_from_type ty in + let v = pack (Tv_Var bv) in + let p1 = mk_has_type v tinfo.ty in + let pl = match tinfo.refin with + | None -> [] + | Some r -> + let p2 = mk_app_norm ge1.env r [v] in + (* Discard the proposition generated from the type refinement if + * it contains shadowed variables *) + if term_has_shadowed_variables ge1 p2 + then begin print_dbg dbg "Discarding type refinement because of shadowed variables"; [] end + else begin print_dbg dbg "Keeping type refinement"; [p2] end + in + p1 :: pl + in + let props = map param_to_props params in + List.Tot.flatten props + end + else + begin + print_dbg dbg "Ignoring the global parameters' types"; + [] + end + end <: Tac (list term) + in + (* The global pre-condition is to be used only if none of its variables + * are shadowed (which implies that we are close enough to the top of + * the function *) + let gpre = + match ei.ei_pre, with_gpre with + | Some pre, true -> + if term_has_shadowed_variables ge1 pre then + begin + print_dbg dbg "Dropping the global precondition because of shadowed variables"; + None + end + else ei.ei_pre + | _ -> None + in + (* The global post-condition and the type cast are relevant only if the + * studied term is not the definition in a let binding *) + let gpost, gcast_props = + if not with_gpost then None, [] + else if is_let then + begin + print_dbg dbg "Dropping the global postcondition and return type because we are studying a let expression"; + None, [] + end + else + (* Because of the way the studied function is rewritten before being sent to F* + * we might have a problem with the return type (we might instantiate + * the return variable from the global post or from the return type + * refinement with a variable whose type is not valid for that, triggering + * an exception. In that case, we drop the post and the target type + * refinement. Note that here only the type refinement may be instantiated, + * we thus also need to check for the post inside ``pre_post_to_propositions`` *) + try + print_dbg dbg "> Generating propositions from the global type cast"; + print_dbg dbg ("- known type: " ^ type_info_to_string einfo.ei_ret_type); + print_dbg dbg ("- exp. 
type : " ^ type_info_to_string ei.ei_ret_type); + let gcast = mk_cast_info v (Some einfo.ei_ret_type) (Some ei.ei_ret_type) in + print_dbg dbg (cast_info_to_string gcast); + let gcast_props = cast_info_to_propositions dbg ge1 gcast in + print_dbg dbg "> Propositions for global type cast:"; + print_dbg dbg (list_to_string term_to_string gcast_props); + ei.ei_post, List.Tot.rev gcast_props + with + | _ -> + print_dbg dbg "Dropping the global postcondition and return type because of incoherent typing"; + None, [] + in + (* Generate the propositions from the precondition and the postcondition *) + (* TODO: not sure about the return type parameter: maybe catch failures *) + print_dbg dbg "> Instantiating global pre/post"; + (* Note that we need to revert the lists of parents terms *) + (* For the children: + * - if the focused term is the return value and is pure: go look for + * a state variable introduced before + * - otherwise, use the children in revert order *) + let gchildren = + if is_let then rev children (* the postcondition should have been dropped anyway *) + else if effect_type_is_stateful einfo.ei_type then rev children + else parents + in + let ge2, gpre_prop, gpost_prop = + pre_post_to_propositions dbg ge1 ei.ei_type v opt_b einfo.ei_ret_type + gpre gpost (rev parents) gchildren in + (* Some debugging output *) + print_dbg dbg ("- global pre prop: " ^ option_to_string term_to_string gpre_prop); + print_dbg dbg ("- global post prop: " ^ option_to_string term_to_string gpost_prop); + (* Return type: *) + ge2, gparams_props, gpre_prop, gcast_props, gpost_prop + | _, _ -> + ge1, [], None, [], None + end <: Tac _ + in + (* Generate the propositions: *) + (* - from the parameters' cast info *) + let params_props = cast_info_list_to_propositions dbg ge2 info.parameters in + (* - from the return type *) + let (ret_values : list term), (ret_binders : list binder) = + if E_Lemma? 
einfo.ei_type then ([] <: list term), ([] <: list binder) + else [v], (match opt_b with | Some b -> [b] | None -> []) in + let ret_has_type_prop = + match ret_values with + | [v] -> Some (mk_has_type v einfo.ei_ret_type.ty) + | _ -> None + in + let ret_refin_prop = opt_mk_app_norm ge2.env (get_opt_refinment einfo.ei_ret_type) ret_values in + (* Concatenate, revert and return *) + let pres = opt_cons gpre_prop (List.Tot.append params_props (opt_cons pre_prop [])) in + let pres = append gparams_props pres in + let posts = opt_cons ret_has_type_prop + (opt_cons ret_refin_prop (opt_cons post_prop + (List.Tot.append gcast_props (opt_cons gpost_prop [])))) in + (* Debugging output *) + print_dbg dbg "- generated pres:"; + if dbg then iter (fun x -> print (term_to_string x)) pres; + print_dbg dbg "- generated posts:"; + if dbg then iter (fun x -> print (term_to_string x)) posts; + ge2, { pres = pres; posts = posts } + end diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst b/stage0/ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst new file mode 100644 index 00000000000..86795c53b76 --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst @@ -0,0 +1,622 @@ +module FStar.InteractiveHelpers.ExploreTerm + +open FStar.List.Tot +open FStar.Tactics +open FStar.Mul +open FStar.InteractiveHelpers.Base + +#set-options "--z3rlimit 15 --fuel 0 --ifuel 1" + +(*** Types and effects *) +/// Define utilities to handle and carry types and effects + +(**** Type analysis *) +/// Retrieve and deconstruct a type/effect + +/// Some constants +//let prims_true_qn = "Prims.l_True" +//let prims_true_term = `Prims.l_True + +let pure_effect_qn = "Prims.PURE" +let pure_hoare_effect_qn = "Prims.Pure" +let stack_effect_qn = "FStar.HyperStack.ST.Stack" +let st_effect_qn = "FStar.HyperStack.ST.ST" + + +/// Return the qualifier of a comp as a string +val comp_qualifier (c : comp) : Tac string + +#push-options "--ifuel 1" +let comp_qualifier (c : comp) : Tac string = + match inspect_comp c with + | C_Total _ -> "C_Total" + | C_GTotal _ -> "C_GTotal" + | C_Lemma _ _ _ -> "C_Lemma" + | C_Eff _ _ _ _ _ -> "C_Eff" +#pop-options + +/// Effect information: we list the current supported effects +type effect_type = +| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown + +val effect_type_to_string : effect_type -> string + +#push-options "--ifuel 1" +let effect_type_to_string ety = + match ety with + | E_Total -> "E_Total" + | E_GTotal -> "E_GTotal" + | E_Lemma -> "E_Lemma" + | E_PURE -> "E_PURE" + | E_Pure -> "E_Pure" + | E_Stack -> "E_Stack" + | E_ST -> "E_ST" + | E_Unknown -> "E_Unknown" +#pop-options + +val effect_name_to_type (ename : name) : Tot effect_type + +let effect_name_to_type (ename : name) : Tot effect_type = + let ename = flatten_name ename in + if ename = pure_effect_qn then E_PURE + else if ename = pure_hoare_effect_qn then E_Pure + else if ename = stack_effect_qn then E_Stack + else if ename = st_effect_qn then E_ST + else E_Unknown + +val effect_type_is_pure : effect_type -> Tot bool +let effect_type_is_pure etype = + match etype with + | E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true + | E_Stack | E_ST | E_Unknown -> false + +/// Type information +noeq type type_info = { + ty : typ; (* the type without refinement *) + refin : option term; +} + +let mk_type_info = Mktype_info + +val type_info_to_string : type_info -> Tac string +let type_info_to_string info = + "Mktype_info (" ^ + term_to_string info.ty ^ ") (" ^ + 
option_to_string term_to_string info.refin ^ ")" + +let unit_type_info = mk_type_info (`unit) None + +val safe_tc (e:env) (t:term) : Tac (option term) +let safe_tc e t = + try Some (tc e t) with | _ -> None + +val safe_tcc (e:env) (t:term) : Tac (option comp) +let safe_tcc e t = + try Some (tcc e t) with | _ -> None + +let get_type_info_from_type (ty:typ) : Tac type_info = + match inspect ty with + | Tv_Refine bv sort refin -> + let raw_type = prettify_term false sort in + let b : binder = mk_binder bv sort in + let refin = prettify_term false refin in + let refin = pack (Tv_Abs b refin) in + mk_type_info raw_type (Some refin) + | _ -> + let ty = prettify_term false ty in + mk_type_info ty None + +#push-options "--ifuel 1" +let get_type_info (e:env) (t:term) : Tac (option type_info) = + match safe_tc e t with + | None -> None + | Some ty -> Some (get_type_info_from_type ty) +#pop-options + +val get_total_or_gtotal_ret_type : comp -> Tot (option typ) +let get_total_or_gtotal_ret_type c = + match inspect_comp c with + | C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty + | _ -> None + +val get_comp_ret_type : comp -> Tot typ +let get_comp_ret_type c = + match inspect_comp c with + | C_Total ret_ty | C_GTotal ret_ty + | C_Eff _ _ ret_ty _ _ -> ret_ty + | C_Lemma _ _ _ -> (`unit) + +val is_total_or_gtotal : comp -> Tot bool +let is_total_or_gtotal c = + Some? (get_total_or_gtotal_ret_type c) + +val is_unit_type : typ -> Tac bool +let is_unit_type ty = + match inspect ty with + | Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid + | _ -> false + + +(**** typ_or_comp *) +/// This type is used to store typing information. +/// We use it mostly to track what the target type/computation is for a term, +/// while exploring this term. It is especially useful to generate post-conditions, +/// for example. We store the list of abstractions encountered so far at the +/// same time. +/// Note that in order to keep track of the type correctly, whenever we encounter +/// an abstraction in the term, we need to check that the term' type is an arrow, +/// in which case we need to do a substitution (the arrow takes as first parameter +/// which is not the same as the abstraction's binder). As the substitution is costly +/// (we do it by using the normalizer, but the "final" return term is the whole +/// function's body type, which is often super big) we do it lazily: we count how +/// many parameters we have encountered and not substituted, and "flush" when we +/// really need to inspect the typ_or_comp. 
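+/// Informal sketch of the lazy scheme: when diving under an abstraction
+/// [> (fun x -> body) : y:ty -> body_ty
+/// we only record the binder for ``x`` and increment ``num_unflushed`` instead
+/// of substituting ``y`` by ``x`` in ``body_ty`` right away; the accumulated
+/// substitutions are applied all at once by ``flush_typ_or_comp`` when the
+/// type really needs to be inspected.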
+// TODO: actually we only need to carry a comp (if typ: consider it total) +(* TODO: remove the instantiation: instantiate incrementally *) +noeq type typ_or_comp = +| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp +| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp + +let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string = + match tyc with + | TC_Typ v pl num_unflushed -> + "TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^ + " " ^ string_of_int num_unflushed + | TC_Comp c pl num_unflushed -> + "TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^ + " " ^ string_of_int num_unflushed + +/// Return the list of parameters stored in a ``typ_or_comp`` +let params_of_typ_or_comp (c : typ_or_comp) : list binder = + match c with + | TC_Typ _ pl _ | TC_Comp _ pl _ -> pl + +let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat = +match c with + | TC_Typ _ _ n | TC_Comp _ _ n -> n + +/// Compute a ``typ_or_comp`` from the type of a term +// TODO: try to get a more precise comp +val safe_typ_or_comp : bool -> env -> term -> + Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)}) +let safe_typ_or_comp dbg e t = + match safe_tcc e t with + | None -> + print_dbg dbg ("[> safe_typ_or_comp:" ^ + "\n-term: " ^ term_to_string t ^ + "\n-comp: None"); + None + | Some c -> + print_dbg dbg ("[> safe_typ_or_comp:" ^ + "\n-term: " ^ term_to_string t ^ + "\n-comp: " ^ acomp_to_string c); + Some (TC_Comp c [] 0) + +val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp +let subst_bv_in_comp e b sort t c = + apply_subst_in_comp e c [((b, sort), t)] + +val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp +let subst_binder_in_comp e b t c = + subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c + +/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _, +/// fail otherwise +val unfold_until_arrow : env -> typ -> Tac typ +let rec unfold_until_arrow e ty0 = + if Tv_Arrow? 
(inspect ty0) then ty0 + else + begin + (* Start by normalizing the term - note that this operation is expensive *) + let ty = norm_term_env e [] ty0 in + (* Helper to unfold top-level identifiers *) + let unfold_fv (fv : fv) : Tac term = + let ty = pack (Tv_FVar fv) in + let fvn = flatten_name (inspect_fv fv) in + (* unfold the top level binding, check that it has changed, and recurse *) + let ty' = norm_term_env e [delta_only [fvn]] ty in + (* I'm not confident about using eq_term here *) + begin match inspect ty' with + | Tv_FVar fv' -> + if flatten_name (inspect_fv fv') = fvn + then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty' + | _ -> ty' + end + in + (* Inspect *) + match inspect ty with + | Tv_Arrow _ _ -> ty + | Tv_FVar fv -> + (* Try to unfold the top-level identifier and recurse *) + let ty' = unfold_fv fv in + unfold_until_arrow e ty' + | Tv_App _ _ -> + (* Strip all the parameters, try to unfold the head and recurse *) + let hd, args = collect_app ty in + begin match inspect hd with + | Tv_FVar fv -> + let hd' = unfold_fv fv in + let ty' = mk_app hd' args in + unfold_until_arrow e ty' + | _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) + end + | Tv_Refine bv sort ref -> + unfold_until_arrow e sort + | Tv_AscribedT body _ _ _ + | Tv_AscribedC body _ _ _ -> + unfold_until_arrow e body + | _ -> + (* Other situations: don't know what to do *) + mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) + end + +/// Instantiate a comp +val inst_comp_once : env -> comp -> term -> Tac comp +let inst_comp_once e c t = + let ty = get_comp_ret_type c in + let ty' = unfold_until_arrow e ty in + begin match inspect ty' with + | Tv_Arrow b1 c1 -> + subst_binder_in_comp e b1 t c1 + | _ -> (* Inconsistent state *) + mfail "inst_comp_once: inconsistent state" + end + +val inst_comp : env -> comp -> list term -> Tac comp +let rec inst_comp e c tl = + match tl with + | [] -> c + | t :: tl' -> + let c' = try inst_comp_once e c t + with | MetaAnalysis msg -> mfail_doc ([text "inst_comp: error"] @ msg) + | err -> raise err + in + inst_comp e c' tl' + +/// Update the current ``typ_or_comp`` before going into the body of an abstraction. +/// Explanations: +/// In the case we dive into a term of the form: +/// [> (fun x -> body) : y:ty -> body_type +/// we need to substitute y with x in body_type to get the proper type for body. +/// Note that we checked, and in practice the binders are indeed different. +// TODO: actually, we updated it to do a lazy instantiation +val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp + +let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) : + Tac typ_or_comp = + (* Try to reveal an arrow *) + try + let ty' = unfold_until_arrow e ty in + begin match inspect ty' with + | Tv_Arrow b1 c1 -> + let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in + TC_Comp c1' (b :: pl) 0 + | _ -> (* Inconsistent state *) + mfail "_abs_update_typ: inconsistent state" + end + with + | MetaAnalysis msg -> + mfail_doc ( + [text ("_abs_update_typ: could not find an arrow in " ^ term_to_string ty)] + @ msg + ) + | err -> raise err + +let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp = + match c with + (*| TC_Typ v pl n -> _abs_update_typ b v pl e + | TC_Comp v pl n -> + (* Note that the computation is not necessarily pure, in which case we might + * want to do something with the effect arguments (pre, post...) 
- for + * now we just ignore them *) + let ty = get_comp_ret_type v in + _abs_update_typ b ty pl e *) + | TC_Typ v pl n -> TC_Typ v (b::pl) (n+1) + | TC_Comp v pl n -> TC_Comp v (b::pl) (n+1) + +val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env -> + Tac (option typ_or_comp) +let abs_update_opt_typ_or_comp b opt_c e = + match opt_c with + | None -> None + | Some c -> + try + let c = abs_update_typ_or_comp b c e in + Some c + with | MetaAnalysis msg -> None + | err -> raise err + +/// Flush the instantiation stored in a ``typ_or_comp`` +val flush_typ_or_comp : bool -> env -> typ_or_comp -> + Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) + +/// Strip all the arrows we can without doing any instantiation. When we can't +/// strip arrows anymore, do the instantiation at once. +/// We keep track of two list of binders: +/// - the remaining binders +/// - the instantiation corresponding to the arrows we have stripped so far, and +/// which will be applied all at once +let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term)) + (c:comp) : Tac comp = + let flush c inst = + let inst = List.Tot.rev inst in + apply_subst_in_comp e c inst + in + match rem with + | [] -> + (* No more binders: flush *) + flush c inst + | b :: rem' -> + (* Check if the return type is an arrow, if not flush and normalize *) + let ty = get_comp_ret_type c in + let ty, inst' = + if Tv_Arrow? (inspect ty) then ty, inst + else get_comp_ret_type (flush c inst), [] + in + match inspect ty with + | Tv_Arrow b' c' -> + _flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c' + | _ -> + mfail ("_flush_typ_or_comp: inconsistent state" ^ + "\n-comp: " ^ acomp_to_string c ^ + "\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem) + +let flush_typ_or_comp dbg e tyc = + let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) = + let pl', _ = List.Tot.splitAt n pl in + let pl' = List.Tot.rev pl' in + let c = _flush_typ_or_comp_comp dbg e pl' [] c in + TC_Comp c pl 0 + in + try begin match tyc with + | TC_Typ ty pl n -> + let c = pack_comp (C_Total ty) in + flush_comp pl n c + | TC_Comp c pl n -> flush_comp pl n c + end + with | MetaAnalysis msg -> + mfail_doc ([text ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc)] @ msg) + | err -> raise err + +/// Compute the target ``typ_or_comp`` for an argument by the type of the head: +/// in `hd a`, if `hd` has type `t -> ...`, use `t` +val safe_arg_typ_or_comp : bool -> env -> term -> + Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)}) +let safe_arg_typ_or_comp dbg e hd = + print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd); + match safe_tc e hd with + | None -> None + | Some ty -> + print_dbg dbg ("hd type: " ^ term_to_string ty); + let ty = + if Tv_Arrow? 
(inspect ty) then + begin + print_dbg dbg "no need to unfold the type"; + ty + end + else + begin + print_dbg dbg "need to unfold the type"; + let ty = unfold_until_arrow e ty in + print_dbg dbg ("result of unfolding : "^ term_to_string ty); + ty + end + in + match inspect ty with + | Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0) + | _ -> None + +/// Exploring a term + +(*** Term exploration *) +/// Explore a term, correctly updating the environment when traversing abstractions + +let convert_ctrl_flag (flag : ctrl_flag) = + match flag with + | Continue -> Continue + | Skip -> Continue + | Abort -> Abort + +/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't +/// type check +/// ctrl_flag: +/// - Continue: continue exploring the term +/// - Skip: don't explore the sub-terms of this term +/// - Abort: stop exploration +/// TODO: we might want a more precise control (like: don't explore the type of the +/// ascription but explore its body) +/// Note that ``explore_term`` doesn't use the environment parameter besides pushing +/// binders and passing it to ``f``, which means that you can give it arbitrary +/// environments, ``explore_term`` itself won't fail (but the passed function might). + +let explorer (a : Type) = + a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view -> + Tac (a & ctrl_flag) + +// TODO: use more +let bind_expl (#a : Type) (x : a) (f1 f2 : a -> Tac (a & ctrl_flag)) : Tac (a & ctrl_flag) = + let x1, flag1 = f1 x in + if flag1 = Continue then + f2 x1 + else x1, convert_ctrl_flag flag1 + +// TODO: change the signature to move the dbg flag +val explore_term : + dbg : bool + -> dfs : bool (* depth-first search *) + -> #a : Type0 + -> f : explorer a + -> x : a + -> ge : genv + (* the list of terms traversed so far (first is most recent) with the environment + * at the time they were traversed *) + -> parents : list (genv & term_view) + -> c : option typ_or_comp + -> t:term -> + Tac (a & ctrl_flag) + +val explore_pattern : + dbg : bool + -> dfs : bool (* depth-first search *) + -> #a : Type0 + -> f : explorer a + -> x : a + -> ge:genv + -> pat:pattern -> + Tac (genv & a & ctrl_flag) + +(* TODO: carry around the list of encompassing terms *) +let rec explore_term dbg dfs #a f x ge0 pl0 c0 t0 = + print_dbg dbg ("[> explore_term: " ^ term_construct t0 ^ ":\n" ^ term_to_string t0); + let tv0 = inspect t0 in + let x0, flag = f x ge0 pl0 c0 tv0 in + let pl1 = (ge0, tv0) :: pl0 in + if flag = Continue then + begin match tv0 with + | Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> x0, Continue + | Tv_App hd (a,qual) -> + (* Explore the argument - we update the target typ_or_comp when doing so. 
+ * Note that the only way to get the correct target type is to deconstruct + * the type of the head *) + let a_c = safe_arg_typ_or_comp dbg ge0.env hd in + print_dbg dbg ("Tv_App: updated target typ_or_comp to:\n" ^ + option_to_string typ_or_comp_to_string a_c); + let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 a_c a in + (* Explore the head - no type information here: we can compute it, + * but it seems useless (or maybe use it only if it is not Total) *) + if flag1 = Continue then + explore_term dbg dfs f x1 ge0 pl1 None hd + else x1, convert_ctrl_flag flag1 + | Tv_Abs br body -> + let ge1 = genv_push_binder ge0 br false None in + let c1 = abs_update_opt_typ_or_comp br c0 ge1.env in + explore_term dbg dfs f x0 ge1 pl1 c1 body + | Tv_Arrow br c0 -> x0, Continue (* TODO: we might want to explore that *) + | Tv_Type _ -> x0, Continue + | Tv_Refine bv sort ref -> + let bvv = inspect_bv bv in + let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 None sort in + if flag1 = Continue then + let ge1 = genv_push_bv ge0 bv sort false None in + explore_term dbg dfs f x1 ge1 pl1 None ref + else x1, convert_ctrl_flag flag1 + | Tv_Const _ -> x0, Continue + | Tv_Uvar _ _ -> x0, Continue + | Tv_Let recf attrs bv ty def body -> + (* Binding definition exploration - for the target computation: initially we + * used the type of the definition, however it is often unnecessarily complex. + * Now, we use the type of the binder used for the binding. *) + let def_c = Some (TC_Typ ty [] 0) in + let explore_def x = explore_term dbg dfs f x ge0 pl1 def_c def in + (* Exploration of the following instructions *) + let ge1 = genv_push_bv ge0 bv ty false (Some def) in + let explore_next x = explore_term dbg dfs f x ge1 pl1 c0 body in + (* Perform the exploration in the proper order *) + let expl1, expl2 = if dfs then explore_next, explore_def else explore_def, explore_next in + bind_expl x0 expl1 expl2 + | Tv_Match scrutinee _ret_opt branches -> //AR: TODO: need to account for returns annotation here + (* Auxiliary function to explore the branches *) + let explore_branch (x_flag : a & ctrl_flag) (br : branch) : Tac (a & ctrl_flag)= + let x0, flag = x_flag in + if flag = Continue then + let pat, branch_body = br in + (* Explore the pattern *) + let ge1, x1, flag1 = explore_pattern dbg dfs #a f x0 ge0 pat in + if flag1 = Continue then + (* Explore the branch body *) + explore_term dbg dfs #a f x1 ge1 pl1 c0 branch_body + else x1, convert_ctrl_flag flag1 + (* Don't convert the flag *) + else x0, flag + in + (* Explore the scrutinee *) + let scrut_c = safe_typ_or_comp dbg ge0.env scrutinee in + let x1 = explore_term dbg dfs #a f x0 ge0 pl1 scrut_c scrutinee in + (* Explore the branches *) + fold_left explore_branch x1 branches + | Tv_AscribedT e ty tac _ -> + let c1 = Some (TC_Typ ty [] 0) in + let x1, flag = explore_term dbg dfs #a f x0 ge0 pl1 None ty in + if flag = Continue then + explore_term dbg dfs #a f x1 ge0 pl1 c1 e + else x1, convert_ctrl_flag flag + | Tv_AscribedC e c1 tac _ -> + (* TODO: explore the comp *) + explore_term dbg dfs #a f x0 ge0 pl1 (Some (TC_Comp c1 [] 0)) e + | _ -> + (* Unknown *) + x0, Continue + end + else x0, convert_ctrl_flag flag + +and explore_pattern dbg dfs #a f x ge0 pat = + print_dbg dbg ("[> explore_pattern:"); + match pat with + | Pat_Constant _ -> ge0, x, Continue + | Pat_Cons fv us patterns -> + let explore_pat ge_x_flag pat = + let ge0, x, flag = ge_x_flag in + let pat1, _ = pat in + if flag = Continue then + explore_pattern dbg dfs #a f x ge0 pat1 + else + (* Don't convert the 
flag *) + ge0, x, flag + in + fold_left explore_pat (ge0, x, Continue) patterns + | Pat_Var bv st -> + let ge1 = genv_push_bv ge0 bv (unseal st) false None in + ge1, x, Continue + | Pat_Dot_Term _ -> ge0, x, Continue + +(*** Variables in a term *) +/// Returns the list of free variables contained in a term +val free_in : term -> Tac (list bv) +let free_in t = + let same_name (bv1 bv2 : bv) : Tac bool = + name_of_bv bv1 = name_of_bv bv2 + in + let update_free (fl:list bv) (ge:genv) (pl:list (genv & term_view)) + (c:option typ_or_comp) (tv:term_view) : + Tac (list bv & ctrl_flag) = + match tv with + | Tv_Var bv | Tv_BVar bv -> + (* Check if the binding was not introduced during the traversal *) + begin match genv_get_from_name ge (name_of_bv bv) with + | None -> + (* Check if we didn't already count the binding *) + let fl' = if Tactics.tryFind (same_name bv) fl then fl else bv :: fl in + fl', Continue + | Some _ -> fl, Continue + end + | _ -> fl, Continue + in + let e = top_env () in (* we actually don't care about the environment *) + let ge = mk_genv e [] [] in + List.Tot.rev (fst (explore_term false false update_free [] ge [] None t)) + +/// Returns the list of abstract variables appearing in a term, in the order in +/// which they were introduced in the context. +val abs_free_in : genv -> term -> Tac (list (bv & typ)) +let abs_free_in ge t = + let fvl = free_in t in + let absl = List.Tot.rev (genv_abstract_bvs ge) in + let is_free_in_term bv = + Some? (List.Tot.find (bv_eq bv) fvl) + in + let absfree = List.Tot.concatMap + (fun (bv, ty) -> if is_free_in_term bv then [bv,ty] else []) absl + in + absfree + +/// Returns the list of free shadowed variables appearing in a term. +val shadowed_free_in : genv -> term -> Tac (list bv) +let shadowed_free_in ge t = + let fvl = free_in t in + List.Tot.filter (fun bv -> bv_is_shadowed ge bv) fvl + +/// Returns true if a term contains variables which are shadowed in a given environment +val term_has_shadowed_variables : genv -> term -> Tac bool +let term_has_shadowed_variables ge t = + let fvl = free_in t in + Some? (List.Tot.tryFind (bv_is_shadowed ge) fvl) diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.Output.fst b/stage0/ulib/experimental/FStar.InteractiveHelpers.Output.fst new file mode 100644 index 00000000000..2407760edc3 --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.Output.fst @@ -0,0 +1,187 @@ +module FStar.InteractiveHelpers.Output + +open FStar.List.Tot +open FStar.Tactics +open FStar.Mul +open FStar.InteractiveHelpers.Base +open FStar.InteractiveHelpers.ExploreTerm +open FStar.InteractiveHelpers.Propositions + +/// Facilities to output results to the IDE/emacs/whatever. +/// Contains datatypes and functions to carry information. + +#set-options "--z3rlimit 15 --fuel 0 --ifuel 1" + +(*** Convert terms to string *) +/// The important point is to handle variable shadowing properly, so that the +/// generated term is meaningful in the user context, or at least that it is clear +/// to the user that some variables are shadowed. + +/// Introduce fresh variables for the variables shadowed in the current environment +/// and substitute them in the terms. Note that as the binding of the value returned +/// by a function application might shadow one of its parameters, we need to treat +/// differently the pre-assertions and the post-assertions. Moreover, we need to +/// keep track of which variables are shadowed for every assertion. 
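+/// Illustrative example: for a call bound as
+/// [> let x = f x in ...
+/// the new ``x`` shadows the old one; in the pre-assertions (inserted before
+/// the let) the old ``x`` is still in scope and can keep its name, while in
+/// the post-assertions (inserted after the let) it would be captured and must
+/// be replaced by a fresh variable.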
+ +let rec _split_subst_at_bv (#a #b : Type) (x : bv) (subst : list ((bv & a) & b)) : + Tot (list ((bv & a) & b) & list ((bv & a) & b)) + (decreases subst) = + match subst with + | [] -> [], [] + | ((src, ty), tgt) :: subst' -> + if bv_eq x src then + [], subst' + else + let s1, s2 = _split_subst_at_bv x subst' in + ((src, ty), tgt) :: s1, s2 + +val subst_shadowed_with_abs_in_assertions : bool -> genv -> option bv -> assertions -> Tac (genv & assertions) +let subst_shadowed_with_abs_in_assertions dbg ge shadowed_bv es = + (* When generating the substitution, we need to pay attention to the fact that + * the returned value potentially bound by a let may shadow another variable. + * We need to take this into account for the post-assertions (but not the + * pre-assertions). *) + print_dbg dbg ("subst_shadowed_with_abs_in_assertions:\n" ^ genv_to_string ge); + (* Generate the substitution *) + let ge1, subst = generate_shadowed_subst ge in + let post_subst = map (fun (src, ty, tgt) -> (src, ty), pack (Tv_Var tgt)) subst in + (* The current substitution is valid for the post-assertions: derive from it + * a substitution valid for the pre-assertions (just cut it where the bv + * shadowed by the return value appears). Note that because we might introduce + * dummy variables for the return value, it is not valid just to ignore + * the last substitution pair. *) + let pre_subst = + if Some? shadowed_bv then fst (_split_subst_at_bv (Some?.v shadowed_bv) post_subst) + else post_subst + in + let subst_to_string subst : Tac string = + let to_string ((x, ty), y) = + "(" ^ abv_to_string x ^ " -> " ^ term_to_string y ^ ")\n" + in + let str = map to_string subst in + List.Tot.fold_left (fun x y -> x ^ y) "" str + in + if dbg then + begin + print_dbg dbg ("- pre_subst:\n" ^ subst_to_string pre_subst); + print_dbg dbg ("- post_subst:\n" ^ subst_to_string post_subst) + end; + (* Apply *) + let apply = (fun s -> map (fun t -> apply_subst ge1.env t s)) in + let pres = apply pre_subst es.pres in + let posts = apply post_subst es.posts in + ge1, mk_assertions pres posts + +(*** Convert propositions to string *) +/// Originally: we output the ``eterm_info`` and let the emacs commands do some +/// filtering and formatting. Now, we convert ``eterm_info`` to a ``assertions``. +/// Note that we also convert all the information to a string that we export at +/// once in order for the output not to be polluted by any other messages +/// (warning messages from F*, for example). 
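+/// For reference, the printout produced by the functions below roughly looks
+/// like the following (sketch for the "ainfo" prefix, with one precondition
+/// and no postcondition):
+/// [> ainfo:BEGIN
+/// [> ainfo:error:
+/// [> ainfo:pres:num:
+/// [> 1
+/// [> ainfo:pres:prop0:
+/// [> <the proposition, printed as a term>
+/// [> ainfo:posts:num:
+/// [> 0
+/// [> ainfo:END
+/// [> %FIH:FSTAR_META:END%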
+ +let string_to_printout (prefix data:string) : Tot string = + prefix ^ ":\n" ^ data ^ "\n" + +let term_to_printout (ge:genv) (prefix:string) (t:term) : Tac string = + (* We need to look for abstract variables and abstract them away *) + let abs = abs_free_in ge t in + let abs_binders = List.Tot.map (fun (bv, t) -> mk_binder bv t) abs in + let abs_terms = map (fun (bv, _) -> pack (Tv_Var bv)) abs in + let t = mk_abs abs_binders t in + let t = mk_e_app t abs_terms in + string_to_printout prefix (term_to_string t) + +let opt_term_to_printout (ge:genv) (prefix:string) (t:option term) : Tac string = + match t with + | Some t' -> term_to_printout ge prefix t' + | None -> string_to_printout prefix "" + +let proposition_to_printout (ge:genv) (prefix:string) (p:proposition) : Tac string = + term_to_printout ge prefix p + +let propositions_to_printout (ge:genv) (prefix:string) (pl:list proposition) : Tac string = + let prop_to_printout i p = + let prefix' = prefix ^ ":prop" ^ string_of_int i in + proposition_to_printout ge prefix' p + in + let str = string_to_printout (prefix ^ ":num") (string_of_int (List.Tot.length pl)) in + let concat_prop s_i p : Tac (string & int) = + let s, i = s_i in + s ^ prop_to_printout i p, i+1 + in + let str, _ = fold_left concat_prop (str,0) pl in + str + +let error_message_to_printout (prefix : string) (message : option string) : Tot string = + let msg = match message with | Some msg -> msg | _ -> "" in + string_to_printout (prefix ^ ":error") msg + +/// Utility type and function to communicate the results to emacs. +noeq type export_result = +| ESuccess : ge:genv -> a:assertions -> export_result +| EFailure : err:string -> export_result + +let result_to_printout (prefix:string) (res:export_result) : + Tac string = + let str = prefix ^ ":BEGIN\n" in + (* Note that the emacs commands will always look for fields for the error message + * and the pre/post assertions, so we need to generate them, even though they + * might be empty. 
*) + let err, ge, pres, posts = + match res with + | ESuccess ge a -> None, ge, a.pres, a.posts + | EFailure err -> + let ge = mk_init_genv (top_env ()) in (* dummy environment - will not be used *) + Some err, ge, [], [] + in + (* Error message *) + let str = str ^ error_message_to_printout prefix err in + (* Assertions *) + let str = str ^ propositions_to_printout ge (prefix ^ ":pres") pres in + let str = str ^ propositions_to_printout ge (prefix ^ ":posts") posts in + str ^ prefix ^ ":END\n" ^ "%FIH:FSTAR_META:END%" + +let printout_result (prefix:string) (res:export_result) : + Tac unit = + print (result_to_printout prefix res) + +/// The function to use to export the results in case of success +let printout_success (ge:genv) (a:assertions) : Tac unit = + printout_result "ainfo" (ESuccess ge a) + +/// The function to use to communicate failure in case of error +let printout_failure (err : error_message) : Tac unit = + printout_result "ainfo" (EFailure (rendermsg err)) + +let _debug_print_var (name : string) (t : term) : Tac unit = + print ("_debug_print_var: " ^ name ^ ": " ^ term_to_string t); + begin match safe_tc (top_env ()) t with + | Some ty -> print ("type: " ^ term_to_string ty) + | _ -> () + end; + print ("qualifier: " ^ term_construct t); + begin match inspect t with + | Tv_Var bv -> + let b : bv_view = inspect_bv bv in + print ("Tv_Var: ppname: " ^ name_of_bv bv ^ + "; index: " ^ (string_of_int b.bv_index)) + | _ -> () + end; + print "end of _debug_print_var" + +/// We use the following to solve goals requiring a unification variable (for +/// which we might not have a candidate, or for which the candidate may not +/// typecheck correctly). We can't use the tactic ``tadmit`` for the simple +/// reason that it generates a warning which may mess up with the subsequent +/// parsing of the data generated by the tactics. +// TODO: actually, there already exists Prims.magic +assume val magic_witness (#a : Type) : a + +let tadmit_no_warning () : Tac unit = + apply (`magic_witness) + +let pp_tac () : Tac unit = + print ("post-processing: " ^ (term_to_string (cur_goal ()))); + dump ""; + trefl() + diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.PostProcess.fst b/stage0/ulib/experimental/FStar.InteractiveHelpers.PostProcess.fst new file mode 100644 index 00000000000..cf8f9122c77 --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.PostProcess.fst @@ -0,0 +1,745 @@ +module FStar.InteractiveHelpers.PostProcess + +open FStar.List.Tot +open FStar.Tactics +open FStar.Mul +open FStar.InteractiveHelpers.Base +open FStar.InteractiveHelpers.ExploreTerm +open FStar.InteractiveHelpers.Propositions +open FStar.InteractiveHelpers.Effectful +open FStar.InteractiveHelpers.Output + +/// The high-level post-processing tactics, used to retrieve some specific +/// information from the context and generate output which can be exploited +/// on the IDE side. 
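+/// Rough (illustrative) picture of how these tactics are driven: the IDE-side
+/// code rewrites the studied definition so that the term under the cursor is
+/// marked as
+/// [> let _ = focus_on_term in <term under the cursor>
+/// then runs one of the tactics below on a squashed equality goal; the tactic
+/// locates the marker, analyzes the focused term, and prints the resulting
+/// assertions using the facilities of FStar.InteractiveHelpers.Output.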
+ +#set-options "--z3rlimit 15 --fuel 0 --ifuel 1" + +(*** General utilities *) +/// We declare some identifiers that we will use to guide the meta processing +assume type meta_info +assume val focus_on_term : meta_info + +let end_proof () = + tadmit_t (`()) + +let unsquash_equality (t:term) : Tac (option (term & term)) = + begin match term_as_formula t with + | Comp (Eq _) l r -> Some (l, r) + | _ -> None + end + +#push-options "--ifuel 2" +let pp_explore (dbg dfs : bool) + (#a : Type0) + (f : explorer a) + (x : a) : + Tac unit = + let g = cur_goal () in + let e = cur_env () in + print_dbg dbg ("[> pp_explore:\n" ^ term_to_string g); + begin match unsquash_equality g with + | Some (l, _) -> + let c = safe_typ_or_comp dbg e l in + let ge = mk_genv e [] [] in + print_dbg dbg ("[> About to explore term:\n" ^ term_to_string l); + let x = explore_term dbg dfs #a f x ge [] c l in + end_proof () + | _ -> mfail "pp_explore: not a squashed equality" + end +#pop-options + +/// This function goes through the goal, which is presumed to be a squashed equality, +/// and prints all the subterms of its left operand. Very useful for debugging. +val pp_explore_print_goal : unit -> Tac unit +let pp_explore_print_goal () = + let f : explorer unit = + fun _ _ _ _ _ -> ((), Continue) + in + pp_explore true false f () + +/// Check for meta-identifiers. Note that we can't simply use ``term_eq`` which +/// sometimes unexpectedly fails (maybe because of information hidden to Meta-F*) +val is_focus_on_term : term -> Tac bool +let is_focus_on_term t = is_fvar t (`%focus_on_term) + +/// Check if a term is an assertion or an assumption and return its content +/// if it is the case. +val term_is_assert_or_assume : term -> Tac (option term) +let term_is_assert_or_assume t = + match inspect t with + | Tv_App hd (a, Q_Explicit) -> + if is_any_fvar a [`%Prims._assert; `%FStar.Pervasives.assert_norm; `%Prims._assume] + then Some a + else None + | _ -> None + +/// Check if the given term view is of the form: 'let _ = focus_on_term in body' +/// Returns 'body' if it is the case. +val is_focused_term : term_view -> Tac (option term) +let is_focused_term tv = + match tv with + | Tv_Let recf attrs _ _ def body -> + if is_focus_on_term def then Some body else None + | _ -> None + +noeq type exploration_result (a : Type)= { + ge : genv; + parents : list (genv & term_view); + tgt_comp : option typ_or_comp; + res : a; +} + +let mk_exploration_result = Mkexploration_result + +let pred_explorer (a:Type) = + genv -> list (genv & term_view) -> option typ_or_comp -> term_view -> + Tac (option a) + +val find_predicated_term_explorer : #a:Type0 -> pred_explorer a -> bool -> + explorer (option (exploration_result a)) +let find_predicated_term_explorer #a pred dbg acc ge pl opt_c t = + if Some? 
acc then mfail "find_focused_term_explorer: non empty accumulator"; + if dbg then + begin + print ("[> find_focused_term_explorer: " ^ term_view_construct t ^ ":\n" ^ term_to_string t) + end; + match pred ge pl opt_c t with + | Some ft -> Some (mk_exploration_result ge pl opt_c ft), Abort + | None -> None, Continue + +val find_predicated_term : #a:Type0 -> pred_explorer a -> bool -> bool -> + genv -> list (genv & term_view) -> + option typ_or_comp -> term -> + Tac (option (exploration_result a)) +let find_predicated_term #a pred dbg dfs ge pl opt_c t = + fst (explore_term dbg dfs #(option (exploration_result a)) + (find_predicated_term_explorer #a pred dbg) + None ge pl opt_c t) + +val _is_focused_term_explorer : pred_explorer term +let _is_focused_term_explorer ge pl opt_c tv = + is_focused_term tv + +val find_focused_term : bool -> bool -> genv -> list (genv & term_view) -> option typ_or_comp -> term -> + Tac (option (exploration_result term)) +let find_focused_term dbg dfs ge pl opt_c t = + find_predicated_term #term _is_focused_term_explorer dbg dfs ge pl opt_c t + +/// This function raises a MetaAnalysis exception if it can't find a focused term +val find_focused_term_in_current_goal : bool -> Tac (exploration_result term) +let find_focused_term_in_current_goal dbg = + let g = cur_goal () in + let e = cur_env () in + print_dbg dbg ("[> find_focused_assert_in_current_goal:\n" ^ term_to_string g); + begin match unsquash_equality g with + | Some (l, _) -> + let c = safe_typ_or_comp dbg e l in + let ge = mk_genv e [] [] in + print_dbg dbg ("[> About to explore term:\n" ^ term_to_string l); + begin match find_focused_term dbg true ge [] c l with + | Some res -> + print_dbg dbg ("[> Found focused term:\n" ^ term_to_string res.res); + res + | None -> + mfail ("find_focused_term_in_current_goal: could not find a focused term in the current goal: " + ^ term_to_string g) + end + | _ -> mfail "find_focused_term_in_current_goal: not a squashed equality" + end + +/// This function raises a MetaAnalysis exception if it can't find a focused term +val find_focused_assert_in_current_goal : bool -> Tac (exploration_result term) +let find_focused_assert_in_current_goal dbg = + print_dbg dbg ("[> find_focused_assert_in_current_goal"); + let res = find_focused_term_in_current_goal dbg in + print_dbg dbg ("[> Found focused term:\n" ^ term_to_string res.res); + (* Check that it is an assert or an assume, retrieve the assertion *) + let res' = + match inspect res.res with + | Tv_Let _ _ bv0 ty fterm _ -> + let ge' = genv_push_bv res.ge bv0 ty false None in + ({ res with res = fterm; ge = ge' }) + | _ -> res + in + begin match term_is_assert_or_assume res'.res with + | None -> mfail ("find_focused_assert_in_current_goal: the found focused term is not an assertion or an assumption:" ^ term_to_string res.res) + | Some tm -> { res' with res = tm } + end + +(*** Analyze effectful term *) +/// Analyze a term in order to print properly instantiated pre/postconditions +/// and type conditions. + +/// with_globals states whether to analyze the target pre/post together with the +/// focused term. 
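+///
+/// For instance (an illustrative sketch only: ``f``, ``pre`` and ``post`` are
+/// hypothetical names), if the focused term is the effectful call in:
+/// [> let y = f x in ...
+/// and ``f`` requires ``pre x`` and ensures ``post x y``, the analysis is
+/// expected to print assertions of the shape:
+/// [> assert(pre x);    (* pre assertion *)
+/// [> let y = f x in
+/// [> assert(post x y); (* post assertion *)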
+val analyze_effectful_term : + dbg:bool + -> with_gpre:bool + -> with_gpost:bool + -> res:exploration_result term + -> Tac unit + +let analyze_effectful_term dbg with_gpre with_gpost res = + let ge = res.ge in + let opt_c = res.tgt_comp in + (* Analyze the effectful term and check whether it is a 'let' or not *) + let ge1, studied_term, info, ret_bv, shadowed_bv, is_let = + begin match inspect res.res with + | Tv_Let _ _ bv0 ty fterm _ -> + (* Before pushing the binder, check if it shadows another variable. + * If it is the case, we will need it to correctly output the pre + * and post-assertions (because for the pre-assertions the variable + * will not be shadowed yet, while it will be the case for the post- + * assertions) *) + print_dbg dbg ("Restraining to: " ^ term_to_string fterm); + let shadowed_bv : option bv = + match genv_get_from_name ge (name_of_bv bv0) with + | None -> None + | Some (sbv, _) -> Some (fst sbv) + in + let ge1 = genv_push_bv ge bv0 ty false None in + (* If the bv name is "uu___", introduce a fresh variable and use it instead: + * the underscore might have been introduced when desugaring a let using + * tuples. If doing that is not necessary, the introduced variable will + * not appear in the generated assertions anyway. *) + let ge2, (bv1 : bv) = + let bvv0 = inspect_bv bv0 in + let _ = print_dbg dbg ("Variable bound in let: " ^ abv_to_string bv0) in + if unseal bvv0.bv_ppname = "uu___" (* this is a bit hacky *) + then genv_push_fresh_bv ge1 "ret" ty + else ge1, bv0 + in + let info = compute_eterm_info dbg ge2.env fterm in + (ge2, fterm, (info <: eterm_info), Some bv1, shadowed_bv, true) + | _ -> (ge, res.res, compute_eterm_info dbg ge.env res.res, None, None, false) + end + in + print_dbg dbg ("[> Focused term constructor: " ^ term_construct studied_term); + print_dbg dbg ("[> Environment information (after effect analysis):\n" ^ genv_to_string ge1); + (* Check if the considered term is an assert, in which case we will only + * display the precondition (otherwise we introduce too many assertions + * in the context) *) + let is_assert = Some? (term_is_assert_or_assume studied_term) in + (* Instantiate the refinements *) + (* TODO: use bv rather than term for ret_arg *) + let ret_arg = opt_tapply (fun x -> pack (Tv_Var x)) ret_bv in + let parents = List.Tot.map snd res.parents in + let ge2, asserts = + eterm_info_to_assertions dbg with_gpre with_gpost ge1 studied_term is_let + is_assert info ret_arg opt_c parents [] in + (* Simplify and filter *) + let asserts = simp_filter_assertions ge2.env simpl_norm_steps asserts in + (* Introduce fresh variables for the shadowed ones and substitute *) + let ge3, asserts = subst_shadowed_with_abs_in_assertions dbg ge2 shadowed_bv asserts in + (* If not a let, insert all the assertions before the term *) + let asserts = + if is_let then asserts + else mk_assertions (List.Tot.append asserts.pres asserts.posts) [] + in + (* Print *) + printout_success ge3 asserts + +[@plugin] +val pp_analyze_effectful_term : bool -> bool -> bool -> unit -> Tac unit +let pp_analyze_effectful_term dbg with_gpre with_gpost () = + try + let res = find_focused_term_in_current_goal dbg in + analyze_effectful_term dbg with_gpre with_gpost res; + end_proof () + with | MetaAnalysis msg -> printout_failure msg; end_proof () + | err -> (* Shouldn't happen, so transmit the exception for debugging *) raise err + +(*** Split conjunctions *) +/// Split an assert made of conjunctions so that there is one assert per +/// conjunction. We try to be a bit smart. 
For instance, if the assertion is of +/// the form: +/// [> assert(let Construct x1 ... xn = e in A1 /\ ... /\ An); +/// We generate: +/// [> assert(let Construct x1 ... xn = e in A1); +/// [> ... +/// [> assert(let Construct x1 ... xn = e in An); + +/// Remove ``b2t`` if it is the head of the term +val remove_b2t : term -> Tac term +let remove_b2t (t:term) : Tac term = + match inspect t with + | Tv_App hd (a, Q_Explicit) -> + begin match inspect hd with + | Tv_FVar fv -> + if fv_eq_name fv b2t_qn then a else t + | _ -> t + end + | _ -> t + +// TODO: gather all the functions like split_conjunctions, is_eq... +/// Try to destruct a term of the form '_ && _' or '_ /\ _' +val is_conjunction : term -> Tac (option (term & term)) +let is_conjunction t = + let t = remove_b2t t in + let hd, params = collect_app t in + match params with + | [(x,Q_Explicit);(y,Q_Explicit)] -> + begin match inspect hd with + | Tv_FVar fv -> + let fvn = inspect_fv fv in + if fvn = and_qn || fvn = ["Prims"; "op_AmpAmp"] + then Some (x, y) else None + | _ -> None + end + | _ -> None + +val split_conjunctions : term -> Tac (list term) + +let rec _split_conjunctions (ls : list term) (t : term) : Tac (list term) = + match is_conjunction t with + | None -> t :: ls + | Some (l, r) -> + let ls1 = _split_conjunctions ls r in + let ls2 = _split_conjunctions ls1 l in + ls2 + +let split_conjunctions t = + _split_conjunctions [] t + +/// Split a term of the form: +/// [> let Constuct x1 ... xn = x in A1 /\ ... /\ Am +/// into m terms: +/// [> let Constuct x1 ... xn = x in A1 +/// ... +/// [> let Constuct x1 ... xn = x in Am +val split_conjunctions_under_match : bool -> term -> Tac (list term) + +let split_conjunctions_under_match dbg t = + let t1 = remove_b2t t in + print_dbg dbg ("[> split_conjunctions_under_match: " ^ term_construct t1); + match inspect t1 with + | Tv_Match scrut ret_opt [(pat, br)] -> + let tl = split_conjunctions br in + map (fun x -> pack (Tv_Match scrut ret_opt [(pat, x)])) tl + | _ -> + (* Not of the proper shape: return the original term *) + [t] + +val split_assert_conjs : bool -> exploration_result term -> Tac unit +let split_assert_conjs dbg res = + let ge0 = res.ge in + (* Simplify the term (it may be an abstraction applied to some parameters) *) + let t = norm_term_env ge0.env simpl_norm_steps res.res in + (* Split the conjunctions *) + let conjs = split_conjunctions t in + (* If there is only one conjunction, check if it is of the following form + * and try to split: + * [> let Construct x1 .. xn = x in A1 /\ ... /\ Am + *) + let conjs = + if List.Tot.length conjs = 1 then split_conjunctions_under_match dbg t + else conjs + in + let asserts = mk_assertions conjs [] in + (* Print *) + printout_success ge0 asserts + +[@plugin] +val pp_split_assert_conjs : bool -> unit -> Tac unit +let pp_split_assert_conjs dbg () = + try + let res = find_focused_assert_in_current_goal dbg in + split_assert_conjs dbg res; + end_proof () + with | MetaAnalysis msg -> printout_failure msg; end_proof () + | err -> (* Shouldn't happen, so transmit the exception for debugging *) raise err + +(*** Term unfolding in assert *) +/// Unfold/rewrite a term in an assert. +/// If the term is a (recursive) top-level identifier, unfold it. +/// Otherwise look for an equality or a pure let-binding to replace it with. +/// If the assert is an equality, unfold/rewrite only on the side chosen by the user. 
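+///
+/// For instance (an illustrative sketch only: ``sq``, ``y`` and ``z`` are
+/// hypothetical names), given a top-level definition:
+/// [> let sq (x:int) : int = x * x
+/// calling the command on the occurrence of ``sq`` in:
+/// [> assert(sq y == z);
+/// is expected to produce the new assertion:
+/// [> assert(y * y == z);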
+ +// TODO: use "kind" keyword rather than type above +/// An equality kind +noeq type eq_kind = + | Eq_Dec : typ -> eq_kind (* = *) + | Eq_Undec : typ -> eq_kind (* == *) + | Eq_Hetero : typ -> typ -> eq_kind (* === *) + +/// Deconstruct an equality +// We use our own construct because ``term_as_formula`` doesn't always work +// TODO: update ``term_as_formula`` +val is_eq : bool -> term -> Tac (option (eq_kind & term & term)) +let is_eq dbg t = + let t = remove_b2t t in + print_dbg dbg ("[> is_eq: " ^ term_to_string t); + let hd, params = collect_app t in + print_dbg dbg ("- hd:\n" ^ term_to_string hd); + print_dbg dbg ("- parameters:\n" ^ list_to_string (fun (x, y) -> term_to_string x) params); + match inspect hd with + | Tv_FVar fv -> + begin match params with + | [(a,Q_Implicit);(x,Q_Explicit);(y,Q_Explicit)] -> + if is_any_fvar a [`%Prims.op_Equality; `%Prims.equals; "Prims.op_Equals"] then + Some ((Eq_Dec a), x, y) + else if is_any_fvar a [`%Prims.eq2; "Prims.op_Equals_Equals"] then + Some ((Eq_Undec a), x, y) + else None + | [(a,Q_Implicit);(b,Q_Implicit);(x,Q_Explicit);(y,Q_Explicit)] -> + if is_fvar a (`%Prims.op_Equals_Equals_Equals) then + Some ((Eq_Hetero a b), x, y) + else None + | _ -> None + end + | _ -> None + +/// Reconstruct an equality +val mk_eq : eq_kind -> term -> term -> Tot term +let mk_eq k t1 t2 = + match k with + | Eq_Dec ty -> + mk_app (`Prims.op_Equality) [(ty, Q_Implicit); (t1, Q_Explicit); (t2, Q_Explicit)] + | Eq_Undec ty -> + mk_app (`Prims.eq2) [(ty, Q_Implicit); (t1, Q_Explicit); (t2, Q_Explicit)] + | Eq_Hetero ty1 ty2 -> + mk_app Prims.(`( === )) [(ty1, Q_Implicit); (ty2, Q_Implicit); + (t1, Q_Explicit); (t2, Q_Explicit)] + +let formula_construct (f : formula) : Tac string = + match f with + | True_ -> "True_" + | False_ -> "False_" + | Comp _ _ _ -> "Comp" + | And _ _ -> "And" + | Or _ _ -> "Or" + | Not _ -> "Not" + | Implies _ _ -> "Implies" + | Iff _ _ -> "Iff" + | Forall _ _ _ -> "Forall" + | Exists _ _ _ -> "Exists" + | App _ _ -> "Apply" + | Name _ -> "Name" + | FV _ -> "FV" + | IntLit _ -> "IntLit" + | F_Unknown -> "F_Unknown" + +/// Check if a proposition is an equality which can be used to rewrite a term. +/// Return the operand of the equality which the term is equal to if it is the case. +val is_equality_for_term : bool -> term -> term -> Tac (option term) +let is_equality_for_term dbg tm p = + print_dbg dbg ("[> is_equality_for_term:" ^ + "\n- term: " ^ term_to_string tm ^ + "\n- prop: " ^ term_to_string p); + (* Specialize equality for bv - TODO: not sure if necessary, but I had problems + * in the past *) + let check_eq : term -> Tac bool = + match inspect tm with + | Tv_Var bv -> + (fun tm' -> match inspect tm' with | Tv_Var bv' -> bv_eq bv bv' | _ -> false) + | _ -> (fun tm' -> term_eq tm tm') + in + match is_eq dbg p with + | Some (ekind, l, r) -> + (* We ignore heterogeneous equality, because it risks to cause havoc at + * typing after substitution *) + print_dbg dbg ("Term is eq: " ^ term_to_string l ^ " = " ^ term_to_string r); + if Eq_Hetero? 
ekind then + begin + print_dbg dbg "Ignoring heterogeneous equality"; + None + end + else if check_eq l then Some r + else if check_eq r then Some l + else None + | _ -> + print_dbg dbg "Term is not eq"; + None + +val find_subequality : bool -> term -> term -> Tac (option term) +let find_subequality dbg tm p = + print_dbg dbg ("[> find_subequality:" ^ + "\n- ter : " ^ term_to_string tm ^ + "\n- props: " ^ term_to_string p); + let conjuncts = split_conjunctions p in + print_dbg dbg ("Conjuncts:\n" ^ list_to_string term_to_string conjuncts); + tryPick (is_equality_for_term dbg tm) conjuncts + +/// Look for an equality in a postcondition which can be used for rewriting. +val find_equality_from_post : + bool -> genv -> term -> bv -> typ -> term -> effect_info -> + list term_view -> list term_view -> Tac (genv & option term) +let find_equality_from_post dbg ge0 tm let_bv let_bvty ret_value einfo parents children = + print_dbg dbg "[> find_equality_from_post"; + let tinfo = get_type_info_from_type let_bvty in + (* Compute the post-condition *) + let ge1, _, post_prop = + pre_post_to_propositions dbg ge0 einfo.ei_type ret_value (Some (mk_binder let_bv let_bvty)) + tinfo einfo.ei_pre einfo.ei_post parents children + in + print_dbg dbg ("Computed post: " ^ option_to_string term_to_string post_prop); + (* Look for an equality in the post *) + let res = + match post_prop with + | None -> None + | Some p -> find_subequality dbg tm p + in + (* If we found something, we return the updated environment, + * otherwise we can return the original one *) + match res with + | None -> ge0, None + | Some tm -> ge1, Some tm + +/// Given a list of parent terms (as generated by ``explore_term``), look for an +/// equality given by a post-condition which can be used to replace a term. +val find_context_equality : + dbg:bool + -> ge0:genv + -> tm:term + -> parents:list term_view + -> children:list term_view + -> Tac (genv & option term) + +/// Auxiliary function which actually performs the search +let rec find_context_equality_aux dbg ge0 tm (opt_bv : option bv) + (parents children : list term_view) : + Tac (genv & option term) = + match parents with + | [] -> ge0, None + | tv :: parents' -> + print_dbg dbg ("[> find_context_equality:\n" ^ + "- term : " ^ term_to_string tm ^ "\n" ^ + "- parent: " ^ term_to_string tv); + (* We only consider let-bindings *) + match tv with + | Tv_Let _ _ bv' ty def _ -> + print_dbg dbg "Is Tv_Let"; + let tm_info = compute_eterm_info dbg ge0.env def in + let einfo = tm_info.einfo in + (* If the searched term is a bv and the current let is the one which + * introduces it: + * - if the term is effectful, use it + * - otherwise, try to use its postcondition. 
If we don't find any + * equalities, some there *) + let let_bv_is_tm = + match opt_bv with + | Some tm_bv -> bv_eq tm_bv bv' + | None -> false + in + if let_bv_is_tm && effect_type_is_pure einfo.ei_type then ge0, Some def + else + let ret_value = pack (Tv_Var bv') in + begin match find_equality_from_post dbg ge0 tm bv' ty ret_value + einfo parents children with + | ge1, Some p -> ge1, Some p + | _, None -> find_context_equality_aux dbg ge0 tm opt_bv parents' (tv :: children) + end + | _ -> find_context_equality_aux dbg ge0 tm opt_bv parents' (tv :: children) + +let find_context_equality dbg ge0 tm parents children = + let opt_bv = + match inspect tm with + | Tv_Var bv -> Some bv + | _ -> None + in + find_context_equality_aux dbg ge0 tm opt_bv parents children + +/// Replace a subterm by another term +val replace_term_in : bool -> term -> term -> term -> Tac term +let rec replace_term_in dbg from_term to_term tm = + if term_eq from_term tm then to_term else + match inspect tm with + | Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> tm + | Tv_App hd (a, qual) -> + let a' = replace_term_in dbg from_term to_term a in + let hd' = replace_term_in dbg from_term to_term hd in + pack (Tv_App hd' (a', qual)) + | Tv_Abs br body -> + let body' = replace_term_in dbg from_term to_term body in + pack (Tv_Abs br body') + | Tv_Arrow br c0 -> tm (* TODO: we might want to explore that *) + | Tv_Type _ -> tm + | Tv_Refine bv sort ref -> + let sort' = replace_term_in dbg from_term to_term sort in + let ref' = replace_term_in dbg from_term to_term ref in + pack (Tv_Refine bv sort' ref') + | Tv_Const _ -> tm + | Tv_Uvar _ _ -> tm + | Tv_Let recf attrs bv ty def body -> + (* GM 2023-04-27: leaving ty untouched, old code did not + descend into sort. *) + let def' = replace_term_in dbg from_term to_term def in + let body' = replace_term_in dbg from_term to_term body in + pack (Tv_Let recf attrs bv ty def' body') + | Tv_Match scrutinee ret_opt branches -> //AR: TODO: account for the returns annotation + (* Auxiliary function to explore the branches *) + let explore_branch (br : branch) : Tac branch = + (* Only explore the branch body *) + let pat, body = br in + let body' = replace_term_in dbg from_term to_term body in + (pat, body') + in + let scrutinee' = replace_term_in dbg from_term to_term scrutinee in + let branches' = map explore_branch branches in + pack (Tv_Match scrutinee' ret_opt branches') + | Tv_AscribedT e ty tac use_eq -> + let e' = replace_term_in dbg from_term to_term e in + let ty' = replace_term_in dbg from_term to_term ty in + pack (Tv_AscribedT e' ty' tac use_eq) + | Tv_AscribedC e c tac use_eq -> + let e' = replace_term_in dbg from_term to_term e in + pack (Tv_AscribedC e' c tac use_eq) + | _ -> + (* Unknown *) + tm + +val strip_implicit_parameters : term -> Tac term +let rec strip_implicit_parameters tm = + match inspect tm with + | Tv_App hd (a,qualif) -> + if Q_Implicit? 
qualif then strip_implicit_parameters hd else tm + | _ -> tm + +val unfold_in_assert_or_assume : bool -> exploration_result term -> Tac unit +let unfold_in_assert_or_assume dbg ares = + print_dbg dbg ("[> unfold_in_assert_or_assume:\n" ^ term_to_string ares.res); + (* Find the focused term inside the assert, and on which side of the + * equality if the assert is an equality *) + let find_focused_in_term t = + find_focused_term dbg false ares.ge ares.parents ares.tgt_comp t + in + let find_in_whole_term () : Tac _ = + match find_focused_in_term ares.res with + | Some res -> + ares.res, res, (fun x -> x <: Tac term), true + | None -> mfail "unfold_in_assert_or_assume: could not find a focused term in the assert" + in + (* - subterm: the subterm of the assertion in which we found the focused term + * (if an equality, left or right operand, otherwise whole assertion) + * - unf_res: the result of the exploration for the focused term inside the + * assertion, which gives the term to unfold + * - rebuild: a Tot function which, given a term, rebuilds the equality by + * replacing the above subterm with the given term + * - insert_before: whether to insert the new assertion before or after the + * current assertion in the user file *) + let subterm, unf_res, (rebuild : term -> Tac term), insert_before = + let _ = print_dbg dbg ("Assertion: " ^ term_to_string ares.res) in + match is_eq dbg ares.res with + | Some (kd, l, r) -> + print_dbg dbg "The assertion is an equality"; + begin match find_focused_in_term l with + | Some res -> + print_dbg dbg ("Found focused term in left operand:" ^ + "\n- left : " ^ term_to_string l ^ + "\n- right : " ^ term_to_string r ^ + "\n- focused: " ^ term_to_string res.res); + let rebuild t : Tac term = mk_eq kd t r in + l, res, rebuild, true + | None -> + begin match find_focused_in_term r with + | Some res -> + print_dbg dbg ("Found focused term in right operand:" ^ + "\n- left : " ^ term_to_string l ^ + "\n- right : " ^ term_to_string r ^ + "\n- focused: " ^ term_to_string res.res); + let rebuild (t : term) : Tac term = mk_eq kd l t in + r, res, rebuild, false + | None -> + mfail "unfold_in_assert_or_assume: could not find a focused term in the assert" + end + end + | None -> + print_dbg dbg "The assertion is not an equality"; + find_in_whole_term () + in + print_dbg dbg ("Found subterm in assertion/assumption:\n" ^ + "- subterm: " ^ term_to_string subterm ^ "\n" ^ + "- focused term: " ^ term_to_string unf_res.res); + (* Unfold the term *) + let res_view = inspect unf_res.res in + let ge1, opt_unf_tm = + match res_view with + | Tv_FVar fv -> + print_dbg dbg ("The focused term is a top identifier: " ^ fv_to_string fv); + (* The easy case: just use the normalizer *) + let fname = flatten_name (inspect_fv fv) in + let subterm' = norm_term_env ares.ge.env [delta_only [fname]; zeta] subterm in + print_dbg dbg ("Normalized subterm: " ^ term_to_string subterm'); + ares.ge, Some subterm' + | _ -> + (* Look for an equality given by a previous post-condition. In the case + * the term is a bv, we can also use the let-binding which introduces it, + * if it is pure. *) + let parents = List.Tot.map snd ares.parents in + let opt_bvty : option (bv & typ) = + match res_view with + | Tv_Var bv -> + print_dbg dbg ("The focused term is a local variable: " ^ bv_to_string bv); + (* Check that the binder was not introduced by an abstraction inside the assertion *) + if not (Some? 
(genv_get ares.ge bv)) then + mfail "unfold_in_assert_or_assume: can't unfold a variable locally introduced in an assertion"; + Some (bv, pack_ln Tv_Unknown) // FIXME + | _ -> + print_dbg dbg ("The focused term is an arbitrary term: " ^ term_to_string unf_res.res); + None + in + let ge1, eq_tm = find_context_equality dbg ares.ge unf_res.res parents [] in + (* Check if we found an equality *) + let opt_eq_tm = + match eq_tm with + | Some eq_tm -> Some eq_tm + | _ -> None + in + (* Apply it *) + let subterm' = + match opt_bvty, opt_eq_tm with + | Some bvty, Some eq_tm -> Some (apply_subst ge1.env subterm [(bvty, eq_tm)]) + | None, Some eq_tm -> Some (replace_term_in dbg unf_res.res eq_tm subterm) + | _ -> None + in + ge1, subterm' + in + (* If we couldn't unfold the term, check if it is a top-level identifier with + * implicit parameters (it may happen that the user calls the command on a + * top-level identifier which has implicit parameters without providing + * those parameters, in which case the focused term is the identifier applied + * to those implicits inferred by F*, and thus an app and not an fvar). + * Note that so far we have no way to check if the implicit parameters have + * been explicitly provided by the user or not, which is why we can't do better + * than greedy tests.*) + let ge2, unf_tm = + match opt_unf_tm with + | Some unf_tm -> ge1, unf_tm + | None -> + begin match inspect (strip_implicit_parameters unf_res.res) with + | Tv_FVar fv -> + print_dbg dbg ("The focused term is a top identifier with implicit parameters: " + ^ fv_to_string fv); + (* The easy case: just use the normalizer *) + let fname = flatten_name (inspect_fv fv) in + let subterm' = norm_term_env ge1.env [delta_only [fname]; zeta] subterm in + print_dbg dbg ("Normalized subterm: " ^ term_to_string subterm'); + ge1, subterm' + | _ -> + mfail ("unfold_in_assert_or_assume: " ^ + "couldn't find equalities with which to rewrite: " ^ + term_to_string unf_res.res) + end + in + (* Create the assertions to output *) + let final_assert = rebuild unf_tm in + let final_assert = prettify_term dbg final_assert in + print_dbg dbg ("-> Final assertion:\n" ^ term_to_string final_assert); + let asserts = + if insert_before then mk_assertions [final_assert] [] else mk_assertions [] [final_assert] + in + (* Introduce fresh variables for the shadowed ones and substitute *) + let ge3, asserts = subst_shadowed_with_abs_in_assertions dbg ge2 None asserts in + (* Output *) + printout_success ge3 asserts + +[@plugin] +val pp_unfold_in_assert_or_assume : bool -> unit -> Tac unit +let pp_unfold_in_assert_or_assume dbg () = + try + let res = find_focused_assert_in_current_goal dbg in + unfold_in_assert_or_assume dbg res; + end_proof () + with | MetaAnalysis msg -> printout_failure msg; end_proof () + | err -> (* Shouldn't happen, so transmit the exception for debugging *) raise err diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.Propositions.fst b/stage0/ulib/experimental/FStar.InteractiveHelpers.Propositions.fst new file mode 100644 index 00000000000..c0537d1e81f --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.Propositions.fst @@ -0,0 +1,44 @@ +module FStar.InteractiveHelpers.Propositions + +open FStar.List.Tot +open FStar.Tactics +open FStar.Mul +open FStar.InteractiveHelpers.Base +open FStar.InteractiveHelpers.ExploreTerm + +private +let term_eq = FStar.Reflection.TermEq.Simple.term_eq + +/// Propositions and assertions. 
+/// Assertions are propositions to be inserted in the F* code: we differentiate +/// between pre and post assertions, which are to be inserted before a point in +/// the code. For instance, if we analyze an effectful term in F*: +/// [> assert(y <> 0); // pre assertion +/// [> let z = x / y in // term of interest +/// [> assert(...); // post assertion + +#set-options "--z3rlimit 15 --fuel 0 --ifuel 1" + +/// Analyze a term to retrieve its effectful information + +let proposition_to_string p = term_to_string p + +let is_trivial_proposition p = + term_eq (`Prims.l_True) p + +let simp_filter_proposition (e:env) (steps:list norm_step) (p:proposition) : + Tac (list proposition) = + let prop1 = norm_term_env e steps p in + (* If trivial, filter *) + if term_eq (`Prims.l_True) prop1 then [] + else [prop1] + +let simp_filter_propositions (e:env) (steps:list norm_step) (pl:list proposition) : + Tac (list proposition) = + List.Tot.flatten (map (simp_filter_proposition e steps) pl) + +let simp_filter_assertions (e:env) (steps:list norm_step) (a:assertions) : + Tac assertions = + let pres = simp_filter_propositions e steps a.pres in + let posts = simp_filter_propositions e steps a.posts in + mk_assertions pres posts diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.Propositions.fsti b/stage0/ulib/experimental/FStar.InteractiveHelpers.Propositions.fsti new file mode 100644 index 00000000000..65a691d2abe --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.Propositions.fsti @@ -0,0 +1,52 @@ +module FStar.InteractiveHelpers.Propositions + +open FStar.Tactics.Effect +open FStar.Stubs.Reflection.Types + +/// Propositions and assertions. +/// Assertions are propositions to be inserted in the F* code: we differentiate +/// between pre and post assertions, which are to be inserted before a point in +/// the code. For instance, if we analyze an effectful term in F*: +/// [> assert(y <> 0); // pre assertion +/// [> let z = x / y in // term of interest +/// [> assert(...); // post assertion + +/// Analyze a term to retrieve its effectful information + +type proposition = term + +[@@plugin] +val proposition_to_string : proposition -> Tac string + +/// Propositions split between pre and post assertions +[@@plugin] +noeq type assertions = { + pres : list proposition; + posts : list proposition; +} + +let mk_assertions pres posts : assertions = + Mkassertions pres posts + +(*** Simplification *) +/// Whenever we generate assertions, we simplify them to make them cleaner, +/// prettier and remove the trivial ones. The normalization steps we apply +/// are listed below. +let simpl_norm_steps = [primops; simplify; iota] + +/// Simplify the propositions and filter the trivial ones. 
+/// Check if a proposition is trivial (i.e.: is True) +[@@plugin] +val is_trivial_proposition : proposition -> Tac bool + +[@@plugin] +val simp_filter_proposition (e:env) (steps:list norm_step) (p:proposition) : + Tac (list proposition) + +[@@plugin] +val simp_filter_propositions (e:env) (steps:list norm_step) (pl:list proposition) : + Tac (list proposition) + +[@@plugin] +val simp_filter_assertions (e:env) (steps:list norm_step) (a:assertions) : + Tac assertions diff --git a/stage0/ulib/experimental/FStar.InteractiveHelpers.fst b/stage0/ulib/experimental/FStar.InteractiveHelpers.fst new file mode 100644 index 00000000000..5ae34d12610 --- /dev/null +++ b/stage0/ulib/experimental/FStar.InteractiveHelpers.fst @@ -0,0 +1,11 @@ +module FStar.InteractiveHelpers + +/// FStar.InteractiveHelpers defines a library of meta functions to help the user +/// interact with F*. + +include FStar.InteractiveHelpers.Base +include FStar.InteractiveHelpers.ExploreTerm +include FStar.InteractiveHelpers.Propositions +include FStar.InteractiveHelpers.Effectful +include FStar.InteractiveHelpers.Output +include FStar.InteractiveHelpers.PostProcess diff --git a/stage0/ulib/experimental/FStar.MST.fst b/stage0/ulib/experimental/FStar.MST.fst new file mode 100644 index 00000000000..df1d3733c64 --- /dev/null +++ b/stage0/ulib/experimental/FStar.MST.fst @@ -0,0 +1,247 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.MST + +module P = FStar.Preorder +module W = FStar.Witnessed.Core +open FStar.Monotonic.Pure + +type pre_t (state:Type u#2) = state -> Type0 +type post_t (state:Type u#2) (a:Type u#a) = state -> a -> state -> Type0 + +type repr + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req:pre_t state) + (ens:post_t state a) + = + s0:state -> + DIV (a & state) + (as_pure_wp (fun p -> + req s0 /\ + (forall (x:a) (s1:state). (ens s0 x s1 /\ rel s0 s1) ==> p (x, s1)))) + +let return + (a:Type) + (x:a) + (state:Type u#2) + (rel:P.preorder state) + : repr a state rel + (fun _ -> True) + (fun s0 r s1 -> r == x /\ s0 == s1) + = + fun s0 -> x, s0 + +let bind + (a:Type) + (b:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:pre_t state) + (ens_f:post_t state a) + (req_g:a -> pre_t state) + (ens_g:a -> post_t state b) + (f:repr a state rel req_f ens_f) + (g:(x:a -> repr b state rel (req_g x) (ens_g x))) + : repr b state rel + (fun s0 -> req_f s0 /\ (forall x s1. ens_f s0 x s1 ==> (req_g x) s1)) + (fun s0 r s2 -> req_f s0 /\ (exists x s1. ens_f s0 x s1 /\ (req_g x) s1 /\ (ens_g x) s1 r s2)) + = + fun s0 -> + let x, s1 = f s0 in + (g x) s1 + +let subcomp + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:pre_t state) + (ens_f:post_t state a) + (req_g:pre_t state) + (ens_g:post_t state a) + (f:repr a state rel req_f ens_f) + : Pure (repr a state rel req_g ens_g) + (requires + (forall s. req_g s ==> req_f s) /\ + (forall s0 x s1. 
(req_g s0 /\ ens_f s0 x s1) ==> ens_g s0 x s1)) + (ensures fun _ -> True) + = + f + +let if_then_else + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_then:pre_t state) + (ens_then:post_t state a) + (req_else:pre_t state) + (ens_else:post_t state a) + (f:repr a state rel req_then ens_then) + (g:repr a state rel req_else ens_else) + (p:bool) + : Type + = + repr a state rel + (fun s -> (p ==> req_then s) /\ ((~ p) ==> req_else s)) + (fun s0 x s1 -> (p ==> ens_then s0 x s1) /\ ((~ p) ==> ens_else s0 x s1)) + +[@@ primitive_extraction] +reflectable +effect { + MSTATE (a:Type) + ([@@@ effect_param] state:Type u#2) + ([@@@ effect_param] rel:P.preorder state) + (req:pre_t state) + (ens:post_t state a) + with { repr; return; bind; subcomp; if_then_else } +} + +[@@ noextract_to "krml"] +let get (#state:Type u#2) (#rel:P.preorder state) () + : MSTATE state state rel + (fun _ -> True) + (fun s0 r s1 -> s0 == r /\ r == s1) + = + MSTATE?.reflect (fun s0 -> s0, s0) + +[@@ noextract_to "krml"] +let put (#state:Type u#2) (#rel:P.preorder state) (s:state) + : MSTATE unit state rel + (fun s0 -> rel s0 s) + (fun _ _ s1 -> s1 == s) + = + MSTATE?.reflect (fun _ -> (), s) + +assume +val witness (state:Type u#2) + (rel:P.preorder state) + (p:W.s_predicate state) + : MSTATE (W.witnessed state rel p) state rel + (fun s0 -> p s0 /\ W.stable state rel p) + (fun s0 _ s1 -> s0 == s1) + +assume +val recall (state:Type u#2) + (rel:P.preorder state) + (p:W.s_predicate state) + (w:W.witnessed state rel p) + : MSTATE unit state rel + (fun _ -> True) + (fun s0 _ s1 -> s0 == s1 /\ p s1) + + +(* + * AR: why do we need the first conjunct in the postcondition? + * + * without this some proofs that use `assert e by t` fail + * the way `assert e by t` works is that, it is desugared into `with_tactic e t` + * that is abstract and remains in the VC as is at some point, we take a pass over + * the VC, find the `with_tactic e t` nodes in it, farm out `G |= e by t` where `G` + * is the context at that point in the VC in the original VC, `with_tactic e t` + * is simply replace by `True`. + * So why is it OK to replace it by `True`, don't we lose the fact that `e` holds for + * the rest of the VC? + * In the wp world of things, this works fine, since the wp of `assert e by t` is + * (fun _ -> with_tactic e t /\ (e ==> ...)) + * i.e. the type of `assert e by t` already introduces a cut, so replacing it by + * `True` works fine. 
+ * + * But this doesn't work when we use the intricate `~ (wp (fun r -> r =!= x))` + * combinator to convert from wp to pre post + * + * Basically, the shape of the VC in that case becomes: + * (with_tactic e t /\ (((~ with_tactic e t) \/ (e /\ ...)) ==> ...)) + * + * In this VC, if we replace the first `with_tactic e t` with `True`, for the second conjunct, + * the solver can no longer reason that the first disjunct cannot hold + * + * The wp (fun _ -> True) below helps add that assumption to the second conjunct + *) + +let lift_pure_mst + (a:Type) + (wp:pure_wp a) + (state:Type u#2) + (rel:P.preorder state) + (f:eqtype_as_type unit -> PURE a wp) + : repr a state rel + (fun s0 -> wp (fun _ -> True)) + (fun s0 x s1 -> wp (fun _ -> True) /\ (~ (wp (fun r -> r =!= x \/ s0 =!= s1)))) + = + elim_pure_wp_monotonicity wp; + fun s0 -> + let x = f () in + x, s0 + +sub_effect PURE ~> MSTATE = lift_pure_mst + + +(* + * A polymonadic bind between DIV and MSTATE + * + * This is ultimately used when defining par and frame in Steel.Effect.fst (via NMST layer) + * par and frame try to compose reified Steel with Steel, since Steel is non total, its reification + * incurs a Div effect, and so, we need a way to compose Div and Steel + * + * To do so, we have to go all the way down and have a story for MST and NMST too + * + * This polymonadic bind gives us bare minimum to realize that + * It is quite imprecise, in that it doesn't say anything about the post of the Div computation + * That's because, the as_ensures combinator is not encoded for Div effect in the SMT, + * the way it is done for PURE and GHOST + * + * However, since the reification use case gives us Dv anyway, this is fine for now + *) +let bind_div_mst (a:Type) (b:Type) + (wp:pure_wp a) + (state:Type u#2) (rel:P.preorder state) (req:a -> pre_t state) (ens:a -> post_t state b) + (f:eqtype_as_type unit -> DIV a wp) (g:(x:a -> repr b state rel (req x) (ens x))) +: repr b state rel + (fun s0 -> wp (fun _ -> True) /\ (forall x. req x s0)) + (fun s0 y s1 -> exists x. (ens x) s0 y s1) += elim_pure_wp_monotonicity wp; + fun s0 -> + let x = f () in + (g x) s0 + +polymonadic_bind (DIV, MSTATE) |> MSTATE = bind_div_mst + + +let mst_assume (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : MSTATE unit state rel (fun _ -> True) (fun m0 _ m1 -> p /\ m0 == m1) + = + assume p + +let mst_admit (#state:Type u#2) (#rel:P.preorder state) (#a:Type) () + : MSTATE a state rel (fun _ -> True) (fun _ _ _ -> False) + = + admit () + +let mst_assert (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : MSTATE unit state rel (fun _ -> p) (fun m0 _ m1 -> p /\ m0 == m1) + = + assert p + +let lift_mst_total_mst (a:Type) + (state:Type u#2) (rel:P.preorder state) + (req:pre_t state) (ens:post_t state a) + (f:MSTTotal.repr a state rel req ens) +: repr a state rel req ens += fun s0 -> f s0 + +sub_effect MSTTotal.MSTATETOT ~> MSTATE = lift_mst_total_mst diff --git a/stage0/ulib/experimental/FStar.MSTTotal.fst b/stage0/ulib/experimental/FStar.MSTTotal.fst new file mode 100644 index 00000000000..23ed925c669 --- /dev/null +++ b/stage0/ulib/experimental/FStar.MSTTotal.fst @@ -0,0 +1,208 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.MSTTotal +module W = FStar.Witnessed.Core +module P = FStar.Preorder + +open FStar.Monotonic.Pure + +type pre_t (state:Type u#2) = state -> Type0 +type post_t (state:Type u#2) (a:Type u#a) = state -> a -> state -> Type0 + +type repr + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req:pre_t state) + (ens:post_t state a) + = + s0:state -> + PURE (a & state) + (as_pure_wp (fun p -> + req s0 /\ + (forall (x:a) (s1:state). (ens s0 x s1 /\ rel s0 s1) ==> p (x, s1)))) + +let return + (a:Type) + (x:a) + (state:Type u#2) + (rel:P.preorder state) + : repr a state rel + (fun _ -> True) + (fun s0 r s1 -> r == x /\ s0 == s1) + = + fun s0 -> x, s0 + +let bind + (a:Type) + (b:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:pre_t state) + (ens_f:post_t state a) + (req_g:a -> pre_t state) + (ens_g:a -> post_t state b) + (f:repr a state rel req_f ens_f) + (g:(x:a -> repr b state rel (req_g x) (ens_g x))) + : repr b state rel + (fun s0 -> req_f s0 /\ (forall x s1. ens_f s0 x s1 ==> (req_g x) s1)) + (fun s0 r s2 -> req_f s0 /\ (exists x s1. ens_f s0 x s1 /\ (req_g x) s1 /\ (ens_g x) s1 r s2)) + = + fun s0 -> + let x, s1 = f s0 in + (g x) s1 + +let subcomp + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:pre_t state) + (ens_f:post_t state a) + (req_g:pre_t state) + (ens_g:post_t state a) + (f:repr a state rel req_f ens_f) + : Pure (repr a state rel req_g ens_g) + (requires + (forall s. req_g s ==> req_f s) /\ + (forall s0 x s1. 
(req_g s0 /\ ens_f s0 x s1) ==> ens_g s0 x s1)) + (ensures fun _ -> True) + = + f + +let if_then_else + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_then:pre_t state) + (ens_then:post_t state a) + (req_else:pre_t state) + (ens_else:post_t state a) + (f:repr a state rel req_then ens_then) + (g:repr a state rel req_else ens_else) + (p:bool) + : Type + = + repr a state rel + (fun s -> (b2t p ==> req_then s) /\ ((~ (b2t p)) ==> req_else s)) + (fun s0 x s1 -> (b2t p ==> ens_then s0 x s1) /\ ((~ (b2t p)) ==> ens_else s0 x s1)) + +[@@ primitive_extraction] +total +reflectable +effect { + MSTATETOT (a:Type) + ([@@@ effect_param] state:Type u#2) + ([@@@ effect_param] rel:P.preorder state) + (req:pre_t state) + (ens:post_t state a) + with { repr; return; bind; subcomp; if_then_else } +} + +[@@ noextract_to "krml"] +let get (#state:Type u#2) (#rel:P.preorder state) () + : MSTATETOT state state rel + (fun _ -> True) + (fun s0 r s1 -> s0 == r /\ r == s1) + = + MSTATETOT?.reflect (fun s0 -> s0, s0) + +[@@ noextract_to "krml"] +let put (#state:Type u#2) (#rel:P.preorder state) (s:state) + : MSTATETOT unit state rel + (fun s0 -> rel s0 s) + (fun _ _ s1 -> s1 == s) + = + MSTATETOT?.reflect (fun _ -> (), s) + +assume +val witness (state:Type u#2) + (rel:P.preorder state) + (p:W.s_predicate state) + : MSTATETOT (W.witnessed state rel p) state rel + (fun s0 -> p s0 /\ W.stable state rel p) + (fun s0 _ s1 -> s0 == s1) + +assume +val recall (state:Type u#2) + (rel:P.preorder state) + (p:W.s_predicate state) + (w:W.witnessed state rel p) + : MSTATETOT unit state rel + (fun _ -> True) + (fun s0 _ s1 -> s0 == s1 /\ p s1) + + +(* + * AR: why do we need the first conjunct in the postcondition? + * + * without this some proofs that use `assert e by t` fail + * the way `assert e by t` works is that, it is desugared into `with_tactic e t` + * that is abstract and remains in the VC as is at some point, we take a pass over + * the VC, find the `with_tactic e t` nodes in it, farm out `G |= e by t` where `G` + * is the context at that point in the VC in the original VC, `with_tactic e t` + * is simply replace by `True`. + * So why is it OK to replace it by `True`, don't we lose the fact that `e` holds for + * the rest of the VC? + * In the wp world of things, this works fine, since the wp of `assert e by t` is + * (fun _ -> with_tactic e t /\ (e ==> ...)) + * i.e. the type of `assert e by t` already introduces a cut, so replacing it by + * `True` works fine. 
+ * + * But this doesn't work when we use the intricate `~ (wp (fun r -> r =!= x))` + * combinator to convert from wp to pre post + * + * Basically, the shape of the VC in that case becomes: + * (with_tactic e t /\ (((~ with_tactic e t) \/ (e /\ ...)) ==> ...)) + * + * In this VC, if we replace the first `with_tactic e t` with `True`, for the second conjunct, + * the solver can no longer reason that the first disjunct cannot hold + * + * The wp (fun _ -> True) below helps add that assumption to the second conjunct + *) + +let lift_pure_mst_total + (a:Type) + (wp:pure_wp a) + (state:Type u#2) + (rel:P.preorder state) + (f:eqtype_as_type unit -> PURE a wp) + : repr a state rel + (fun s0 -> wp (fun _ -> True)) + (fun s0 x s1 -> wp (fun _ -> True) /\ (~ (wp (fun r -> r =!= x \/ s0 =!= s1)))) + = + elim_pure_wp_monotonicity wp; + fun s0 -> + let x = f () in + x, s0 + +sub_effect PURE ~> MSTATETOT = lift_pure_mst_total + + +let mst_tot_assume (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : MSTATETOT unit state rel (fun _ -> True) (fun m0 _ m1 -> p /\ m0 == m1) + = + assume p + +let mst_tot_admit (#state:Type u#2) (#rel:P.preorder state) (#a:Type) () + : MSTATETOT a state rel (fun _ -> True) (fun _ _ _ -> False) + = + admit () + +let mst_tot_assert (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : MSTATETOT unit state rel (fun _ -> p) (fun m0 _ m1 -> p /\ m0 == m1) + = + assert p diff --git a/stage0/ulib/experimental/FStar.NMST.fst b/stage0/ulib/experimental/FStar.NMST.fst new file mode 100644 index 00000000000..fe30b2c40ba --- /dev/null +++ b/stage0/ulib/experimental/FStar.NMST.fst @@ -0,0 +1,224 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.NMST + +#set-options "--compat_pre_typed_indexed_effects" + +module W = FStar.Witnessed.Core +module P = FStar.Preorder + +module M = FStar.MST + +open FStar.Monotonic.Pure + +type tape = nat -> bool + +type repr + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req:M.pre_t state) + (ens:M.post_t state a) + = + (tape & nat) -> + M.MSTATE (a & nat) state rel req (fun s0 (x, _) s1 -> ens s0 x s1) + + +let return (a:Type) (x:a) (state:Type u#2) (rel:P.preorder state) + : repr a state rel (fun _ -> True) (fun s0 r s1 -> r == x /\ s0 == s1) + = + fun (_, n) -> x, n + +let bind + (a:Type) + (b:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:M.pre_t state) + (ens_f:M.post_t state a) + (req_g:a -> M.pre_t state) + (ens_g:a -> M.post_t state b) + (f:repr a state rel req_f ens_f) + (g:(x:a -> repr b state rel (req_g x) (ens_g x))) + : repr b state rel + (fun s0 -> req_f s0 /\ (forall x s1. ens_f s0 x s1 ==> (req_g x) s1)) + (fun s0 r s2 -> req_f s0 /\ (exists x s1. 
ens_f s0 x s1 /\ (req_g x) s1 /\ (ens_g x) s1 r s2)) + = + fun (t, n) -> + let x, n1 = f (t, n) in + (g x) (t, n1) + +let subcomp + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:M.pre_t state) + (ens_f:M.post_t state a) + (req_g:M.pre_t state) + (ens_g:M.post_t state a) + (f:repr a state rel req_f ens_f) + : Pure (repr a state rel req_g ens_g) + (requires + (forall s. req_g s ==> req_f s) /\ + (forall s0 x s1. (req_g s0 /\ ens_f s0 x s1) ==> ens_g s0 x s1)) + (ensures fun _ -> True) + = + f + +let if_then_else + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_then:M.pre_t state) + (ens_then:M.post_t state a) + (req_else:M.pre_t state) + (ens_else:M.post_t state a) + (f:repr a state rel req_then ens_then) + (g:repr a state rel req_else ens_else) + (p:bool) + : Type + = + repr a state rel + (fun s0 -> (p ==> req_then s0) /\ ((~ p) ==> req_else s0)) + (fun s0 x s1 -> (p ==> ens_then s0 x s1) /\ ((~ p) ==> ens_else s0 x s1)) + +[@@ primitive_extraction] +reflectable +effect { + NMSTATE (a:Type) + ([@@@ effect_param] state:Type u#2) + ([@@@ effect_param] rel:P.preorder state) + (req:M.pre_t state) + (ens:M.post_t state a) + with { repr; return; bind; subcomp; if_then_else } +} + +[@@ noextract_to "krml"] +let get (#state:Type u#2) (#rel:P.preorder state) () + : NMSTATE state state rel + (fun _ -> True) + (fun s0 s s1 -> s0 == s /\ s == s1) + = + NMSTATE?.reflect (fun (_, n) -> MST.get (), n) + +[@@ noextract_to "krml"] +let put (#state:Type u#2) (#rel:P.preorder state) (s:state) + : NMSTATE unit state rel + (fun s0 -> rel s0 s) + (fun _ _ s1 -> s1 == s) + = + NMSTATE?.reflect (fun (_, n) -> MST.put s, n) + + +[@@ noextract_to "krml"] +let witness (state:Type u#2) (rel:P.preorder state) (p:W.s_predicate state) + : NMSTATE (W.witnessed state rel p) state rel + (fun s0 -> p s0 /\ W.stable state rel p) + (fun s0 _ s1 -> s0 == s1) + = + NMSTATE?.reflect (fun (_, n) -> M.witness state rel p, n) + +[@@ noextract_to "krml"] +let recall (state:Type u#2) + (rel:P.preorder state) + (p:W.s_predicate state) + (w:W.witnessed state rel p) + : NMSTATE unit state rel + (fun _ -> True) + (fun s0 _ s1 -> s0 == s1 /\ p s1) + = + NMSTATE?.reflect (fun (_, n) -> M.recall state rel p w, n) + +[@@ noextract_to "krml"] +let sample (#state:Type u#2) (#rel:P.preorder state) () + : NMSTATE bool state rel + (fun _ -> True) + (fun s0 _ s1 -> s0 == s1) + = + NMSTATE?.reflect (fun (t, n) -> t n, n+1) + +let lift_pure_nmst + (a:Type) + (wp:pure_wp a) + (state:Type u#2) + (rel:P.preorder state) + (f:eqtype_as_type unit -> PURE a wp) + : repr a state rel + (fun s0 -> wp (fun _ -> True)) + (fun s0 x s1 -> wp (fun _ -> True) /\ (~ (wp (fun r -> r =!= x \/ s0 =!= s1)))) + = + fun (_, n) -> + elim_pure_wp_monotonicity wp; + let x = f () in + x, n + +sub_effect PURE ~> NMSTATE = lift_pure_nmst + + +(* + * A polymonadic bind between DIV and NMSTATE + * + * This is ultimately used when defining par and frame in Steel.Effect.fst + * par and frame try to compose reified Steel with Steel, since Steel is non total, its reification + * incurs a Div effect, and so, we need a way to compose Div and Steel + * + * To do so, we have to go all the way down and have a story for MST and NMST too + * + * This polymonadic bind gives us bare minimum to realize that + * It is quite imprecise, in that it doesn't say anything about the post of the Div computation + * That's because, the as_ensures combinator is not encoded for Div effect in the SMT, + * the way it is done for PURE and GHOST + * + * However, since the 
reification use case gives us Dv anyway, this is fine for now + *) +let bind_div_nmst (a:Type) (b:Type) + (wp:pure_wp a) + (state:Type u#2) (rel:P.preorder state) (req:a -> M.pre_t state) (ens:a -> M.post_t state b) + (f:eqtype_as_type unit -> DIV a wp) (g:(x:a -> repr b state rel (req x) (ens x))) +: repr b state rel + (fun s0 -> wp (fun _ -> True) /\ (forall x. req x s0)) + (fun s0 y s1 -> exists x. (ens x) s0 y s1) += elim_pure_wp_monotonicity wp; + fun s0 -> + let x = f () in + (g x) s0 + +polymonadic_bind (DIV, NMSTATE) |> NMSTATE = bind_div_nmst + + +let nmst_assume (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : NMSTATE unit state rel (fun _ -> True) (fun m0 _ m1 -> p /\ m0 == m1) + = + assume p + +let nmst_admit (#state:Type u#2) (#rel:P.preorder state) (#a:Type) () + : NMSTATE a state rel (fun _ -> True) (fun _ _ _ -> False) + = + admit () + +let nmst_assert (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : NMSTATE unit state rel (fun _ -> p) (fun m0 _ m1 -> p /\ m0 == m1) + = + assert p + +let lift_nmst_total_nmst (a:Type) (state:Type u#2) (rel:P.preorder state) + (req:M.pre_t state) (ens:M.post_t state a) + (f:NMSTTotal.repr a state rel req ens) +: repr a state rel req ens += fun (t, n) -> f (t, n) + +sub_effect NMSTTotal.NMSTATETOT ~> NMSTATE = lift_nmst_total_nmst diff --git a/stage0/ulib/experimental/FStar.NMSTTotal.fst b/stage0/ulib/experimental/FStar.NMSTTotal.fst new file mode 100644 index 00000000000..770911b170c --- /dev/null +++ b/stage0/ulib/experimental/FStar.NMSTTotal.fst @@ -0,0 +1,183 @@ +(* + Copyright 2020 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.NMSTTotal + +module W = FStar.Witnessed.Core +module P = FStar.Preorder + +module M = FStar.MSTTotal + +open FStar.Monotonic.Pure + +type tape = nat -> bool + +type repr + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req:M.pre_t state) + (ens:M.post_t state a) + = + (tape & nat) -> + M.MSTATETOT (a & nat) state rel req (fun s0 (x, _) s1 -> ens s0 x s1) + + +let return (a:Type) (x:a) (state:Type u#2) (rel:P.preorder state) + : repr a state rel (fun _ -> True) (fun s0 r s1 -> r == x /\ s0 == s1) + = + fun (_, n) -> x, n + +let bind + (a:Type) + (b:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:M.pre_t state) + (ens_f:M.post_t state a) + (req_g:a -> M.pre_t state) + (ens_g:a -> M.post_t state b) + (f:repr a state rel req_f ens_f) + (g:(x:a -> repr b state rel (req_g x) (ens_g x))) + : repr b state rel + (fun s0 -> req_f s0 /\ (forall x s1. ens_f s0 x s1 ==> (req_g x) s1)) + (fun s0 r s2 -> req_f s0 /\ (exists x s1. ens_f s0 x s1 /\ (req_g x) s1 /\ (ens_g x) s1 r s2)) + = + fun (t, n) -> + let x, n1 = f (t, n) in + (g x) (t, n1) + +let subcomp + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_f:M.pre_t state) + (ens_f:M.post_t state a) + (req_g:M.pre_t state) + (ens_g:M.post_t state a) + (f:repr a state rel req_f ens_f) + : Pure (repr a state rel req_g ens_g) + (requires + (forall s. req_g s ==> req_f s) /\ + (forall s0 x s1. 
(req_g s0 /\ ens_f s0 x s1) ==> ens_g s0 x s1)) + (ensures fun _ -> True) + = + f + +let if_then_else + (a:Type) + (state:Type u#2) + (rel:P.preorder state) + (req_then:M.pre_t state) + (ens_then:M.post_t state a) + (req_else:M.pre_t state) + (ens_else:M.post_t state a) + (f:repr a state rel req_then ens_then) + (g:repr a state rel req_else ens_else) + (p:bool) + : Type + = + repr a state rel + (fun s0 -> (p ==> req_then s0) /\ ((~ p) ==> req_else s0)) + (fun s0 x s1 -> (p ==> ens_then s0 x s1) /\ ((~ p) ==> ens_else s0 x s1)) + +[@@ primitive_extraction] +total +reflectable +effect { + NMSTATETOT (a:Type) + ([@@@ effect_param] state:Type u#2) + ([@@@ effect_param] rel:P.preorder state) + (req:M.pre_t state) + (ens:M.post_t state a) + with { repr; return; bind; subcomp; if_then_else } +} + +[@@ noextract_to "krml"] +let get (#state:Type u#2) (#rel:P.preorder state) () + : NMSTATETOT state state rel + (fun _ -> True) + (fun s0 s s1 -> s0 == s /\ s == s1) + = + NMSTATETOT?.reflect (fun (_, n) -> MSTTotal.get (), n) + +[@@ noextract_to "krml"] +let put (#state:Type u#2) (#rel:P.preorder state) (s:state) + : NMSTATETOT unit state rel + (fun s0 -> rel s0 s) + (fun _ _ s1 -> s1 == s) + = + NMSTATETOT?.reflect (fun (_, n) -> MSTTotal.put s, n) + + +[@@ noextract_to "krml"] +let witness (state:Type u#2) (rel:P.preorder state) (p:W.s_predicate state) + : NMSTATETOT (W.witnessed state rel p) state rel + (fun s0 -> p s0 /\ W.stable state rel p) + (fun s0 _ s1 -> s0 == s1) + = + NMSTATETOT?.reflect (fun (_, n) -> M.witness state rel p, n) + +[@@ noextract_to "krml"] +let recall (state:Type u#2) + (rel:P.preorder state) + (p:W.s_predicate state) + (w:W.witnessed state rel p) + : NMSTATETOT unit state rel + (fun _ -> True) + (fun s0 _ s1 -> s0 == s1 /\ p s1) + = + NMSTATETOT?.reflect (fun (_, n) -> M.recall state rel p w, n) + +[@@ noextract_to "krml"] +let sample (#state:Type u#2) (#rel:P.preorder state) () + : NMSTATETOT bool state rel + (fun _ -> True) + (fun s0 _ s1 -> s0 == s1) + = + NMSTATETOT?.reflect (fun (t, n) -> t n, n+1) + +let lift_pure_nmst + (a:Type) + (wp:pure_wp a) + (state:Type u#2) + (rel:P.preorder state) + (f:eqtype_as_type unit -> PURE a wp) + : repr a state rel + (fun s0 -> wp (fun _ -> True)) + (fun s0 x s1 -> wp (fun _ -> True) /\ (~ (wp (fun r -> r =!= x \/ s0 =!= s1)))) + = + fun (_, n) -> + elim_pure_wp_monotonicity wp; + let x = f () in + x, n + +sub_effect PURE ~> NMSTATETOT = lift_pure_nmst + +let nmst_tot_assume (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : NMSTATETOT unit state rel (fun _ -> True) (fun m0 _ m1 -> p /\ m0 == m1) + = + assume p + +let nmst_tot_admit (#state:Type u#2) (#rel:P.preorder state) (#a:Type) () + : NMSTATETOT a state rel (fun _ -> True) (fun _ _ _ -> False) + = + admit () + +let nmst_tot_assert (#state:Type u#2) (#rel:P.preorder state) (p:Type) + : NMSTATETOT unit state rel (fun _ -> p) (fun m0 _ m1 -> p /\ m0 == m1) + = + assert p diff --git a/stage0/ulib/experimental/FStar.OrdMap.fst b/stage0/ulib/experimental/FStar.OrdMap.fst new file mode 100644 index 00000000000..78857934fcd --- /dev/null +++ b/stage0/ulib/experimental/FStar.OrdMap.fst @@ -0,0 +1,146 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.OrdMap + +open FStar.OrdSet +open FStar.FunctionalExtensionality +module F = FStar.FunctionalExtensionality + +let map_t (k:eqtype) (v:Type) (f:cmp k) (d:ordset k f) = + g:F.restricted_t k (fun _ -> option v){forall x. mem x d == Some? (g x)} + +noeq +type ordmap (k:eqtype) (v:Type) (f:cmp k) = + | Mk_map: d:ordset k f -> m:map_t k v f d -> ordmap k v f + +let empty (#k:eqtype) (#v:Type) #f = + let d = OrdSet.empty in + let g = F.on_dom k (fun x -> None) in + Mk_map d g + +let const_on (#k:eqtype) (#v:Type) #f d x = + let g = F.on_dom k (fun y -> if mem y d then Some x else None) in + Mk_map d g + +let select (#k:eqtype) (#v:Type) #f x m = (Mk_map?.m m) x + +let insert (#a:eqtype) (#f:cmp a) (x:a) (s:ordset a f) = union #a #f (singleton #a #f x) s + +let update (#k:eqtype) (#v:Type) #f x y m = + let s' = insert x (Mk_map?.d m) in + let g' = F.on_dom k (fun (x':k) -> if x' = x then Some y else (Mk_map?.m m) x') in + Mk_map s' g' + +let contains (#k:eqtype) (#v:Type) #f x m = mem x (Mk_map?.d m) + +let dom (#k:eqtype) (#v:Type) #f m = (Mk_map?.d m) + +let remove (#k:eqtype) (#v:Type) #f x m = + let s' = remove x (Mk_map?.d m) in + let g' = F.on_dom k (fun x' -> if x' = x then None else (Mk_map?.m m) x') in + Mk_map s' g' + +let choose (#k:eqtype) (#v:Type) #f m = + match OrdSet.choose (Mk_map?.d m) with + | None -> None + | Some x -> Some (x, Some?.v ((Mk_map?.m m) x)) + +let size (#k:eqtype) (#v:Type) #f m = OrdSet.size (Mk_map?.d m) + +let equal (#k:eqtype) (#v:Type) (#f:cmp k) (m1:ordmap k v f) (m2:ordmap k v f) = + forall x. 
select #k #v #f x m1 == select #k #v #f x m2 + +let eq_intro (#k:eqtype) (#v:Type) #f m1 m2 = () + +let eq_lemma (#k:eqtype) (#v:Type) #f m1 m2 = + let Mk_map s1 g1 = m1 in + let Mk_map s2 g2 = m2 in + let _ = cut (feq g1 g2) in + let _ = OrdSet.eq_lemma s1 s2 in + () + +let upd_order (#k:eqtype) (#v:Type) #f x y x' y' m = + let (Mk_map s1 g1) = update #k #v #f x' y' (update #k #v #f x y m) in + let (Mk_map s2 g2) = update #k #v #f x y (update #k #v #f x' y' m) in + cut (feq g1 g2) + +let upd_same_k (#k:eqtype) (#v:Type) #f x y y' m = + let (Mk_map s1 g1) = update #k #v #f x y m in + let (Mk_map s2 g2) = update #k #v #f x y (update #k #v #f x y' m) in + cut (feq g1 g2) + +let sel_upd1 (#k:eqtype) (#v:Type) #f x y m = () + +let sel_upd2 (#k:eqtype) (#v:Type) #f x y x' m = () + +let sel_empty (#k:eqtype) (#v:Type) #f x = () + +let sel_contains (#k:eqtype) (#v:Type) #f x m = () + +let contains_upd1 (#k:eqtype) (#v:Type) #f x y x' m = () + +let contains_upd2 (#k:eqtype) (#v:Type) #f x y x' m = () + +let contains_empty (#k:eqtype) (#v:Type) #f x = () + +let contains_remove (#k:eqtype) (#v:Type) #f x y m = () + +let eq_remove (#k:eqtype) (#v:Type) #f x m = + let (Mk_map s g) = m in + let m' = remove #k #v #f x m in + let (Mk_map s' g') = m' in + let _ = cut (feq g g') in + () + +let choose_empty (#k:eqtype) (#v:Type) #f = () + +private val dom_empty_helper: #k:eqtype -> #v:Type -> #f:cmp k -> m:ordmap k v f + -> Lemma (requires (True)) + (ensures ((dom #k #v #f m = OrdSet.empty) ==> + (m == empty #k #v #f))) +let dom_empty_helper (#k:eqtype) (#v:Type) #f m = + let (Mk_map s g) = m in + if (not (s = OrdSet.empty)) then () + else + let (Mk_map s' g') = empty #k #v #f in + cut (feq g g') + +let choose_m (#k:eqtype) (#v:Type) #f m = + dom_empty_helper #k #v #f m; + let c = choose #k #v #f m in + match c with + | None -> () + | Some (x, y) -> + let m' = remove #k #v #f x m in + let (Mk_map s' g') = m' in + let (Mk_map s'' g'') = update #k #v #f x y m' in + cut (feq (Mk_map?.m m) g'') + +let size_empty (#k:eqtype) (#v:Type) #f = () + +let size_remove (#k:eqtype) (#v:Type) #f y m = () + +let dom_lemma (#k:eqtype) (#v:Type) #f x m = () + +let contains_const_on (#k:eqtype) (#v:Type) #f d x y = () + +let select_const_on (#k:eqtype) (#v:Type) #f d x y = () + +let sel_rem1 (#k:eqtype) (#v:Type) #f x m = () + +let sel_rem2 (#k:eqtype) (#v:Type) #f x x' m = () + +let rem_upd (#k:eqtype) (#v:Type) #f x y x' m = () diff --git a/stage0/ulib/experimental/FStar.OrdMap.fsti b/stage0/ulib/experimental/FStar.OrdMap.fsti new file mode 100644 index 00000000000..445cf9b95f8 --- /dev/null +++ b/stage0/ulib/experimental/FStar.OrdMap.fsti @@ -0,0 +1,183 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.OrdMap + +open FStar.OrdSet + +(* TODO (KM) : move me this should go in a common file on relations *) +type total_order (a:eqtype) (f: (a -> a -> Tot bool)) = + (forall a1 a2. (f a1 a2 /\ f a2 a1) ==> a1 = a2) (* anti-symmetry *) + /\ (forall a1 a2 a3. 
f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *) + /\ (forall a1 a2. f a1 a2 \/ f a2 a1) (* totality *) + +let cmp (a:eqtype) = f:(a -> a -> Tot bool){total_order a f} + +val ordmap (k:eqtype) (v:Type u#a) (f:cmp k) : Type u#a + +val empty : #key:eqtype -> #value:Type -> #f:cmp key -> Tot (ordmap key value f) +val const_on: #key:eqtype -> #value:Type -> #f:cmp key -> d:ordset key f -> x:value -> Tot (ordmap key value f) +val select : #key:eqtype -> #value:Type -> #f:cmp key -> k:key + -> m:ordmap key value f -> Tot (option value) +val update : #key:eqtype -> #value:Type -> #f:cmp key -> key -> value + -> m:ordmap key value f -> Tot (ordmap key value f) +val contains: #key:eqtype -> #value:Type -> #f:cmp key -> key -> ordmap key value f + -> Tot bool +val dom : #key:eqtype -> #value:Type -> #f:cmp key -> m:ordmap key value f -> + Tot (ordset key f) + +val remove : #key:eqtype -> #value:Type -> #f:cmp key -> key + -> ordmap key value f -> Tot (ordmap key value f) +val choose : #key:eqtype -> #value:Type -> #f:cmp key -> ordmap key value f + -> Tot (option (key & value)) + +val size : #key:eqtype -> #value:Type -> #f:cmp key -> ordmap key value f + -> Tot nat + +val equal (#k:eqtype) (#v:Type) (#f:cmp k) (m1:ordmap k v f) (m2:ordmap k v f) : prop + +val eq_intro: #k:eqtype -> #v:Type -> #f:cmp k -> m1:ordmap k v f -> m2:ordmap k v f + -> Lemma (requires (forall x. select #k #v #f x m1 == select #k #v #f x m2)) + (ensures (equal m1 m2)) + [SMTPat (equal m1 m2)] + +val eq_lemma: #k:eqtype -> #v:Type -> #f:cmp k -> m1:ordmap k v f -> m2:ordmap k v f + -> Lemma (requires (equal m1 m2)) + (ensures (m1 == m2)) + [SMTPat (equal m1 m2)] + +val upd_order: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:v -> x':k -> y':v + -> m:ordmap k v f + -> Lemma (requires (x =!= x')) + (ensures (equal (update #k #v #f x y (update #k #v #f x' y' m)) + (update #k #v #f x' y' (update #k #v #f x y m)))) + [SMTPat (update #k #v #f x y (update #k #v #f x' y' m))] //NS:This pattern is too aggresive; it will fire for any pair of updates + +val upd_same_k: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:v -> y':v + -> m:ordmap k v f + -> Lemma (requires (True)) + (ensures (equal (update #k #v #f x y (update #k #v #f x y' m)) + (update #k #v #f x y m))) + [SMTPat (update #k #v #f x y (update #k #v #f x y' m))] //NS:This pattern is too aggresive; it will fire for any pair of updates + +val sel_upd1: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:v -> m:ordmap k v f + -> Lemma (requires True) (ensures select #k #v #f x + (update #k #v #f x y m) == Some y) + [SMTPat (select #k #v #f x (update #k #v #f x y m))] + +val sel_upd2: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:v -> x':k -> m:ordmap k v f + -> Lemma (requires True) + (ensures (x =!= x' ==> (select #k #v #f x' (update #k #v #f x y m) + == select #k #v #f x' m))) + [SMTPat (select #k #v #f x' (update #k #v #f x y m))] + +val sel_empty: #k:eqtype -> #v:Type -> #f:cmp k -> x:k + -> Lemma (requires True) + (ensures (select #k #v #f x (empty #k #v #f) == None)) + [SMTPat (select #k #v #f x (empty #k #v #f))] + +val sel_contains: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> m:ordmap k v f + -> Lemma (requires (True)) + (ensures (contains #k #v #f x m = Some? 
(select #k #v #f x m))) + [SMTPat (select #k #v #f x m); SMTPat (contains #k #v #f x m)] + +val contains_upd1: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:v -> x':k + -> m:ordmap k v f + -> Lemma (requires True) + (ensures (contains #k #v #f x' (update #k #v #f x y m) = + (x = x' || contains #k #v #f x' m))) + [SMTPat (contains #k #v #f x' (update #k #v #f x y m))] + +val contains_upd2: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:v -> x':k + -> m:ordmap k v f + -> Lemma (requires True) + (ensures (x =!= x' ==> (contains #k #v #f x' (update #k #v #f x y m) + = contains #k #v #f x' m))) + [SMTPat (contains #k #v #f x' (update #k #v #f x y m))] + +val contains_empty: #k:eqtype -> #v:Type -> #f:cmp k -> x:k + -> Lemma (requires True) + (ensures (not (contains #k #v #f x (empty #k #v #f)))) + [SMTPat (contains #k #v #f x (empty #k #v #f))] + +val contains_remove: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:k -> m:ordmap k v f + -> Lemma (requires True) + (ensures (contains #k #v #f x (remove #k #v #f y m) = + (contains #k #v #f x m && not (x = y)))) + [SMTPat (contains #k #v #f x (remove #k #v #f y m))] + +val eq_remove: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> m:ordmap k v f + -> Lemma (requires (not (contains #k #v #f x m))) + (ensures (equal m (remove #k #v #f x m))) + [SMTPat (remove #k #v #f x m)] + +val choose_empty: #k:eqtype -> #v:Type -> #f:cmp k + -> Lemma (requires True) (ensures (None? (choose #k #v #f + (empty #k #v #f)))) + [SMTPat (choose #k #v #f (empty #k #v #f))] + +val choose_m: #k:eqtype -> #v:Type -> #f:cmp k -> m:ordmap k v f + -> Lemma (requires (~ (equal m (empty #k #v #f)))) + (ensures (Some? (choose #k #v #f m) /\ + (select #k #v #f (fst (Some?.v (choose #k #v #f m))) m == + Some (snd (Some?.v (choose #k #v #f m)))) /\ + (equal m (update #k #v #f (fst (Some?.v (choose #k #v #f m))) + (snd (Some?.v (choose #k #v #f m))) + (remove #k #v #f (fst (Some?.v (choose #k #v #f m))) m))))) + [SMTPat (choose #k #v #f m)] + +val size_empty: #k:eqtype -> #v:Type -> #f:cmp k + -> Lemma (requires True) + (ensures (size #k #v #f (empty #k #v #f) = 0)) + [SMTPat (size #k #v #f (empty #k #v #f))] + +val size_remove: #k:eqtype -> #v:Type -> #f:cmp k -> y:k -> m:ordmap k v f + -> Lemma (requires (contains #k #v #f y m)) + (ensures (size #k #v #f m = size #k #v #f (remove #k #v #f y m) + 1)) + [SMTPat (size #k #v #f (remove #k #v #f y m))] + +val dom_lemma: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> m:ordmap k v f + -> Lemma (requires True) + (ensures (contains #k #v #f x m <==> + OrdSet.mem #k #f x (dom #k #v #f m))) + [SMTPat (mem #k #f x (dom #k #v #f m))] + +val contains_const_on: #k:eqtype -> #v:Type -> #f:cmp k -> d:ordset k f -> x:v -> y:k + -> Lemma (requires (True)) + (ensures (mem y d = contains y (const_on d x))) + //(contains y (const_on d x) ==> Some?.v (select p w) = x))) + [SMTPat (contains #k #v #f y (const_on #k #v #f d x))] + +val select_const_on: #k:eqtype -> #v:Type -> #f:cmp k -> d:ordset k f -> x:v -> y:k + -> Lemma (requires True) + (ensures (mem y d ==> (contains y (const_on d x) /\ Some?.v (select y (const_on d x)) == x))) + [SMTPat (select #k #v #f y (const_on #k #v #f d x))] + +val sel_rem1: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> m:ordmap k v f + -> Lemma (requires True) (ensures select #k #v #f x + (remove #k #v #f x m) == None) + [SMTPat (select #k #v #f x (remove #k #v #f x m))] + +val sel_rem2: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> x':k -> m:ordmap k v f + -> Lemma (requires True) (ensures (x =!= x' ==> + select #k #v #f x' + (remove #k #v #f x 
m) == select #k #v #f x' m)) + [SMTPat (select #k #v #f x' (remove #k #v #f x m))] + +val rem_upd: #k:eqtype -> #v:Type -> #f:cmp k -> x:k -> y:v -> x':k -> m:ordmap k v f + -> Lemma (requires (True)) (ensures (x =!= x' ==> + equal (update #k #v #f x y (remove #k #v #f x' m)) + (remove #k #v #f x' (update #k #v #f x y m)))) + [SMTPat (update #k #v #f x y (remove #k #v #f x' m))] diff --git a/stage0/ulib/experimental/FStar.OrdMapProps.fst b/stage0/ulib/experimental/FStar.OrdMapProps.fst new file mode 100644 index 00000000000..87c998314a6 --- /dev/null +++ b/stage0/ulib/experimental/FStar.OrdMapProps.fst @@ -0,0 +1,26 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.OrdMapProps + +open FStar.OrdMap + +val fold: #k:eqtype -> #v:Type -> #a:Type -> #f:cmp k -> (k -> v -> a -> Tot a) + -> m:ordmap k v f -> a -> Tot a (decreases (size m)) +let rec fold #k #v #t #f g m a = + if size m = 0 then a + else + let Some (k, v) = choose m in + fold g (remove k m) (g k v a) diff --git a/stage0/ulib/experimental/FStar.OrdSet.fst b/stage0/ulib/experimental/FStar.OrdSet.fst new file mode 100644 index 00000000000..5a3bd3069ac --- /dev/null +++ b/stage0/ulib/experimental/FStar.OrdSet.fst @@ -0,0 +1,773 @@ +(* + Copyright 2008-2022 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.OrdSet + +[@@do_not_unrefine] +type ordset a f = l:(list a){sorted f l} + +let hasEq_ordset _ _ = () + +let rec simple_induction #t #f (p: ordset t f -> Type0) (x: ordset t f) + : Lemma (requires p [] /\ (forall (l: ordset t f{Cons? l}). p (Cons?.tl l) ==> p l)) + (ensures p x) = match x with + | [] -> () + | ph::pt -> simple_induction p pt; + assert (p (Cons?.tl (ph::pt))) + +let rec base_induction #t #f (p: ordset t f -> Type0) (x: ordset t f) + : Lemma (requires (forall (l: ordset t f{List.Tot.Base.length l < 2}). p l) + /\ (forall (l: ordset t f{Cons? l}). p (Cons?.tl l) ==> p l)) + (ensures p x) + (decreases List.Tot.Base.length x) = + if List.Tot.Base.length x < 2 then () + else match x with + | ph::pt -> base_induction p pt; + assert (p (Cons?.tl (ph::pt))) + +let empty #_ #_ = [] + +let tail #a #f s = Cons?.tl s <: ordset a f +let head #_ #_ s = Cons?.hd s + +let mem #_ #_ x s = List.Tot.mem x s + +(* In case snoc-based List implementation is optimized, we use library ones, + but we additionally supply them with relevant postconditions that come + from s being an ordset. 
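+   For instance, with the nat ordering ncmp (defined further below for the
+   smart_minus sanity check), one would expect:
+     last   #nat #ncmp [1;2;3]  = 3
+     liat   #nat #ncmp [1;2;3]  = [1;2]
+     unsnoc #nat #ncmp [1;2;3]  = ([1;2], 3)
+   i.e. last is the f-greatest element and liat the remaining prefix; these
+   are only illustrative expectations, not additional postconditions.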
*) +let rec last_direct #a #f (s: ordset a f{s <> empty}) + : (x:a{mem x s /\ (forall (z:a{mem z s}). f z x)}) + = match s with + | [x] -> x + | h::g::t -> last_direct (tail s) + +let last_lib #a #f (s: ordset a f{s <> empty}) + = snd (List.Tot.Base.unsnoc s) + +let last_eq #a #f (s: ordset a f{s <> empty}) + : Lemma (last_direct s = last_lib s) = simple_induction + (fun p -> if p<>[] then last_direct #a #f p = last_lib p else true) s + +let last #a #f s = last_eq s; last_lib s + +let rec liat_direct #a #f (s: ordset a f{s <> empty}) : (l:ordset a f{ + (forall x. mem x l = (mem x s && (x <> last s))) /\ + (if tail s <> empty then head s = head l else true) + }) = + match s with + | [x] -> [] + | h::g::t -> h::(liat_direct #a #f (g::t)) + +let liat_lib #a #f (s: ordset a f{s <> empty}) = fst (List.Tot.Base.unsnoc s) + +let liat_eq #a #f (s:ordset a f {s<>empty}) + : Lemma (liat_direct s = liat_lib s) = simple_induction + (fun p -> if p<>[] then liat_direct p = liat_lib p else true) s + +let liat #a #f s = liat_eq s; liat_lib s + +let unsnoc #a #f s = + liat_eq s; + last_eq s; + let l = List.Tot.Base.unsnoc s in + (fst l, snd l) + +let as_list (#a:eqtype) (#f:cmp a) (s:ordset a f) : Tot (l:list a{sorted f l}) = s + +val insert': #a:eqtype -> #f:cmp a -> x:a -> s:ordset a f + -> Tot (l:(ordset a f){let s = as_list s in let l = as_list l in + (Cons? l /\ + (head #a #f l = x \/ + (Cons? s /\ head #a #f l = head #a #f s)))}) +let rec insert' #_ #f x s = + match s with + | [] -> [x] + | hd::tl -> + if x = hd then hd::tl + else if f x hd then x::hd::tl + else hd::(insert' #_ #f x tl) + +let rec distinct' #a f l : Tot (ordset a f) = + match l with + | [] -> [] + | x::t -> insert' x (distinct' f t) + +let rec insert_mem (#a:eqtype) #f (x:a) (s:ordset a f) + : Lemma (mem x (insert' x s)) + = if s<>empty then insert_mem #a #f x (tail s) + +let insert_sub (#a:eqtype) #f x (s:ordset a f) test + : Lemma (mem test (insert' x s) = (mem test s || test = x)) = + simple_induction (fun p -> mem test (insert' x p) = (mem test p || test = x)) s + +let rec distinct_props #a (f:cmp a) (l: list a) + : Lemma (forall x. (mem x (distinct' f l) = List.Tot.Base.mem x l)) = + match l with + | [] -> () + | x::t -> distinct_props f t; + Classical.forall_intro (insert_sub x (distinct' f t)) + +let distinct #a f l = distinct_props f l; distinct' f l + +let rec union #_ #_ s1 s2 = match s1 with + | [] -> s2 + | hd::tl -> union tl (insert' hd s2) + +val remove': #a:eqtype -> #f:cmp a -> x:a -> s:ordset a f + -> Tot (l:(ordset a f){ ((Nil? s ==> Nil? l) /\ + (Cons? s ==> head s = x ==> l = tail s) /\ + (Cons? s ==> head s =!= x ==> (Cons? 
l /\ head l = Cons?.hd s)))}) + +let rec remove' #a #f x s = match s with + | [] -> [] + | hd::(tl: ordset a f) -> + if x = hd then tl + else hd::(remove' x tl) + +let size' (#a:eqtype) (#f:cmp a) (s:ordset a f) = List.Tot.length s + +let liat_length #a #f (s:ordset a f{s<>empty}) : Lemma (size' (liat s) = ((size' s) - 1)) + = simple_induction (fun p -> if p<>empty then size' (liat p) = ((size' p)-1) else true) s + +let rec not_mem_aux (#a:eqtype) (#f:cmp a) (x:a) (s:ordset a f) + : Lemma (requires (size' s > 0) && (head s <> x) && (f x (head s))) + (ensures not (mem x s)) = + if tail s <> [] then not_mem_aux x (tail s) + +let rec subset' #a #f (s1 s2: ordset a f) = match s1, s2 with + | [], _ -> true + | hd::tl, hd'::tl' -> if f hd hd' && hd = hd' then subset' #a #f tl tl' + else if f hd hd' && not (hd = hd') then false + else subset' #a #f s1 tl' + | _, _ -> false + +let tail_is_subset #a #f (s:ordset a f{size' s > 0}) + : Lemma (Cons?.tl s `subset'` s) = + simple_induction (fun (s:ordset a f) -> size' s=0 || subset' (Cons?.tl s) s) s + +let self_is_subset #a #f (s:ordset a f) + : Lemma (subset' s s) = simple_induction (fun (s:ordset a f) -> subset' s s) s + +(* + returns a pair of (fst z) = (everything from s that goes after x) + and (snd z) = (true if x was found in s, false otherwise) +*) +let rec remove_until_greater_than #a #f x (s: ordset a f) + : z:(ordset a f & bool) { (size' (fst z) <= size' s) && + (not(mem x (fst z))) && + (subset' (fst z) s) && + (snd z = mem x s) && + (match (fst z) with + | [] -> true + | h::t -> (sorted f (x::(fst z)))) + } = + match s with + | [] -> ([], false) + | h::(t:ordset a f) -> if h=x then begin + if size' t > 0 then not_mem_aux x t; + tail_is_subset s; + (t, true) + end + else if f x h then begin + not_mem_aux x s; + self_is_subset s; + (s, false) + end + else remove_until_greater_than x t + +let rec remove_until_gt_prop #a #f (s: ordset a f) (x:a) (test:a) + : Lemma (f test x ==> not (mem test (fst (remove_until_greater_than x s)))) = + match s with + | [] -> () + | h::(t:ordset a f) -> + let aux (test:a) : Lemma (requires f test x && h<>test) + (ensures not (mem test (fst (remove_until_greater_than x s)))) = + remove_until_gt_prop #a #f t x test + in Classical.move_requires aux test; + if h <> x then remove_until_gt_prop t x test + +let rec remove_until_gt_mem #a #f (s: ordset a f) (x:a) (test:a) + : Lemma (mem test (fst (remove_until_greater_than x s)) = ( + mem test s && + f x test && + (x<>test) + )) + = if size' s > 0 then remove_until_gt_mem (tail s) x test + +let mem_implies_f #a #f (s: ordset a f) (x:a) + : Lemma (requires mem x s) (ensures f (Cons?.hd s) x) + = simple_induction (fun s -> mem x s ==> f (head s) x) s + +(* + Smart intersect is the set intersect that accounts for the ordering of both lists, + eliminating some checks by trimming leading elements of one of the input lists + that are guaranteeed to not belong to the other list. + E.g. smart_intersect [1;2;3] [3;4;5] can safely trim [1;2;3] to just [3] + upon inspecting the head of [3;4;5]. +*) +let rec smart_intersect #a #f (s1 s2: ordset a f) : Tot (z:ordset a f{ + (forall x. mem x z = (mem x s1 && mem x s2)) /\ + (forall (x:a{sorted f (x::s1)}). sorted f (x::z)) /\ + (forall (x:a{sorted f (x::s2)}). 
sorted f (x::z)) + }) (decreases size' s1 + size' s2) = + match s1 with + | [] -> [] + | h1::(t1:ordset a f) -> match s2 with + | [] -> [] + | h2::(t2:ordset a f) -> + if h1=h2 then h1::smart_intersect t1 t2 + else begin + if f h1 h2 then ( + let skip1, found = remove_until_greater_than #a #f h2 t1 in + let subresult : ordset a f = smart_intersect skip1 t2 in + Classical.forall_intro (remove_until_gt_mem t1 h2); + Classical.forall_intro (Classical.move_requires (mem_implies_f s2)); + if found then h2::subresult else subresult + ) else ( + let skip2, found = remove_until_greater_than #a #f h1 t2 in + let subresult = smart_intersect #a #f t1 skip2 in + Classical.forall_intro (remove_until_gt_mem t2 h1); + Classical.forall_intro (Classical.move_requires (mem_implies_f s1)); + if found then h1::subresult + else subresult + ) + end + +let intersect #a #f s1 s2 = smart_intersect s1 s2 + +let choose #a #f s = match s with | [] -> None | x::_ -> Some x + +let remove #a #f x s = remove' #_ #f x s + +let size #a #f s = size' s + +let subset #a #f s1 s2 = subset' s1 s2 + +let singleton (#a:eqtype) #f x = [x] + +let mem_of_empty #a #f (s: ordset a f{size s = 0}) (x: a) + : Lemma (not (mem x s)) = () + +let mem_of_tail #a #f (s: ordset a f{size s > 0}) (x:a) + : Lemma ((mem #a #f x (Cons?.tl s) || (x = Cons?.hd s)) = mem x s) = () + +let not_mem_of_tail #a #f (s: ordset a f{size s > 0}) (x:a) + : Lemma (not (mem x (tail s)) = not (mem x s) || x = head s) + = simple_induction (fun s -> mem x s ==> f (head s) x) s + +let rec set_props #a #f (s:ordset a f) + : Lemma (if size s > 0 then ((forall x. mem x (tail s) ==> f (head s) x /\ head s <> x)) + else forall x. not (mem x s)) + = if (size s > 1) then set_props (tail s) + +let rec same_members_means_eq #a #f (s1 s2: ordset a f) + : Lemma (requires forall x. mem x s1 = mem x s2) (ensures s1 == s2) = + match s1 with + | [] -> if size s2>0 then assert (mem (head s2) s2) + | h1::t1 -> set_props s1; + set_props s2; + match s2 with + | h2::t2 -> same_members_means_eq #a #f t1 t2 + +let intersect_is_symmetric #a #f (s1 s2: ordset a f) + : Lemma (intersect s1 s2 = intersect s2 s1) + = same_members_means_eq (intersect s1 s2) (intersect s2 s1) + +let remove_until_gt_exclusion #a #f (s:ordset a f) (x:a) (test:a) + : Lemma (requires f x test && (not (mem test (fst (remove_until_greater_than x s))))) + (ensures x=test || not (mem test s)) = + remove_until_gt_mem s x test + +let rec mem_implies_subset #a #f (s1 s2: ordset a f) + : Lemma ((forall x. mem x s1 ==> mem x s2) ==> subset s1 s2) + = match s1 with + | [] -> () + | h1::(t1:ordset a f) -> set_props s1; + set_props s2; + mem_implies_subset t1 s2; + if (size s2 > 0 && f (head s2) h1) + then mem_implies_subset s1 (tail s2) + +let rec subset_implies_mem #a #f (p q: ordset a f) + : Lemma (subset p q ==> (forall x. mem x p ==> mem x q)) = + if Cons? p && Cons? q then + if head p = head q + then subset_implies_mem (tail p) (tail q) + else subset_implies_mem p (tail q) + +let subset_transitivity #a #f (p q r: ordset a f) + : Lemma (requires p `subset` q /\ q `subset` r) + (ensures p `subset` r) = + subset_implies_mem p q; + subset_implies_mem q r; + mem_implies_subset p r + +let head_is_never_in_tail #a #f (s:ordset a f{size s > 0}) + : Lemma (not (mem (head s) (tail s))) = set_props s + +let rec smart_minus #a #f (p q: ordset a f) + : z:ordset a f { ( forall x. 
mem x z = (mem x p && (not (mem x q)))) /\ + (match p,z with + | ph::pt, zh::zt -> f ph zh + | ph::pt, [] -> subset p q + | [], _ -> z = []) + } = + match p with + | [] -> [] + | ph::(pt:ordset a f) -> match q with + | [] -> p + | qh::(qt:ordset a f) -> + let q_after_ph, found = remove_until_greater_than ph q in + Classical.forall_intro (remove_until_gt_mem q ph); + Classical.forall_intro (Classical.move_requires (mem_implies_f p)); + if found then begin + let result = smart_minus pt q_after_ph in + set_props p; + if result = [] then begin + subset_transitivity pt q_after_ph q; + subset_implies_mem pt q; + mem_implies_subset p q + end; + result + end + else ph::(smart_minus pt q_after_ph) + +let empty_minus_means_subset #a #f (p q: ordset a f) + : Lemma (requires size (smart_minus p q) = 0) (ensures subset p q) = () + +// a little test versus integers :) +let ncmp (x y:nat) = x <= y +let _ = assert (smart_minus #nat #ncmp [1;2;3;4] [3] == [1;2;4]) + +let minus #a #f s1 s2 = smart_minus s1 s2 + +let strict_subset #a #f s1 s2 = s1 <> s2 && subset s1 s2 + +let eq_lemma #a #f s1 s2 = same_members_means_eq s1 s2 + +let mem_empty #_ #_ _ = () + +let mem_singleton #_ #_ _ _ = () + +let mem_insert (#a:eqtype) #f (el:a) (s: ordset a f) (x:a) + : Lemma (mem x (insert' el s) = (x=el || mem x s)) = + simple_induction (fun p -> mem x (insert' el p) = (x=el || mem x p)) s + +let rec mem_union #_ #_ s1 s2 x = + if size s1 > 0 then + match s1 with | hd::tl -> + mem_union tl (insert' hd s2) x; + mem_insert hd s2 x + +let mem_intersect #_ #f s1 s2 x = () + +let mem_subset (#a:eqtype) #f s1 s2 = + subset_implies_mem s1 s2; + mem_implies_subset s1 s2 + +let choose_empty (#a:eqtype) #f = () + +let choose_s (#a:eqtype) #f s = () + +let rec mem_remove (#a:eqtype) #f x y s = + if size s > 0 then (set_props s; mem_remove x y (tail s)) + +let eq_remove (#a:eqtype) #f x s + = simple_induction (fun p -> not (mem x p) ==> p = remove x p) s + +let size_empty (#a:eqtype) #f s = () + +let rec size_remove (#a:eqtype) #f x s = match s with + | hd::tl -> if x<>hd then size_remove #_ #f x tl + +let size_singleton (#a:eqtype) #f x = () + +let rec subset_size (#a:eqtype) #f x y = match x, y with + | [], _ -> () + | hd::tl, hd'::(tl':ordset a f) -> + if f hd hd' && hd = hd' then subset_size tl tl' + else subset_size x tl' + +let insert_when_already_exists (#a:eqtype) #f (s: ordset a f) (x:a) + : Lemma (requires mem x s) + (ensures insert' x s == s) + = simple_induction (fun p -> mem x p <==> insert' x p = p) s + +let size_insert (#a:eqtype) #f (s: ordset a f) (x:a) + : Lemma (size (insert' x s) >= size s) + = simple_induction (fun p -> size (insert' x p) >= size p) s + +let rec precise_size_insert (#a:eqtype) #f (s: ordset a f) (x:a) + : Lemma (size (insert' x s) = (if mem x s then size s else (size s) + 1)) + = if size s > 0 then precise_size_insert (tail s) x + +let rec size_of_union_left (#a:eqtype) #f (s1 s2: ordset a f) + : Lemma (ensures size (union s1 s2) >= size s2) = + match s1 with + | [] -> () + | hd::tl -> size_of_union_left tl (insert' hd s2); + precise_size_insert s2 hd + +let size_of_union_right (#a:eqtype) #f (s1 s2: ordset a f) + : Lemma (ensures size (union s1 s2) >= size s1) = + eq_lemma (union s1 s2) (union s2 s1); + size_of_union_left s2 s1 + +let size_union #a #f s1 s2 = + size_of_union_left s1 s2; + size_of_union_right s1 s2 + +let fold #a #acc #f g init s = List.Tot.fold_left g init s + +private +let rec map_internal (#a #b:eqtype) (#fa:cmp a) (#fb:cmp b) (g:a -> b) (sa:ordset a fa) + : Pure (ordset b 
fb) + (requires (forall x y. (x `fa` y ==> g x `fb` g y) /\ (x = y <==> g x = g y))) + (ensures (fun sb -> Cons? sb ==> Cons? sa /\ Cons?.hd sb == g (Cons?.hd sa))) += match sa with + | [] -> [] + | x :: xs -> + let y = g x in + let ys = map_internal #a #b #fa #fb g xs in + if not (Cons? ys) || Cons?.hd ys <> y then + y :: ys + else ys + +let rec map_size (#a #b:eqtype) (#fa:cmp a) (#fb: cmp b) (g: a->b) (sa:ordset a fa) + : Lemma (requires (forall x y. (x `fa` y ==> g x `fb` g y) /\ (x = y <==> g x = g y))) + (ensures size (map_internal #a #b #fa #fb g sa) <= size sa) + = if size sa > 0 then map_size #a #b #fa #fb g (tail sa) + +let rec map_as_list (#a #b:eqtype) (#fa:cmp a) (#fb: cmp b) (g: a->b) (sa:ordset a fa) + : Lemma (requires (forall x y. (x `fa` y ==> g x `fb` g y) /\ (x = y <==> g x = g y))) + (ensures as_list (map_internal #a #b #fa #fb g sa) == FStar.List.Tot.map g (as_list sa)) = + match sa with + | [] -> () + | h::(t:ordset a fa) -> map_as_list #a #b #fa #fb g t + +let map #a #b #fa #fb g sa = + map_size #a #b #fa #fb g sa; + map_as_list #a #b #fa #fb g sa; + map_internal #a #b #fa #fb g sa + +let lemma_strict_subset_size #a #f s1 s2 = + let eql (p q: ordset a f) + : Lemma (requires forall x. mem x p = mem x q) + (ensures p=q) + = eq_lemma p q in Classical.move_requires_2 eql s1 s2; + eliminate exists x. mem x s2 && not (mem x s1) + returns size s2 > size s1 with _. + begin + Classical.forall_intro (mem_insert x s1); + precise_size_insert s1 x; + assert (subset (insert' x s1) s2) + end + +let lemma_minus_mem #a #f s1 s2 x = () + +let rec strict_subset_implies_diff_element #a #f (s1 s2: ordset a f) + : Lemma (requires strict_subset s1 s2) + (ensures exists x. (mem x s2 /\ not (mem x s1))) = + match s1,s2 with + | [], h::t -> () + | h1::t1, h2::t2 -> Classical.move_requires (mem_implies_f s1) h2; + if h1=h2 then begin + strict_subset_implies_diff_element #a #f t1 t2; + set_props s2 + end + +let diff_element_implies_strict_subset #a #f (s1 s2: ordset a f) + : Lemma (requires subset s1 s2 /\ (exists x. (mem x s2 /\ not (mem x s1)))) + (ensures strict_subset s1 s2) = () + +let lemma_strict_subset_exists_diff #a #f (s1 s2: ordset a f) + : Lemma (requires subset s1 s2) + (ensures (strict_subset s1 s2) <==> (exists x. 
(mem x s2 /\ not (mem x s1)))) + = Classical.move_requires_2 strict_subset_implies_diff_element s1 s2 + +let rec count #a #f s c : nat = + match s with + | [] -> 0 + | h::t -> if c h + then 1 + count #a #f t c + else count #a #f t c + +let count_of_empty #_ #_ _ _ = () + +let count_of_impossible #a #f s c = simple_induction (fun p -> count p c = 0) s + +let count_all #a #f s c = simple_induction (fun p -> count p c = size p) s + +let rec count_of_cons #a #f s c = if size s > 1 then count_of_cons (tail s) c + +let rec all #a #f (s: ordset a f) (c: condition a) : Tot bool = + match s with + | [] -> true + | h::t -> c h && all #a #f t c + +let rec any #a #f (s: ordset a f) (c: condition a) : Tot bool = + match s with + | [] -> false + | h::t -> c h || any #a #f t c + +let rec mem_if_any #a #f s c x = if head s<>x then mem_if_any (tail s) c x + +let any_if_mem #a #f (s:ordset a f) (c: condition a) x + : Lemma (requires mem x s && c x) (ensures any s c) = + simple_induction (fun p -> mem x p && c x ==> any p c) s + +let all_means_not_any_not #a #f s c = simple_induction (fun p -> all p c = not (any p (inv c))) s + +let rec find_first #a #f s c = match s with + | [] -> None + | h::(t:ordset a f) -> if c h then Some h else find_first t c + +let find_first_is_some_iff_any #_ #_ s c = simple_induction (fun p -> Some? (find_first p c) = any p c) s + +let rec find_first_precedes_any_other #a #f s c x = + if head s<>x then find_first_precedes_any_other (tail s) c x; + set_props s + +let liat_size #a #f (s:ordset a f{s<>[]}) : Lemma (size (liat s) = size s - 1) = + base_induction (fun p -> if p<>[] then size (liat p) = size p - 1 else true) s + +let mem_liat #a #f (s:ordset a f{s<>[]}) (x:a) + : Lemma (mem x s = (mem x (liat s) || x = last s)) = () + +let rec any_liat #a #f (s:ordset a f{s<>[]}) (c: condition a) + : Lemma (any s c = (any (liat s) c || c (last s))) = match s with + | [x] -> () + | h::(t:ordset a f) -> if size t > 0 then any_liat t c + +let rec find_last' #a #f (s: ordset a f) (c: condition a) : Tot (option a) (decreases size s) = + if s=empty then None + else let liat,last = unsnoc s in + liat_size s; + if c last then Some last + else find_last' liat c + +let rec find_last_props #a #f (s:ordset a f) (c: condition a) + : Lemma (ensures (match find_last' s c with + | None -> not (any s c) + | Some v -> (any s c /\ (forall (x:a{mem x s && c x}). 
f x v)))) + (decreases size s) = + if size s > 0 then let liat,last = unsnoc s in + liat_size s; + find_last_props liat c; + if c last then any_if_mem s c last else any_liat s c + +let find_last #a #f s c = + find_last_props s c; + find_last' s c + +let find_last_is_some_iff_any #a #f s c = find_last_props s c + +let find_last_follows_any_other #a #f s c x = + any_if_mem s c x; + find_last_is_some_iff_any s c; + find_last_props s c + +let size_of_tail #a #f s = () + +let count_of_tail #_ #_ _ _ = () + +let rec where #a #f s c = + match s with + | [] -> [] + | h::[] -> if c h then [h] else [] + | h::(t:ordset a f) -> if c h then h::(where t c) else where t c + +let intersect_eq_where #_ #_ s1 s2 = + same_members_means_eq (where s1 (mem_of s2)) (intersect s1 s2) + +let minus_eq_where #_ #_ s1 s2 = + same_members_means_eq (where s1 (inv (mem_of s2))) (minus s1 s2) + +let count_is_size_of_where #_ #_ s c + = simple_induction (fun p -> count p c = size (where p c)) s + +let size_of_intersect #_ #_ s1 s2 = + intersect_eq_where s1 s2; + intersect_eq_where s2 s1; + intersect_is_symmetric s1 s2; + count_is_size_of_where s1 (mem_of s2); + count_is_size_of_where s2 (mem_of s1) + +let union_mem_forall #a #f (s1 s2: ordset a f) + : Lemma (forall x. (mem x (union s1 s2)) = (mem x s1 || mem x s2)) = + let aux x : Lemma (mem x (union s1 s2) = (mem x s1 || mem x s2)) = + mem_union s1 s2 x in Classical.forall_intro aux + +let union_with_empty #a #f (s: ordset a f) + : Lemma (union s empty = s) = eq_lemma (union s empty) s + +let union_head_lemma #a #f (s1 s2: ordset a f) + : Lemma (match s1, s2 with + | [],[] -> (union s1 s2 = []) + | [],h::t -> size (union s1 s2) > 0 && Cons?.hd (union s1 s2) = h + | h::t,[] -> size (union s1 s2) > 0 && Cons?.hd (union s1 s2) = h + | h1::t1, h2::t2 -> size (union s1 s2) > 0 && + (Cons?.hd (union s1 s2) = (if f h1 h2 then h1 else h2)) + ) = + match s1,s2 with + | [],[] -> () + | [],h::t -> () + | h::t,[] -> union_with_empty s1 + | h1::t1, h2::t2 -> union_mem_forall s1 s2; + set_props s1; + set_props s2; + set_props (union s1 s2) + +let union_sort_lemma (#a:eqtype) #f (h:a) (t1 t2: ordset a f) + : Lemma (requires sorted f (h::t1) /\ sorted f (h::t2)) + (ensures sorted f (h::(union t1 t2))) = + if size t1 = 0 then union_with_empty t2 + else if size t2 = 0 then union_with_empty t1 + else begin + union_mem_forall t1 t2; + set_props t1; + set_props t2; + set_props (union t1 t2) + end + +let union_with_prefix (#a:eqtype) #f (h:a) (t1 t2: (z:ordset a f{sorted f (h::z)})) + : Lemma (union #a #f (h::t1) (h::t2) = h::(union t1 t2)) = + union_mem_forall t1 t2; + union_sort_lemma h t1 t2; + same_members_means_eq (union #a #f (h::t1) (h::t2)) (h::(union t1 t2)) + +let union_of_tails_size (#a:eqtype) #f (s1 s2: ordset a f) + : Lemma (requires size s1 > 0 && size s2 > 0 && (Cons?.hd s1 <> Cons?.hd s2) && f (Cons?.hd s1) (Cons?.hd s2)) + (ensures size (union s1 s2) = 1 + size (union #a #f (Cons?.tl s1) s2)) = + match s1 with | h1::(t1:ordset a f) -> match s2 with | h2::(t2:ordset a f) -> + union_mem_forall t1 s2; + set_props s1; + set_props s2; + same_members_means_eq (h1::(union t1 s2)) (union s1 s2) + +let union_is_symmetric #a #f (s1 s2: ordset a f) : Lemma (union s1 s2 = union s2 s1) = + same_members_means_eq (union s1 s2) (union s2 s1) + +let size_of_union_aux_1 #a #f (s1 s2: (z:ordset a f{z<>empty})) + : Lemma (requires (head s1) <> (head s2) + && (f (head s1) (head s2)) + && (size (union (tail s1) s2) = size (tail s1) + size s2 - size (intersect (tail s1) s2))) + (ensures size 
(union s1 s2) = (size s1 + size s2 - size (intersect s1 s2))) = + union_of_tails_size s1 s2; + same_members_means_eq (intersect (tail s1) s2) (intersect s1 s2) + +let size_of_union_aux_2 #a #f (s1 s2: (z:ordset a f{z<>empty})) + : Lemma (requires (head s1) <> (head s2) + && not (f (head s1) (head s2)) + && (size (union s1 (tail s2)) = size s1 + size (tail s2) - size (intersect s1 (tail s2)))) + (ensures size (union s1 s2) = (size s1 + size s2 - size (intersect s1 s2))) + = Classical.forall_intro_2 (union_is_symmetric #a #f); + Classical.forall_intro_2 (intersect_is_symmetric #a #f); + size_of_union_aux_1 s2 s1 + +let rec size_of_union #a #f s1 s2 = + let size = size #a #f in + match s1,s2 with + | [], _ -> same_members_means_eq s2 (union s1 s2) + | _, [] -> same_members_means_eq s1 (union s1 s2) + | h1::(t1:ordset a f), h2::(t2:ordset a f) + -> size_of_union t1 s2; + size_of_union s1 t2; + if h1 = h2 then union_with_prefix h1 t1 t2 + else if f h1 h2 then size_of_union_aux_1 s1 s2 + else size_of_union_aux_2 s1 s2 + +let rec count_dichotomy #_ #_ s c = if s<>[] then count_dichotomy (tail s) c + +let size_of_minus #_ #_ s1 s2 = + minus_eq_where s1 s2; + intersect_eq_where s1 s2; + count_dichotomy s1 (mem_of s2); + count_is_size_of_where s1 (mem_of s2); + count_is_size_of_where s1 (inv (mem_of s2)) + +let intersect_with_subset #_ #_ s1 s2 = same_members_means_eq (intersect s1 s2) s1 + +let lemma_strict_subset_minus_size #_ #_ s1 s2 s = + let size_diff : pos = size s2 - size s1 in + size_of_minus s s2; + size_of_minus s s1; + intersect_with_subset s2 s; + intersect_is_symmetric s2 s; + intersect_with_subset s1 s; + intersect_is_symmetric s1 s + +let lemma_disjoint_union_subset #_ #_ s1 s2 = size_of_union s1 s2 + +let lemma_subset_union #_ #_ _ _ _ = () + +let lemma_strict_subset_transitive #_ #_ _ _ _ = () + +let lemma_intersect_symmetric = intersect_is_symmetric + +let lemma_intersect_union_empty_aux1 #a #f (s1 s2 s3: ordset a f) + : Lemma (requires (intersect s1 s3 == empty /\ intersect s2 s3 == empty)) + (ensures (intersect (union s1 s2) s3 = empty)) + = eq_lemma empty (intersect (union s1 s2) s3) + +let lemma_intersect_union_empty_aux2 #a #f (s1 s2 s3: ordset a f) + : Lemma (requires (intersect (union s1 s2) s3 = empty)) + (ensures (intersect s1 s3 == empty /\ intersect s2 s3 == empty)) + = eq_lemma empty (intersect s1 s3); + eq_lemma empty (intersect s2 s3) + +let lemma_intersect_union_empty #a #f (s1 s2 s3: ordset a f) + : Lemma ((intersect (union s1 s2) s3 = empty) = + (intersect s1 s3 = empty && intersect s2 s3 = empty)) + [SMTPat (intersect (union s1 s2) s3)] = + Classical.move_requires_3 (lemma_intersect_union_empty_aux1 #a #f) s1 s2 s3; + Classical.move_requires_3 (lemma_intersect_union_empty_aux2 #a #f) s1 s2 s3 + +let lemma_union_symmetric #a #f s1 s2 = eq_lemma (union s1 s2) (union s2 s1) + +let union_of_disjoint #a #f s1 s2 = eq_lemma (minus (union s1 s2) s1) s2 + +let distinct_is_idempotent #a #f s = eq_lemma (distinct f s) s + +(* Conversion from OrdSet to Set *) + +module S = FStar.Set + +let rec as_set #a #f s = + match s with + | [] -> S.empty + | hd::tl -> S.union (S.singleton hd) (as_set #a #f tl) + +let rec lemma_as_set_mem #a #f s x + = match s with + | [] -> () + | hd::tl -> + if x = hd + then () + else lemma_as_set_mem #a #f tl x + +let lemma_as_set_disjoint_right #a #f (s1 s2: ordset a f) + : Lemma (requires intersect s1 s2 = empty) + (ensures S.disjoint (as_set s1) (as_set s2)) = () + +let lemma_as_set_disjoint_left #a #f (s1 s2: ordset a f) + : Lemma (requires 
S.disjoint (as_set s1) (as_set s2)) + (ensures intersect s1 s2 = empty) = + let mem_eq p q : Lemma (S.mem p (as_set q) <==> mem #a #f p q) = () in + Classical.forall_intro_2 mem_eq + +let lemma_as_set_disjoint #a #f s1 s2 = + Classical.move_requires_2 (lemma_as_set_disjoint_right #a #f) s1 s2; + Classical.move_requires_2 (lemma_as_set_disjoint_left #a #f) s1 s2 diff --git a/stage0/ulib/experimental/FStar.OrdSet.fsti b/stage0/ulib/experimental/FStar.OrdSet.fsti new file mode 100644 index 00000000000..d5dfa8a16dc --- /dev/null +++ b/stage0/ulib/experimental/FStar.OrdSet.fsti @@ -0,0 +1,352 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.OrdSet + +type total_order (a:eqtype) (f: (a -> a -> Tot bool)) = + (forall a1 a2. (f a1 a2 /\ f a2 a1) ==> a1 = a2) (* anti-symmetry *) + /\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *) + /\ (forall a1 a2. f a1 a2 \/ f a2 a1) (* totality *) + +type cmp (a:eqtype) = f:(a -> a -> Tot bool){total_order a f} + +let rec sorted (#a:eqtype) (f:cmp a) (l:list a) : Tot bool = + match l with + | [] -> true + | x::[] -> true + | x::y::tl -> f x y && x <> y && sorted f (y::tl) + +val ordset (a:eqtype) (f:cmp a) : Type0 + +val hasEq_ordset: a:eqtype -> f:cmp a + -> Lemma (requires (True)) (ensures (hasEq (ordset a f))) + [SMTPat (hasEq (ordset a f))] + +val empty : #a:eqtype -> #f:cmp a -> Tot (ordset a f) + +val tail (#a:eqtype) (#f:cmp a) (s:ordset a f{s<>empty}) : ordset a f +val head (#a:eqtype) (#f:cmp a) (s:ordset a f{s<>empty}) : a + +val mem : #a:eqtype -> #f:cmp a -> a -> s:ordset a f -> Tot bool + +(* currying-friendly version of mem, ready to be used as a lambda *) +unfold let mem_of #a #f (s:ordset a f) x = mem x s + +val last (#a:eqtype) (#f:cmp a) (s: ordset a f{s <> empty}) + : Tot (x:a{(forall (z:a{mem z s}). f z x) /\ mem x s}) + +(* + liat is the reverse of tail, i.e. a list of all but the last element. + A shortcut to (fst (unsnoc s)), which as a word is composed + in a remarkably similar fashion. +*) +val liat (#a:eqtype) (#f:cmp a) (s: ordset a f{s <> empty}) : Tot (l:ordset a f{ + (forall x. mem x l = (mem x s && (x <> last s))) /\ + (if tail s <> empty then (l <> empty) && (head s = head l) else true) + }) + +val unsnoc (#a:eqtype) (#f:cmp a) (s: ordset a f{s <> empty}) : Tot (p:(ordset a f & a){ + p = (liat s, last s) + }) + +val as_list (#a:eqtype) (#f:cmp a) (s:ordset a f) : Tot (l:list a{ + sorted f l /\ + (forall x. (List.Tot.mem x l = mem x s)) +}) + +val distinct (#a:eqtype) (f:cmp a) (l: list a) : Pure (ordset a f) + (requires True) (ensures fun z -> forall x. 
(mem x z = List.Tot.Base.mem x l)) + +val union : #a:eqtype -> #f:cmp a -> ordset a f -> ordset a f -> Tot (ordset a f) +val intersect : #a:eqtype -> #f:cmp a -> ordset a f -> ordset a f -> Tot (ordset a f) + +val choose : #a:eqtype -> #f:cmp a -> s:ordset a f -> Tot (option a) +val remove : #a:eqtype -> #f:cmp a -> a -> ordset a f -> Tot (ordset a f) + +val size : #a:eqtype -> #f:cmp a -> ordset a f -> Tot nat + +val subset : #a:eqtype -> #f:cmp a -> ordset a f -> ordset a f -> Tot bool +let superset #a #f (s1 s2: ordset a f) : Tot bool = subset s2 s1 + +val singleton : #a:eqtype -> #f:cmp a -> a -> Tot (ordset a f) + +val minus : #a:eqtype -> #f:cmp a -> ordset a f -> ordset a f -> Tot (ordset a f) + +val strict_subset: #a:eqtype -> #f:cmp a -> ordset a f -> ordset a f -> Tot bool +let strict_superset #a #f (s1 s2: ordset a f) : Tot bool = strict_subset s2 s1 + +let disjoint #a #f (s1 s2 : ordset a f) : Tot bool = intersect s1 s2 = empty + +let equal (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) : Tot prop = + forall x. mem #_ #f x s1 = mem #_ #f x s2 + +val eq_lemma: #a:eqtype -> #f:cmp a -> s1:ordset a f -> s2:ordset a f + -> Lemma (requires (equal s1 s2)) + (ensures (s1 = s2)) + [SMTPat (equal s1 s2)] + +val mem_empty: #a:eqtype -> #f:cmp a -> x:a + -> Lemma (requires True) (ensures (not (mem #a #f x (empty #a #f)))) + [SMTPat (mem #a #f x (empty #a #f))] + +val mem_singleton: #a:eqtype -> #f:cmp a -> x:a -> y:a + -> Lemma (requires True) + (ensures (mem #a #f y (singleton #a #f x)) = (x = y)) + [SMTPat (mem #a #f y (singleton #a #f x))] + +val mem_union: #a:eqtype -> #f:cmp a -> s1:ordset a f -> s2:ordset a f -> x:a + -> Lemma (requires True) + (ensures (mem #a #f x (union #a #f s1 s2) = + (mem #a #f x s1 || mem #a #f x s2))) + [SMTPat (mem #a #f x (union #a #f s1 s2))] + +val mem_intersect: #a:eqtype -> #f:cmp a -> s1:ordset a f -> s2:ordset a f -> x:a + -> Lemma (requires True) + (ensures (mem #a #f x (intersect s1 s2) = + (mem #a #f x s1 && mem #a #f x s2))) + [SMTPat (mem #a #f x (intersect #a #f s1 s2))] + +val mem_subset: #a:eqtype -> #f:cmp a -> s1:ordset a f -> s2:ordset a f + -> Lemma (requires True) + (ensures (subset #a #f s1 s2 <==> + (forall x. mem #a #f x s1 ==> mem #a #f x s2))) + [SMTPat (subset #a #f s1 s2)] + +val choose_empty: #a:eqtype -> #f:cmp a + -> Lemma (requires True) (ensures (None? (choose #a #f (empty #a #f)))) + [SMTPat (choose #a #f (empty #a #f))] + +(* TODO: FIXME: Pattern does not contain all quantified vars *) +val choose_s: #a:eqtype -> #f:cmp a -> s:ordset a f + -> Lemma (requires (not (s = (empty #a #f)))) + (ensures (Some? 
(choose #a #f s) /\ + s = union #a #f (singleton #a #f (Some?.v (choose #a #f s))) + (remove #a #f (Some?.v (choose #a #f s)) s))) + [SMTPat (choose #a #f s)] + +val mem_remove: #a:eqtype -> #f:cmp a -> x:a -> y:a -> s:ordset a f + -> Lemma (requires True) + (ensures (mem #a #f x (remove #a #f y s) = + (mem #a #f x s && not (x = y)))) + [SMTPat (mem #a #f x (remove #a #f y s))] + +val eq_remove: #a:eqtype -> #f:cmp a -> x:a -> s:ordset a f + -> Lemma (requires (not (mem #a #f x s))) + (ensures (s = remove #a #f x s)) + [SMTPat (remove #a #f x s)] + +val size_empty: #a:eqtype -> #f:cmp a -> s:ordset a f + -> Lemma (requires True) (ensures ((size #a #f s = 0) = (s = empty #a #f))) + [SMTPat (size #a #f s)] + +val size_remove: #a:eqtype -> #f:cmp a -> y:a -> s:ordset a f + -> Lemma (requires (mem #a #f y s)) + (ensures (size #a #f s = size #a #f (remove #a #f y s) + 1)) + [SMTPat (size #a #f (remove #a #f y s))] + +val size_singleton: #a:eqtype -> #f:cmp a -> x:a + -> Lemma (requires True) (ensures (size #a #f (singleton #a #f x) = 1)) + [SMTPat (size #a #f (singleton #a #f x))] + +val subset_size: #a:eqtype -> #f:cmp a -> x:ordset a f -> y:ordset a f + -> Lemma (requires (subset #a #f x y)) + (ensures (size #a #f x <= size #a #f y)) + [SMTPat (subset #a #f x y)] + +(**********) + +val size_union: #a:eqtype -> #f:cmp a -> s1:ordset a f -> s2:ordset a f + -> Lemma (requires True) + (ensures ((size #a #f (union #a #f s1 s2) >= size #a #f s1) && + (size #a #f (union #a #f s1 s2) >= size #a #f s2))) + [SMTPat (size #a #f (union #a #f s1 s2))] + +(**********) + +val fold (#a:eqtype) (#acc:Type) (#f:cmp a) (g:acc -> a -> acc) (init:acc) (s:ordset a f) + : Tot acc + +val map (#a #b:eqtype) (#fa:cmp a) (#fb:cmp b) (g:a -> b) (sa:ordset a fa) + : Pure (ordset b fb) + (requires (forall x y. (x `fa` y ==> g x `fb` g y) /\ (x = y <==> g x = g y))) + (ensures (fun sb -> (size sb <= size sa) /\ + (as_list sb == FStar.List.Tot.map g (as_list sa)) /\ + (let sa = as_list sa in + let sb = as_list sb in + Cons? sb ==> Cons? sa /\ Cons?.hd sb == g (Cons?.hd sa)))) + +val lemma_strict_subset_size (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) + : Lemma (requires (strict_subset s1 s2)) + (ensures (subset s1 s2 /\ size s1 < size s2)) + [SMTPat (strict_subset s1 s2)] + +val lemma_minus_mem (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) (x:a) + : Lemma (requires True) (ensures (mem x (minus s1 s2) = (mem x s1 && not (mem x s2)))) + [SMTPat (mem x (minus s1 s2))] + +val lemma_strict_subset_exists_diff (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) + : Lemma (requires subset s1 s2) + (ensures (strict_subset s1 s2) <==> (exists x. (mem x s2 /\ not (mem x s1)))) + +type condition a = a -> bool + +let inv #a (c: condition a) : (z:condition a{forall x. c x = not (z x)}) = fun x -> not (c x) + +val count (#a:eqtype) (#f: cmp a) (s: ordset a f) (c: condition a) : nat + +val count_of_empty (#a:eqtype) (#f: cmp a) (s: ordset a f{size s = 0}) (c: condition a) + : Lemma (count s c = 0) + +val count_of_impossible (#a:eqtype) (#f: cmp a) (s: ordset a f) (c: condition a{forall p. not (c p)}) + : Lemma (count s c = 0) + +val count_all (#a:eqtype) (#f: cmp a) (s: ordset a f) (c: condition a{forall p. 
c p}) + : Lemma (count s c = size s) + +val count_of_cons (#a:eqtype) (#f: cmp a) (s: ordset a f{size s > 0}) (c: condition a) + : Lemma (count s c = (count (tail s) c + (if (c (head s)) then 1 else 0))) + +val all (#a:eqtype) (#f:cmp a) (s: ordset a f) (c: condition a) : Tot bool + +val any (#a:eqtype) (#f:cmp a) (s: ordset a f) (c: condition a) : Tot bool + +val mem_if_any (#a:eqtype) (#f:cmp a) (s:ordset a f) (c: condition a) (x:a{mem x s && c x}) + : Lemma (any s c) + +val all_means_not_any_not (#a:eqtype) (#f:cmp a) (s: ordset a f) (c: condition a) + : Lemma (all s c = not (any s (inv c))) + +val find_first (#a:eqtype) (#f:cmp a) (s: ordset a f) (c: condition a) : option a + +val find_first_is_some_iff_any (#a:eqtype) (#f:cmp a) (s:ordset a f) (c: condition a) + : Lemma (Some? (find_first s c) = any s c) + +val find_first_precedes_any_other (#a:eqtype) (#f:cmp a) (s:ordset a f) (c: condition a) (x:a{mem x s && c x}) + : Lemma (Some? (find_first s c) && f (Some?.v (find_first s c)) x) + +val liat_size (#a:eqtype) (#f:cmp a) (s:ordset a f{s<>empty}) + : Lemma (size (liat s) = ((size s)-1)) + +val mem_liat (#a:eqtype) (#f:cmp a) (s:ordset a f{s<>empty}) (x:a) + : Lemma (mem x s = (mem x (liat s) || x = last s)) + +val any_liat (#a:eqtype) (#f:cmp a) (s:ordset a f{s<>empty}) (c:condition a) + : Lemma (any s c = (any (liat s) c || c (last s))) + +val find_last (#a:eqtype) (#f:cmp a) (s: ordset a f) (c: condition a) : (z:option a{ match z with + | None -> not (any s c) + | Some v -> (any s c /\ (forall (x:a{mem x s && c x}). f x v)) +}) + +val find_last_is_some_iff_any (#a:eqtype) (#f:cmp a) (s:ordset a f) (c: condition a) + : Lemma (Some? (find_last s c) = any s c) + +val find_last_follows_any_other (#a:eqtype) (#f:cmp a) (s:ordset a f) (c: condition a) (x:a{mem x s && c x}) + : Lemma (Some? (find_last s c) && f x (Some?.v (find_last s c))) + +val size_of_tail (#a:eqtype) (#f:cmp a) (s:ordset a f{size s > 0}) + : Lemma (size s = 1 + (size (tail s))) + +val count_of_tail (#a:eqtype) (#f:cmp a) (s:ordset a f{size s > 0}) (c: condition a) + : Lemma (count s c = (count (tail s) c + (if c (head s) then 1 else 0))) + +val where (#a:eqtype) (#f:cmp a) (s:ordset a f) (c: condition a) + : Pure (ordset a f) + (requires True) + (ensures fun (z:ordset a f) -> + (as_list #a z == FStar.List.Tot.Base.filter c (as_list s)) /\ + (forall x. 
mem x z = (mem x s && c x)) /\ + (if size z > 0 && size s > 0 then f (head s) (head z) else true)) + +val intersect_eq_where (#a:eqtype) (#f:cmp a) (s1 s2:ordset a f) + : Lemma (intersect s1 s2 = where s1 (mem_of s2)) + +val minus_eq_where (#a:eqtype) (#f:cmp a) (s1 s2: ordset a f) + : Lemma (minus s1 s2 = where s1 (inv (mem_of s2))) + +val count_is_size_of_where (#a:eqtype) (#f:cmp a) (s: ordset a f) (c: condition a) + : Lemma (count s c = size (where s c)) + +val size_of_intersect (#a:eqtype) (#f:cmp a) (s1 s2: ordset a f) + : Lemma (ensures size (intersect s1 s2) = count s1 (mem_of s2) /\ + size (intersect s1 s2) = count s2 (mem_of s1)) + +val size_of_union (#a:eqtype) (#f:cmp a) (s1 s2: ordset a f) + : Lemma (size (union s1 s2) = (size s1 + size s2 - size (intersect s1 s2))) + +val count_dichotomy (#a:eqtype) (#f:cmp a) (s: ordset a f) (c: condition a) + : Lemma (size s = count s c + count s (inv c)) + +val size_of_minus (#a:eqtype) (#f:cmp a) (s1 s2: ordset a f) + : Lemma (size (minus s1 s2) = size s1 - size (intersect s1 s2)) + +val intersect_with_subset (#a:eqtype) (#f:cmp a) (s1 s2: ordset a f) + : Lemma (requires subset s1 s2) + (ensures intersect s1 s2 = s1) + +val lemma_strict_subset_minus_size (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) (s:ordset a f) + : Lemma (requires (strict_subset s1 s2 /\ subset s1 s /\ subset s2 s)) + (ensures (size (minus s s2) < size (minus s s1))) + [SMTPat (strict_subset s1 s2); SMTPat (subset s1 s); SMTPat (subset s2 s)] + +val lemma_disjoint_union_subset (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) + : Lemma (requires (~ (s1 == empty) /\ ~ (s2 == empty) /\ intersect s1 s2 == empty)) + (ensures (strict_subset s1 (union s1 s2) /\ strict_subset s2 (union s1 s2))) + [SMTPatOr [[SMTPat (strict_subset s1 (union s1 s2))]; [SMTPat (strict_subset s2 (union s1 s2))]]] + +val lemma_subset_union (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) (s:ordset a f) + : Lemma (requires (subset s1 s /\ subset s2 s)) + (ensures (subset (union s1 s2) s)) + [SMTPat (subset (union s1 s2) s)] + +val lemma_strict_subset_transitive (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) (s3:ordset a f) + : Lemma (requires (strict_subset s1 s2 /\ strict_subset s2 s3)) + (ensures (strict_subset s1 s3)) + [SMTPat (strict_subset s1 s2); SMTPat (strict_subset s2 s3)] + +val lemma_intersect_symmetric (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) + : Lemma (requires True) (ensures (intersect s1 s2 == intersect s2 s1)) + [SMTPatOr [[SMTPat (intersect s1 s2)]; [SMTPat (intersect s2 s1)]]] + +val lemma_intersect_union_empty (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) (s3:ordset a f) + : Lemma ((intersect (union s1 s2) s3 = empty) = (intersect s1 s3 = empty && intersect s2 s3 = empty)) + [SMTPat (intersect (union s1 s2) s3)] + +val lemma_union_symmetric (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) + : Lemma (union s1 s2 == union s2 s1) + [SMTPat (union s1 s2)] + +val union_of_disjoint (#a:eqtype) (#f:cmp a) (s1:ordset a f) (s2:ordset a f) + : Lemma (requires (disjoint s1 s2)) + (ensures (minus (union s1 s2) s1 == s2)) + [SMTPat (union s1 s2); SMTPat (disjoint s1 s2)] + +val distinct_is_idempotent (#a:eqtype) (#f: cmp a) (s: ordset a f) + : Lemma (distinct f (as_list s) = s) + +(* Conversion from OrdSet to Set *) + +module S = FStar.Set + +val as_set : #a:eqtype -> #f:cmp a -> ordset a f -> Tot (S.set a) + +val lemma_as_set_mem (#a:eqtype) (#f:cmp a) (s:ordset a f) (x:a) + : Lemma (mem x s <==> S.mem x (as_set s)) + [SMTPat (mem x 
s); + SMTPat (S.mem x (as_set s))] + +val lemma_as_set_disjoint (#a:eqtype) (#f:cmp a) (s1 s2:ordset a f) + : Lemma (intersect s1 s2 = empty <==> S.disjoint (as_set s1) (as_set s2)) + [SMTPat (S.disjoint (as_set s1) (as_set s2))] diff --git a/stage0/ulib/experimental/FStar.Reflection.Typing.Builtins.fsti b/stage0/ulib/experimental/FStar.Reflection.Typing.Builtins.fsti new file mode 100644 index 00000000000..aaae32e06fb --- /dev/null +++ b/stage0/ulib/experimental/FStar.Reflection.Typing.Builtins.fsti @@ -0,0 +1,36 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Reflection.Typing.Builtins + +(** This module defines some utilities in support of the reflection + typing judgment of F*, defined in Refl.Typing.fsti. + + IT IS HIGHLY EXPERIMENTAL AND NOT YET READY TO USE. + *) + +open FStar.Reflection.V2 +open FStar.Range + +val dummy_range : range + +val open_with (t:term) (v:term) : term + +val open_term (t:term) (v:var) : term + +val close_term (t:term) (v:var) : term + +val rename (t:term) (x y:var) : term diff --git a/stage0/ulib/experimental/FStar.Reflection.Typing.fst b/stage0/ulib/experimental/FStar.Reflection.Typing.fst new file mode 100644 index 00000000000..a3181d3fec8 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Reflection.Typing.fst @@ -0,0 +1,980 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Reflection.Typing + +(** This module defines a typing judgment for (parts of) the total + fragment of F*. We are using it in the meta DSL framework as an + official specification for the F* type system. + + We expect it to grow to cover more of the F* language. + + IT IS HIGHLY EXPERIMENTAL AND NOT YET READY TO USE. 
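+    As a rough illustration of the flavour of results developed in this file,
+    the open_close_inverse' lemmas further below show that closing a term over
+    a variable x at index i and then opening index i with x again yields the
+    original term:
+
+      subst_term (subst_term t [ ND x i ]) (open_with_var x i) == t
+
+    (under the local-closedness side condition stated on open_close_inverse';
+    the equation is quoted here only as an informal signpost).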
+ *) + +open FStar.List.Tot +open FStar.Reflection.V2 + +module R = FStar.Reflection.V2 +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Stubs.Tactics.Types +open FStar.Tactics.Effect +module RTB = FStar.Reflection.Typing.Builtins + +let inspect_pack = R.inspect_pack_inv +let pack_inspect = R.pack_inspect_inv + +let inspect_pack_namedv = R.inspect_pack_namedv +let pack_inspect_namedv = R.pack_inspect_namedv + +let inspect_pack_bv = R.inspect_pack_bv +let pack_inspect_bv = R.pack_inspect_bv + +let inspect_pack_binder = R.inspect_pack_binder +let pack_inspect_binder = R.pack_inspect_binder + +let inspect_pack_comp = R.inspect_pack_comp_inv +let pack_inspect_comp = R.pack_inspect_comp_inv + +let inspect_pack_fv = R.inspect_pack_fv +let pack_inspect_fv = R.pack_inspect_fv + +let inspect_pack_universe = R.inspect_pack_universe +let pack_inspect_universe = R.pack_inspect_universe + +let inspect_pack_lb = R.inspect_pack_lb +let pack_inspect_lb = R.pack_inspect_lb + +let inspect_pack_sigelt = R.inspect_pack_sigelt +let pack_inspect_sigelt = R.pack_inspect_sigelt + +let lookup_bvar (e:env) (x:int) : option term = magic () + +let lookup_fvar_uinst (e:R.env) (x:R.fv) (us:list R.universe) + : option R.term = magic () + +let lookup_bvar_extend_env (g:env) (x y:var) (ty:term) = admit () + +let lookup_fvar_extend_env (g:env) (x:fv) (us:universes) (y:var) (ty:term) = admit () + +let subst_ctx_uvar_and_subst _ _ = magic () + +let open_with (t:term) (v:term) = RTB.open_with t v + +let open_with_spec (t v:term) = admit () + +let open_term (t:term) (v:var) = RTB.open_term t v + +let open_term_spec (t:term) (v:var) = admit () + +let close_term (t:term) (v:var) = RTB.close_term t v + +let close_term_spec (t:term) (v:var) = admit () + +let rename (t:term) (x y:var)= RTB.rename t x y + +let rename_spec (t:term) (x y:var) = admit () + +let bv_index_of_make_bv (n:nat) = () +let namedv_uniq_of_make_namedv (n:nat) = () + +let bindings_ok_for_pat bnds pat = magic () +let bindings_ok_pat_constant c = admit () + +let subtyping_token_renaming (g:env) + (bs0:bindings) + (bs1:bindings) + (x:var { None? (lookup_bvar (extend_env_l g (bs1@bs0)) x) }) + (y:var { None? (lookup_bvar (extend_env_l g (bs1@bs0)) y) }) + (t:term) + (t0 t1:term) + (d:subtyping_token (extend_env_l g (bs1@(x,t)::bs0)) t0 t1) = magic () + +let subtyping_token_weakening (g:env) + (bs0:bindings) + (bs1:bindings) + (x:var { None? 
(lookup_bvar (extend_env_l g (bs1@bs0)) x) }) + (t:term) + (t0 t1:term) + (d:subtyping_token (extend_env_l g (bs1@bs0)) t0 t1) = magic () + +let well_typed_terms_are_ln _ _ _ _ = admit () + +let type_correctness _ _ _ _ = admit () + +let rec binder_offset_pattern_invariant (p:pattern) (ss:subst) + : Lemma (ensures binder_offset_pattern p == + binder_offset_pattern (subst_pattern p ss)) + (decreases p) + = match p with + | Pat_Cons _ _ pats -> + binder_offset_patterns_invariant pats ss + | _ -> () + +and binder_offset_patterns_invariant (p:list (pattern & bool)) (ss:subst) + : Lemma (ensures binder_offset_patterns p == + binder_offset_patterns (subst_patterns p ss)) + (decreases p) + = match p with + | [] -> () + | (hd, _)::tl -> + binder_offset_pattern_invariant hd ss; + let n = binder_offset_pattern hd in + binder_offset_patterns_invariant tl (shift_subst_n n ss) + +let rec open_close_inverse' (i:nat) (t:term { ln' t (i - 1) }) (x:var) + : Lemma + (ensures subst_term + (subst_term t [ ND x i ]) + (open_with_var x i) + == t) + (decreases t) + = match inspect_ln t with + | Tv_UInst _ _ + | Tv_FVar _ + | Tv_Type _ + | Tv_Const _ + | Tv_Unsupp + | Tv_Unknown + | Tv_BVar _ -> () + | Tv_Var _ -> () + | Tv_App t1 a -> + open_close_inverse' i t1 x; + open_close_inverse' i (fst a) x + + | Tv_Abs b body -> + open_close_inverse'_binder i b x; + open_close_inverse' (i + 1) body x + + | Tv_Arrow b c -> + open_close_inverse'_binder i b x; + open_close_inverse'_comp (i + 1) c x + + | Tv_Refine b f -> + open_close_inverse'_binder i b x; + open_close_inverse' (i + 1) f x + + | Tv_Let recf attrs b def body -> + open_close_inverse'_terms i attrs x; + open_close_inverse'_binder i b x; + (if recf + then open_close_inverse' (i + 1) def x + else open_close_inverse' i def x); + open_close_inverse' (i + 1) body x + + | Tv_Match scr ret brs -> + open_close_inverse' i scr x; + (match ret with + | None -> () + | Some m -> open_close_inverse'_match_returns i m x); + open_close_inverse'_branches i brs x + + | Tv_AscribedT e t tac b -> + open_close_inverse' i e x; + open_close_inverse' i t x; + (match tac with + | None -> () + | Some tac -> open_close_inverse' i tac x) + + | Tv_AscribedC e c tac b -> + open_close_inverse' i e x; + open_close_inverse'_comp i c x; + (match tac with + | None -> () + | Some tac -> open_close_inverse' i tac x) + + +and open_close_inverse'_binder (i:nat) (b:binder { ln'_binder b (i - 1) }) (x:var) + : Lemma (ensures subst_binder + (subst_binder b [ ND x i ]) + (open_with_var x i) + == b) + (decreases b) + = let bndr = inspect_binder b in + let {ppname; qual=q; attrs=attrs; sort=sort} = bndr in + open_close_inverse' i sort x; + open_close_inverse'_terms i attrs x; + assert (subst_terms (subst_terms attrs [ ND x i ]) + (open_with_var x i) == attrs); + pack_inspect_binder b; + assert (pack_binder {ppname; qual=q; attrs=attrs; sort=sort} == b) + +and open_close_inverse'_terms (i:nat) (ts:list term { ln'_terms ts (i - 1) }) (x:var) + : Lemma (ensures subst_terms + (subst_terms ts [ ND x i ]) + (open_with_var x i) + == ts) + (decreases ts) + = match ts with + | [] -> () + | t::ts -> + open_close_inverse' i t x; + open_close_inverse'_terms i ts x + +and open_close_inverse'_comp (i:nat) (c:comp { ln'_comp c (i - 1) }) (x:var) + : Lemma + (ensures subst_comp + (subst_comp c [ ND x i ]) + (open_with_var x i) + == c) + (decreases c) + = match inspect_comp c with + | C_Total t + | C_GTotal t -> open_close_inverse' i t x + + | C_Lemma pre post pats -> + open_close_inverse' i pre x; + open_close_inverse' 
i post x; + open_close_inverse' i pats x + + | C_Eff us eff_name res args decrs -> + open_close_inverse' i res x; + open_close_inverse'_args i args x; + open_close_inverse'_terms i decrs x + +and open_close_inverse'_args (i:nat) + (ts:list argv { ln'_args ts (i - 1) }) + (x:var) + : Lemma + (ensures subst_args + (subst_args ts [ ND x i ]) + (open_with_var x i) + == ts) + (decreases ts) + = match ts with + | [] -> () + | (t,q)::ts -> + open_close_inverse' i t x; + open_close_inverse'_args i ts x + +and open_close_inverse'_patterns (i:nat) + (ps:list (pattern & bool) { ln'_patterns ps (i - 1) }) + (x:var) + : Lemma + (ensures subst_patterns + (subst_patterns ps [ ND x i ]) + (open_with_var x i) + == ps) + (decreases ps) + = match ps with + | [] -> () + | (p, b)::ps' -> + open_close_inverse'_pattern i p x; + let n = binder_offset_pattern p in + binder_offset_pattern_invariant p [ ND x i ]; + open_close_inverse'_patterns (i + n) ps' x + +and open_close_inverse'_pattern (i:nat) (p:pattern{ln'_pattern p (i - 1)}) (x:var) + : Lemma + (ensures subst_pattern + (subst_pattern p [ ND x i ]) + (open_with_var x i) + == p) + (decreases p) + = match p with + | Pat_Constant _ -> () + + | Pat_Cons fv us pats -> + open_close_inverse'_patterns i pats x + + | Pat_Var bv _ -> () + + | Pat_Dot_Term topt -> + match topt with + | None -> () + | Some t -> open_close_inverse' i t x + + +and open_close_inverse'_branch (i:nat) (br:branch{ln'_branch br (i - 1)}) (x:var) + : Lemma + (ensures subst_branch + (subst_branch br [ ND x i ]) + (open_with_var x i) + == br) + (decreases br) + = let p, t = br in + let j = binder_offset_pattern p in + binder_offset_pattern_invariant p [ ND x i ]; + open_close_inverse'_pattern i p x; + open_close_inverse' (i + j) t x + +and open_close_inverse'_branches (i:nat) + (brs:list branch { ln'_branches brs (i - 1) }) + (x:var) + : Lemma + (ensures subst_branches + (subst_branches brs [ ND x i ]) + (open_with_var x i) + == brs) + (decreases brs) + = match brs with + | [] -> () + | br::brs -> + open_close_inverse'_branch i br x; + open_close_inverse'_branches i brs x + +and open_close_inverse'_match_returns (i:nat) + (m:match_returns_ascription { ln'_match_returns m (i - 1) }) + (x:var) + : Lemma + (ensures subst_match_returns + (subst_match_returns m [ ND x i ]) + (open_with_var x i) + == m) + (decreases m) + = let b, (ret, as_, eq) = m in + open_close_inverse'_binder i b x; + let ret = + match ret with + | Inl t -> + open_close_inverse' (i + 1) t x + | Inr c -> + open_close_inverse'_comp (i + 1) c x + in + let as_ = + match as_ with + | None -> () + | Some t -> + open_close_inverse' (i + 1) t x + in + () + +let open_close_inverse (e:R.term { ln e }) (x:var) + : Lemma (open_term (close_term e x) x == e) + = close_term_spec e x; + open_term_spec (close_term e x) x; + open_close_inverse' 0 e x + +let rec close_open_inverse' (i:nat) + (t:term) + (x:var { ~(x `Set.mem` freevars t) }) + : Lemma + (ensures subst_term + (subst_term t (open_with_var x i)) + [ ND x i ] + == t) + (decreases t) + = match inspect_ln t with + | Tv_Uvar _ _ -> assert false + | Tv_UInst _ _ + | Tv_FVar _ + | Tv_Type _ + | Tv_Const _ + | Tv_Unsupp + | Tv_Unknown -> () + | Tv_BVar _ -> () + | Tv_Var _ -> () + | Tv_App t1 a -> + close_open_inverse' i t1 x; + close_open_inverse' i (fst a) x + + | Tv_Abs b body -> + close_open_inverse'_binder i b x; + close_open_inverse' (i + 1) body x + + | Tv_Arrow b c -> + close_open_inverse'_binder i b x; + close_open_inverse'_comp (i + 1) c x + + | Tv_Refine b f -> + 
close_open_inverse'_binder i b x; + close_open_inverse' (i + 1) f x + + | Tv_Let recf attrs b def body -> + close_open_inverse'_terms i attrs x; + close_open_inverse'_binder i b x; + close_open_inverse' (if recf then (i + 1) else i) def x; + close_open_inverse' (i + 1) body x + + | Tv_Match scr ret brs -> + close_open_inverse' i scr x; + (match ret with + | None -> () + | Some m -> close_open_inverse'_match_returns i m x); + close_open_inverse'_branches i brs x + + | Tv_AscribedT e t tac b -> + close_open_inverse' i e x; + close_open_inverse' i t x; + (match tac with + | None -> () + | Some t -> close_open_inverse' i t x) + + | Tv_AscribedC e c tac b -> + close_open_inverse' i e x; + close_open_inverse'_comp i c x; + (match tac with + | None -> () + | Some t -> close_open_inverse' i t x) + +and close_open_inverse'_comp (i:nat) + (c:comp) + (x:var{ ~(x `Set.mem` freevars_comp c) }) + : Lemma + (ensures subst_comp + (subst_comp c (open_with_var x i)) + [ ND x i ] + == c) + (decreases c) + = match inspect_comp c with + | C_Total t + | C_GTotal t -> + close_open_inverse' i t x + + | C_Lemma pre post pats -> + close_open_inverse' i pre x; + close_open_inverse' i post x; + close_open_inverse' i pats x + + | C_Eff us eff_name res args decrs -> + close_open_inverse' i res x; + close_open_inverse'_args i args x; + close_open_inverse'_terms i decrs x + +and close_open_inverse'_args (i:nat) (args:list argv) (x:var{ ~(x `Set.mem` freevars_args args) }) + : Lemma + (ensures subst_args + (subst_args args (open_with_var x i)) + [ ND x i] + == args) + (decreases args) + = match args with + | [] -> () + | (a, q) :: args -> + close_open_inverse' i a x; + close_open_inverse'_args i args x + +and close_open_inverse'_binder (i:nat) (b:binder) (x:var{ ~(x `Set.mem` freevars_binder b) }) + : Lemma + (ensures subst_binder + (subst_binder b (open_with_var x i)) + [ ND x i ] + == b) + (decreases b) + = let bndr = inspect_binder b in + close_open_inverse' i bndr.sort x; + close_open_inverse'_terms i bndr.attrs x; + pack_inspect_binder b + +and close_open_inverse'_terms (i:nat) (ts:list term) (x:var{ ~(x `Set.mem` freevars_terms ts) }) + : Lemma + (ensures subst_terms + (subst_terms ts (open_with_var x i)) + [ ND x i ] + == ts) + (decreases ts) + = match ts with + | [] -> () + | hd :: tl -> + close_open_inverse' i hd x; + close_open_inverse'_terms i tl x + +and close_open_inverse'_branches (i:nat) (brs:list branch) + (x:var{ ~(x `Set.mem` freevars_branches brs) }) + : Lemma + (ensures subst_branches + (subst_branches brs (open_with_var x i)) + [ ND x i ] + == brs) + (decreases brs) + = match brs with + | [] -> () + | b :: brs -> + close_open_inverse'_branch i b x; + close_open_inverse'_branches i brs x + +and close_open_inverse'_branch (i:nat) + (br:branch) + (x:var{ ~(x `Set.mem` freevars_branch br) }) + : Lemma + (ensures subst_branch + (subst_branch br (open_with_var x i)) + [ ND x i ] + == br) + (decreases br) + = let p, t = br in + close_open_inverse'_pattern i p x; + binder_offset_pattern_invariant p (open_with_var x i); + close_open_inverse' (i + binder_offset_pattern p) t x + + +and close_open_inverse'_pattern (i:nat) + (p:pattern) + (x:var{ ~(x `Set.mem` freevars_pattern p) }) + : Lemma + (ensures subst_pattern + (subst_pattern p (open_with_var x i)) + [ ND x i ] + == p) + (decreases p) + = match p with + | Pat_Constant _ -> () + + | Pat_Cons fv us pats -> + close_open_inverse'_patterns i pats x + + | Pat_Var bv _ -> () + + | Pat_Dot_Term topt -> + match topt with + | None -> () + | Some t -> 
close_open_inverse' i t x + +and close_open_inverse'_patterns (i:nat) + (ps:list (pattern & bool)) + (x:var {~ (x `Set.mem` freevars_patterns ps) }) + : Lemma + (ensures subst_patterns + (subst_patterns ps (open_with_var x i)) + [ ND x i ] + == ps) + (decreases ps) + = match ps with + | [] -> () + | (p, b)::ps' -> + close_open_inverse'_pattern i p x; + let n = binder_offset_pattern p in + binder_offset_pattern_invariant p (open_with_var x i); + close_open_inverse'_patterns (i + n) ps' x + +and close_open_inverse'_match_returns (i:nat) (m:match_returns_ascription) + (x:var{ ~(x `Set.mem` freevars_match_returns m) }) + : Lemma + (ensures subst_match_returns + (subst_match_returns m (open_with_var x i)) + [ ND x i ] + == m) + (decreases m) + = let b, (ret, as_, eq) = m in + close_open_inverse'_binder i b x; + (match ret with + | Inl t -> close_open_inverse' (i + 1) t x + | Inr c -> close_open_inverse'_comp (i + 1) c x); + (match as_ with + | None -> () + | Some t -> close_open_inverse' (i + 1) t x) + + +let close_open_inverse (e:R.term) (x:var {~ (x `Set.mem` freevars e) }) + : Lemma (close_term (open_term e x) x == e) + = open_term_spec e x; + close_term_spec (open_term e x) x; + close_open_inverse' 0 e x + +let rec close_with_not_free_var (t:R.term) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars t))) + (ensures subst_term t [ ND x i ] == t) + (decreases t) = + + match inspect_ln t with + | Tv_Var _ + | Tv_BVar _ + | Tv_FVar _ + | Tv_UInst _ _ -> () + | Tv_App hd (arg, _) -> + close_with_not_free_var hd x i; + close_with_not_free_var arg x i + | Tv_Abs b body -> + close_binder_with_not_free_var b x i; + close_with_not_free_var body x (i + 1) + | Tv_Arrow b c -> + close_binder_with_not_free_var b x i; + close_comp_with_not_free_var c x (i + 1) + | Tv_Type _ -> () + | Tv_Refine b t -> + close_binder_with_not_free_var b x i; + close_with_not_free_var t x (i + 1) + | Tv_Const _ -> () + | Tv_Uvar _ _ -> assert False + | Tv_Let recf attrs b e1 e2 -> + close_terms_with_not_free_var attrs x i; + close_binder_with_not_free_var b x i; + (if recf then close_with_not_free_var e1 x (i + 1) + else close_with_not_free_var e1 x i); + close_with_not_free_var e2 x (i + 1) + | Tv_Match scrutinee ret_opt brs -> + close_with_not_free_var scrutinee x i; + (match ret_opt with + | None -> () + | Some ret -> close_match_returns_with_not_free_var ret x i); + close_branches_with_not_free_var brs x i + + | Tv_AscribedT e t tacopt _ -> + close_with_not_free_var e x i; + close_with_not_free_var t x i; + (match tacopt with + | None -> () + | Some tac -> close_with_not_free_var tac x i) + + | Tv_AscribedC e c tacopt _ -> + close_with_not_free_var e x i; + close_comp_with_not_free_var c x i; + (match tacopt with + | None -> () + | Some tac -> close_with_not_free_var tac x i) + + | Tv_Unknown -> () + | Tv_Unsupp -> () + +and close_match_returns_with_not_free_var + (r:match_returns_ascription) + (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_match_returns r))) + (ensures subst_match_returns r [ ND x i ] == r) + (decreases r) = + + let b, (ret, as_opt, _) = r in + close_binder_with_not_free_var b x i; + (match ret with + | Inl t -> close_with_not_free_var t x (i + 1) + | Inr c -> close_comp_with_not_free_var c x (i + 1)); + (match as_opt with + | None -> () + | Some t -> close_with_not_free_var t x (i + 1)) + +and close_branches_with_not_free_var + (brs:list R.branch) + (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_branches brs))) + (ensures subst_branches brs [ ND x i ] == brs) + 
(decreases brs) = + + match brs with + | [] -> () + | hd::tl -> + close_branch_with_not_free_var hd x i; + close_branches_with_not_free_var tl x i + +and close_branch_with_not_free_var + (br:R.branch) + (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_branch br))) + (ensures subst_branch br [ ND x i ] == br) + (decreases br) = + + let p, t = br in + close_pattern_with_not_free_var p x i; + close_with_not_free_var t x (binder_offset_pattern p + i) + +and close_pattern_with_not_free_var (p:R.pattern) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_pattern p))) + (ensures subst_pattern p [ ND x i ] == p) + (decreases p) = + + match p with + | Pat_Constant _ -> () + | Pat_Cons _ _ pats -> + close_patterns_with_not_free_var pats x i + | Pat_Var bv _ -> () + | Pat_Dot_Term topt -> + (match topt with + | None -> () + | Some t -> close_with_not_free_var t x i) + +and close_patterns_with_not_free_var (l:list (R.pattern & bool)) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_patterns l))) + (ensures subst_patterns l [ ND x i ] == l) + (decreases l) = + + match l with + | [] -> () + | (p, _)::tl -> + close_pattern_with_not_free_var p x i; + close_patterns_with_not_free_var tl x (binder_offset_pattern p + i) + +and close_terms_with_not_free_var (l:list R.term) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_terms l))) + (ensures subst_terms l [ ND x i ] == l) + (decreases l) = + + match l with + | [] -> () + | hd::tl -> + close_with_not_free_var hd x i; + close_terms_with_not_free_var tl x i + +and close_binder_with_not_free_var (b:R.binder) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_binder b))) + (ensures subst_binder b [ ND x i ] == b) + (decreases b) = + + let {attrs; sort} = inspect_binder b in + close_with_not_free_var sort x i; + close_terms_with_not_free_var attrs x i + +and close_comp_with_not_free_var (c:R.comp) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_comp c))) + (ensures subst_comp c [ ND x i ] == c) + (decreases c) = + + match inspect_comp c with + | C_Total t + | C_GTotal t -> close_with_not_free_var t x i + | C_Lemma pre post pats -> + close_with_not_free_var pre x i; + close_with_not_free_var post x i; + close_with_not_free_var pats x i + | C_Eff _ _ t args decrs -> + close_with_not_free_var t x i; + close_args_with_not_free_var args x i; + close_terms_with_not_free_var decrs x i + +and close_args_with_not_free_var (l:list R.argv) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars_args l))) + (ensures subst_args l [ ND x i ] == l) + (decreases l) = + + match l with + | [] -> () + | (t, _)::tl -> + close_with_not_free_var t x i; + close_args_with_not_free_var tl x i + +let equiv_arrow #g #e1 #e2 ty q x eq = + assume (~ (x `Set.mem` (freevars e1 `Set.union` freevars e2))); + let c1 = E_Total, e1 in + let c2 = E_Total, e2 in + Rel_arrow _ _ _ _ c1 c2 _ _ (Rel_refl _ _ _) (Relc_typ _ _ _ _ _ eq) + +let equiv_abs_close #g #e1 #e2 ty q x eq = + // TODO: the following can be the preconditions? + // or derived from equiv? 
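+ // Note: ln' e (-1) is just ln e (see the interface), i.e. e has no loose bound (de Bruijn) variables.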
+ assume (ln' e1 (-1)); + assume (ln' e2 (-1)); + // this should be a lemma + assume (~ (x `Set.mem` (freevars (subst_term e1 [ ND x 0 ]) `Set.union` + freevars (subst_term e2 [ ND x 0 ])))); + open_close_inverse' 0 e1 x; + open_close_inverse' 0 e2 x; + let eq + : equiv (extend_env g x ty) + (subst_term + (subst_term e1 [ ND x 0 ]) + (open_with_var x 0)) + (subst_term + (subst_term e2 [ ND x 0 ]) + (open_with_var x 0)) = + eq in + + Rel_abs _ _ _ _ _ _ _ (Rel_refl _ _ _) eq + +let rec open_with_gt_ln e i t j + : Lemma (requires ln' e i /\ i < j) + (ensures subst_term e [ DT j t ] == e) + (decreases e) = + match inspect_ln e with + | Tv_UInst _ _ + | Tv_FVar _ + | Tv_Type _ + | Tv_Const _ + | Tv_Unsupp + | Tv_Unknown + | Tv_Var _ + | Tv_BVar _ -> () + | Tv_App hd argv -> + open_with_gt_ln hd i t j; + open_with_gt_ln (fst argv) i t j + | Tv_Abs b body -> + open_with_gt_ln_binder b i t j; + open_with_gt_ln body (i + 1) t (j + 1) + | Tv_Arrow b c -> + open_with_gt_ln_binder b i t j; + open_with_gt_ln_comp c (i + 1) t (j + 1) + | Tv_Refine b f -> + open_with_gt_ln_binder b i t j; + open_with_gt_ln f (i + 1) t (j + 1) + | Tv_Uvar j c -> admit () + | Tv_Let recf attrs b def body -> + open_with_gt_ln_terms attrs i t j; + open_with_gt_ln_binder b i t j; + (if recf + then open_with_gt_ln def (i + 1) t (j + 1) + else open_with_gt_ln def i t j); + open_with_gt_ln body (i + 1) t (j + 1) + | Tv_Match scr ret brs -> + open_with_gt_ln scr i t j; + (match ret with + | None -> () + | Some ret -> open_with_gt_ln_match_returns ret i t j); + open_with_gt_ln_branches brs i t j + | Tv_AscribedT e t1 tac _ -> + open_with_gt_ln e i t j; + open_with_gt_ln t1 i t j; + (match tac with + | None -> () + | Some tac -> open_with_gt_ln tac i t j) + | Tv_AscribedC e c tac _ -> + open_with_gt_ln e i t j; + open_with_gt_ln_comp c i t j; + (match tac with + | None -> () + | Some tac -> open_with_gt_ln tac i t j) + +and open_with_gt_ln_binder (b:binder) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_binder b i /\ i < j) + (ensures subst_binder b [ DT j t ] == b) + (decreases b) = + + let {attrs;sort} = inspect_binder b in + open_with_gt_ln sort i t j; + open_with_gt_ln_terms attrs i t j + +and open_with_gt_ln_comp (c:comp) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_comp c i /\ i < j) + (ensures subst_comp c [ DT j t ] == c) + (decreases c) = + + match inspect_comp c with + | C_Total t1 + | C_GTotal t1 -> open_with_gt_ln t1 i t j + | C_Lemma pre post pats -> + open_with_gt_ln pre i t j; + open_with_gt_ln post i t j; + open_with_gt_ln pats i t j + | C_Eff _ _ res args decrs -> + open_with_gt_ln res i t j; + open_args_with_gt_ln_args args i t j; + open_with_gt_ln_terms decrs i t j + +and open_with_gt_ln_terms (l:list term) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_terms l i /\ i < j) + (ensures subst_terms l [ DT j t ] == l) + (decreases l) = + match l with + | [] -> () + | hd::tl -> + open_with_gt_ln hd i t j; + open_with_gt_ln_terms tl i t j + +and open_with_gt_ln_match_returns (m:match_returns_ascription) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_match_returns m i /\ i < j) + (ensures subst_match_returns m [ DT j t ] == m) + (decreases m) = + + let b, (ret, as_, _) = m in + open_with_gt_ln_binder b i t j; + (match ret with + | Inl t1 -> open_with_gt_ln t1 (i + 1) t (j + 1) + | Inr c -> open_with_gt_ln_comp c (i + 1) t (j + 1)); + (match as_ with + | None -> () + | Some t1 -> open_with_gt_ln t1 (i + 1) t (j + 1)) + + +and open_with_gt_ln_branches (l:list branch) (i:nat) (t:term) (j:nat) + : Lemma (requires 
ln'_branches l i /\ i < j) + (ensures subst_branches l [ DT j t ] == l) + (decreases l) = + match l with + | [] -> () + | hd::tl -> + open_with_gt_ln_branch hd i t j; + open_with_gt_ln_branches tl i t j + +and open_args_with_gt_ln_args (l:list argv) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_args l i /\ i < j) + (ensures subst_args l [ DT j t ] == l) + (decreases l) = + + match l with + | [] -> () + | (t1, _)::tl -> + open_with_gt_ln t1 i t j; + open_args_with_gt_ln_args tl i t j + +and open_with_gt_ln_branch (b:branch) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_branch b i /\ i < j) + (ensures subst_branch b [ DT j t ] == b) + (decreases b) = + + let p, t1 = b in + open_with_gt_ln_pat p i t j; + let k = binder_offset_pattern p in + open_with_gt_ln t1 (i + k) t (j + k) + +and open_with_gt_ln_pat (p:pattern) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_pattern p i /\ i < j) + (ensures subst_pattern p [ DT j t ] == p) + (decreases p) = + + match p with + | Pat_Constant _ -> () + | Pat_Cons _ _ pats -> + open_with_gt_ln_pats pats i t j + | Pat_Var bv _ -> () + | Pat_Dot_Term topt -> + (match topt with + | None -> () + | Some t1 -> open_with_gt_ln t1 i t j) + +and open_with_gt_ln_pats (l:list (pattern & bool)) (i:nat) (t:term) (j:nat) + : Lemma (requires ln'_patterns l i /\ i < j) + (ensures subst_patterns l [ DT j t ] == l) + (decreases l) = + + match l with + | [] -> () + | hd::tl -> + open_with_gt_ln_pat (fst hd) i t j; + let k = binder_offset_pattern (fst hd) in + open_with_gt_ln_pats tl (i + k) t (j + k) + +let if_complete_match (g:env) (t:term) = magic() + +let mkif + (g:fstar_env) + (scrutinee:term) + (then_:term) + (else_:term) + (ty:term) + (u_ty:universe) + (hyp:var { None? (lookup_bvar g hyp) /\ ~(hyp `Set.mem` (freevars then_ `Set.union` freevars else_)) }) + (eff:tot_or_ghost) + (ty_eff:tot_or_ghost) + (ts : typing g scrutinee (eff, bool_ty)) + (tt : typing (extend_env g hyp (eq2 (pack_universe Uv_Zero) bool_ty scrutinee true_bool)) then_ (eff, ty)) + (te : typing (extend_env g hyp (eq2 (pack_universe Uv_Zero) bool_ty scrutinee false_bool)) else_ (eff, ty)) + (tr : typing g ty (ty_eff, tm_type u_ty)) +: typing g (mk_if scrutinee then_ else_) (eff, ty) += let brt = (Pat_Constant C_True, then_) in + let bre = (Pat_Constant C_False, else_) in + bindings_ok_pat_constant g C_True; + bindings_ok_pat_constant g C_False; + let brty () : branches_typing g u_zero bool_ty scrutinee (eff,ty) [brt; bre] [[]; []] = + BT_S (Pat_Constant C_True, then_) [] + (BO (Pat_Constant C_True) [] hyp then_ () tt) + _ _ ( + BT_S (Pat_Constant C_False, else_) [] + (BO (Pat_Constant C_False) [] hyp else_ () te) + _ _ + BT_Nil) + in + T_Match g u_zero bool_ty scrutinee E_Total (T_FVar g bool_fv) eff ts [brt; bre] (eff, ty) + [[]; []] + (MC_Tok g scrutinee bool_ty _ _ (Squash.return_squash (if_complete_match g scrutinee))) + (brty ()) + +let typing_to_token (#g:env) (#e:term) (#c:comp_typ) (_ : typing g e c) = magic() diff --git a/stage0/ulib/experimental/FStar.Reflection.Typing.fsti b/stage0/ulib/experimental/FStar.Reflection.Typing.fsti new file mode 100644 index 00000000000..ff7178b2979 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Reflection.Typing.fsti @@ -0,0 +1,1907 @@ +(* + Copyright 2008-2023 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Reflection.Typing + +(** This module defines a typing judgment for (parts of) the total + and ghost fragment of F*. + + We are using it in the meta DSL framework as an + official specification for the F* type system. + + We expect it to grow to cover more of the F* language. + + IT IS HIGHLY EXPERIMENTAL AND NOT YET READY TO USE. + *) + +open FStar.List.Tot +open FStar.Reflection.V2 +module L = FStar.List.Tot +module R = FStar.Reflection.V2 +open FStar.Stubs.Tactics.V2.Builtins +open FStar.Stubs.Tactics.Types +open FStar.Tactics.Effect +module RD = FStar.Stubs.Reflection.V2.Data + +(* MOVE to some helper module *) +let rec fold_left_dec #a #b + (acc : a) + (l : list b) + (f : a -> (x:b{x << l}) -> a) + : Tot a (decreases l) + = + match l with + | [] -> acc + | x::xs -> fold_left_dec (f acc x) xs f + +let rec map_dec #a #b + (l : list a) + (f : (x:a{x << l}) -> b) + : Tot (list b) (decreases l) + = + match l with + | [] -> [] + | x::xs -> f x :: map_dec xs f + +let rec zip2prop #a #b (f : a -> b -> Type0) (xs : list a) (ys : list b) : Type0 = + match xs, ys with + | [], [] -> True + | x::xx, y::yy -> f x y /\ zip2prop f xx yy + | _ -> False +(* / MOVE *) + +val inspect_pack (t:R.term_view) + : Lemma (ensures R.(inspect_ln (pack_ln t) == t)) + [SMTPat R.(inspect_ln (pack_ln t))] + +val pack_inspect (t:R.term) + : Lemma (requires ~(Tv_Unsupp? (inspect_ln t))) + (ensures R.(pack_ln (inspect_ln t) == t)) + [SMTPat R.(pack_ln (inspect_ln t))] + +val inspect_pack_namedv (t:R.namedv_view) + : Lemma (ensures R.(inspect_namedv (pack_namedv t) == t)) + [SMTPat R.(inspect_namedv (pack_namedv t))] + +val pack_inspect_namedv (t:R.namedv) + : Lemma (ensures R.(pack_namedv (inspect_namedv t) == t)) + [SMTPat R.(pack_namedv (inspect_namedv t))] + +val inspect_pack_bv (t:R.bv_view) + : Lemma (ensures R.(inspect_bv (pack_bv t) == t)) + [SMTPat R.(inspect_bv (pack_bv t))] + +val pack_inspect_bv (t:R.bv) + : Lemma (ensures R.(pack_bv (inspect_bv t) == t)) + [SMTPat R.(pack_bv (inspect_bv t))] + +val inspect_pack_binder (bview:R.binder_view) + : Lemma (ensures R.(R.inspect_binder (R.pack_binder bview) == bview)) + [SMTPat R.(inspect_binder (pack_binder bview))] + +val pack_inspect_binder (t:R.binder) + : Lemma (ensures (R.pack_binder (R.inspect_binder t) == t)) + [SMTPat (R.pack_binder (R.inspect_binder t))] + +val inspect_pack_comp (t:R.comp_view) + : Lemma (ensures (R.inspect_comp (R.pack_comp t) == t)) + [SMTPat (R.inspect_comp (R.pack_comp t))] + +val pack_inspect_comp (t:R.comp) + : Lemma (ensures (R.pack_comp (R.inspect_comp t) == t)) + [SMTPat (R.pack_comp (R.inspect_comp t))] + +val inspect_pack_fv (nm:R.name) + : Lemma (ensures R.inspect_fv (R.pack_fv nm) == nm) + [SMTPat (R.inspect_fv (R.pack_fv nm))] + +val pack_inspect_fv (fv:R.fv) + : Lemma (ensures R.pack_fv (R.inspect_fv fv) == fv) + [SMTPat (R.pack_fv (R.inspect_fv fv))] + +val inspect_pack_universe (u:R.universe_view) + : Lemma (ensures R.inspect_universe (R.pack_universe u) == u) + [SMTPat (R.inspect_universe (R.pack_universe u))] + +val pack_inspect_universe (u:R.universe) + : Lemma (requires ~(Uv_Unk? 
(inspect_universe u))) + (ensures R.pack_universe (R.inspect_universe u) == u) + [SMTPat (R.pack_universe (R.inspect_universe u))] + +val inspect_pack_lb (lb:R.lb_view) + : Lemma (ensures R.inspect_lb (R.pack_lb lb) == lb) + [SMTPat (R.inspect_lb (R.pack_lb lb))] + +val pack_inspect_lb (lb:R.letbinding) + : Lemma (ensures R.pack_lb (R.inspect_lb lb) == lb) + [SMTPat (R.pack_lb (R.inspect_lb lb))] + +val inspect_pack_sigelt (sev:R.sigelt_view { ~ (Unk? sev) }) + : Lemma (ensures R.inspect_sigelt (R.pack_sigelt sev) == sev) + [SMTPat (R.inspect_sigelt (R.pack_sigelt sev))] + +val pack_inspect_sigelt (se:R.sigelt) + : Lemma (requires ~ (Unk? (R.inspect_sigelt se))) + (ensures R.pack_sigelt (R.inspect_sigelt se) == se) + [SMTPat (R.pack_sigelt (R.inspect_sigelt se))] + +val lookup_bvar (e:env) (x:int) : option term + +val lookup_fvar_uinst (e:R.env) (x:R.fv) (us:list R.universe) : option R.term + +let lookup_fvar (e:env) (x:fv) : option term = lookup_fvar_uinst e x [] + +let pp_name_t = FStar.Sealed.Inhabited.sealed "x" +let pp_name_default : pp_name_t = FStar.Sealed.Inhabited.seal "x" +let seal_pp_name x : pp_name_t = FStar.Sealed.Inhabited.seal x + +let tun = pack_ln Tv_Unknown + +let sort_t = FStar.Sealed.Inhabited.sealed tun +let sort_default : sort_t = FStar.Sealed.Inhabited.seal tun +let seal_sort x : sort_t = FStar.Sealed.Inhabited.seal x + +let mk_binder (pp_name:pp_name_t) (ty:term) (q:aqualv) : binder + = pack_binder + { ppname = pp_name; + qual = q; + attrs = []; + sort = ty} + +let mk_simple_binder (pp_name:pp_name_t) (ty:term) : simple_binder + = pack_binder + { ppname = pp_name; + qual = Q_Explicit; + attrs = []; + sort = ty} + +let extend_env (e:env) (x:var) (ty:term) : env = + R.push_binding e ({ + ppname = seal_pp_name "x"; + uniq = x; + sort = ty; + }) + +val lookup_bvar_extend_env (g:env) (x y:var) (ty:term) + : Lemma + (ensures ( + if x = y + then lookup_bvar (extend_env g x ty) y == Some ty + else lookup_bvar (extend_env g x ty) y == lookup_bvar g y)) + [SMTPat (lookup_bvar (extend_env g x ty) y)] + +val lookup_fvar_extend_env (g:env) (x:fv) (us:universes) (y:var) (ty:term) + : Lemma + (ensures lookup_fvar_uinst (extend_env g y ty) x us == lookup_fvar_uinst g x us) + [SMTPat (lookup_fvar_uinst (extend_env g y ty) x us)] + +let bv_index (x:bv) + : var + = (inspect_bv x).index + +let namedv_uniq (x:namedv) + : var + = (inspect_namedv x).uniq + +let binder_sort (b:binder) = + (inspect_binder b).sort + +let binder_qual (b:binder) = + let { qual = q } = inspect_binder b in q + +noeq +type subst_elt = + | DT : nat -> term -> subst_elt + | NT : var -> term -> subst_elt + | ND : var -> nat -> subst_elt + +let shift_subst_elt (n:nat) = function + | DT i t -> DT (i + n) t + | NT x t -> NT x t + | ND x i -> ND x (i + n) + +let subst = list subst_elt + +let shift_subst_n (n:nat) = L.map (shift_subst_elt n) + +let shift_subst = shift_subst_n 1 + +let maybe_uniq_of_term (x:term) = + match inspect_ln x with + | Tv_Var namedv -> Some (namedv_uniq namedv) + | _ -> None + +let rec find_matching_subst_elt_bv (s:subst) (bv:bv) : option subst_elt = + match s with + | [] -> None + | (DT j t)::ss -> + if j = bv_index bv + then Some (DT j t) + else find_matching_subst_elt_bv ss bv + | _::ss -> find_matching_subst_elt_bv ss bv + +let subst_db (bv:bv) (s:subst) : term = + match find_matching_subst_elt_bv s bv with + | Some (DT _ t) -> + (match maybe_uniq_of_term t with + | None -> t + | Some k -> + //if we're substituting a name j for a name k, retain the pp_name of j + let v : namedv = pack_namedv 
{ + sort = (inspect_bv bv).sort; + ppname = (inspect_bv bv).ppname; + uniq = k; + } in + pack_ln (Tv_Var v)) + | _ -> pack_ln (Tv_BVar bv) + +let rec find_matching_subst_elt_var (s:subst) (v:namedv) : option subst_elt = + match s with + | [] -> None + | (NT y _)::rest + | (ND y _)::rest -> + if y = namedv_uniq v + then Some (L.hd s) + else find_matching_subst_elt_var rest v + | _::rest -> find_matching_subst_elt_var rest v + +let subst_var (v:namedv) (s:subst) : term = + match find_matching_subst_elt_var s v with + | Some (NT _ t) -> + (match maybe_uniq_of_term t with + | None -> t + | Some k -> + pack_ln (Tv_Var (pack_namedv { inspect_namedv v with uniq = k }))) + | Some (ND _ i) -> + let bv = pack_bv { + sort = (inspect_namedv v).sort; + ppname = (inspect_namedv v).ppname; + index = i; + } in + pack_ln (Tv_BVar bv) + | _ -> pack_ln (Tv_Var v) + +let make_bv (n:nat) : bv_view = { + ppname = pp_name_default; + index = n; + sort = sort_default; +} +let make_bv_with_name (s:pp_name_t) (n:nat) : bv_view = { + ppname = s; + index = n; + sort = sort_default; +} + + +let var_as_bv (v:nat) = pack_bv (make_bv v) + +let make_namedv (n:nat) : namedv_view = { + ppname = pp_name_default; + uniq = n; + sort = sort_default; +} + +let make_namedv_with_name (s:pp_name_t) (n:nat) : namedv_view = { + ppname = s; + uniq = n; + sort = sort_default; +} + +let var_as_namedv (v:nat) : namedv = + pack_namedv { + uniq = v; + sort = sort_default; + ppname = pp_name_default; + } + +let var_as_term (v:var) = pack_ln (Tv_Var (var_as_namedv v)) + +let binder_of_t_q t q = mk_binder pp_name_default t q +let mk_abs ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q ty qual) t) +let mk_total t = pack_comp (C_Total t) +let mk_ghost t = pack_comp (C_GTotal t) +let mk_arrow ty qual t : R.term = + R.pack_ln (R.Tv_Arrow (binder_of_t_q ty qual) (mk_total t)) +let mk_ghost_arrow ty qual t : R.term = + R.pack_ln (R.Tv_Arrow (binder_of_t_q ty qual) (mk_ghost t)) +let bound_var i : R.term = R.pack_ln (R.Tv_BVar (R.pack_bv (make_bv i))) +let mk_let ppname (e1 t1 e2:R.term) = + R.pack_ln (R.Tv_Let false [] (mk_simple_binder ppname t1) e1 e2) + +let open_with_var_elt (x:var) (i:nat) : subst_elt = + DT i (pack_ln (Tv_Var (var_as_namedv x))) +let open_with_var (x:var) (i:nat) : subst = [open_with_var_elt x i] + +val subst_ctx_uvar_and_subst (c:ctx_uvar_and_subst) (ss:subst) + : ctx_uvar_and_subst + +let rec binder_offset_patterns (ps:list (pattern & bool)) + : nat + = match ps with + | [] -> 0 + | (p, b)::ps -> + let n = binder_offset_pattern p in + let m = binder_offset_patterns ps in + n + m + +and binder_offset_pattern (p:pattern) + : nat + = match p with + | Pat_Constant _ + | Pat_Dot_Term _ -> 0 + + | Pat_Var _ _ -> 1 + + | Pat_Cons head univs subpats -> + binder_offset_patterns subpats + +let rec subst_term (t:term) (ss:subst) + : Tot term (decreases t) + = match inspect_ln t with + | Tv_UInst _ _ + | Tv_FVar _ + | Tv_Type _ + | Tv_Const _ + | Tv_Unsupp + | Tv_Unknown -> t + | Tv_Var x -> subst_var x ss + | Tv_BVar j -> subst_db j ss + | Tv_App hd argv -> + pack_ln (Tv_App (subst_term hd ss) + (subst_term (fst argv) ss, snd argv)) + + | Tv_Abs b body -> + let b' = subst_binder b ss in + pack_ln (Tv_Abs b' (subst_term body (shift_subst ss))) + + | Tv_Arrow b c -> + let b' = subst_binder b ss in + pack_ln (Tv_Arrow b' (subst_comp c (shift_subst ss))) + + | Tv_Refine b f -> + let b = subst_binder b ss in + pack_ln (Tv_Refine b (subst_term f (shift_subst ss))) + + | Tv_Uvar j c -> + pack_ln (Tv_Uvar j (subst_ctx_uvar_and_subst c 
ss)) + + | Tv_Let recf attrs b def body -> + let b = subst_binder b ss in + pack_ln (Tv_Let recf + (subst_terms attrs ss) + b + (if recf + then subst_term def (shift_subst ss) + else subst_term def ss) + (subst_term body (shift_subst ss))) + + | Tv_Match scr ret brs -> + pack_ln (Tv_Match (subst_term scr ss) + (match ret with + | None -> None + | Some m -> Some (subst_match_returns m ss)) + (subst_branches brs ss)) + + | Tv_AscribedT e t tac b -> + pack_ln (Tv_AscribedT (subst_term e ss) + (subst_term t ss) + (match tac with + | None -> None + | Some tac -> Some (subst_term tac ss)) + b) + + | Tv_AscribedC e c tac b -> + pack_ln (Tv_AscribedC (subst_term e ss) + (subst_comp c ss) + (match tac with + | None -> None + | Some tac -> Some (subst_term tac ss)) + b) + +and subst_binder (b:binder) (ss:subst) + : Tot (b':binder{binder_is_simple b ==> binder_is_simple b'}) (decreases b) + = let bndr = inspect_binder b in + pack_binder { + ppname = bndr.ppname; + qual = bndr.qual; + attrs = subst_terms bndr.attrs ss; + sort = subst_term bndr.sort ss + } + +and subst_comp (c:comp) (ss:subst) + : Tot comp (decreases c) + = match inspect_comp c with + | C_Total t -> + pack_comp (C_Total (subst_term t ss)) + + | C_GTotal t -> + pack_comp (C_GTotal (subst_term t ss)) + + | C_Lemma pre post pats -> + pack_comp (C_Lemma (subst_term pre ss) + (subst_term post ss) + (subst_term pats ss)) + + | C_Eff us eff_name res args decrs -> + pack_comp (C_Eff us eff_name + (subst_term res ss) + (subst_args args ss) + (subst_terms decrs ss)) + +and subst_terms (ts:list term) (ss:subst) + : Tot (ts':list term{Nil? ts ==> Nil? ts'}) // property useful for subst_binder + (decreases ts) + = match ts with + | [] -> [] + | t::ts -> subst_term t ss :: subst_terms ts ss + +and subst_args (ts:list argv) (ss:subst) + : Tot (list argv) (decreases ts) + = match ts with + | [] -> [] + | (t,q)::ts -> (subst_term t ss,q) :: subst_args ts ss + +and subst_patterns (ps:list (pattern & bool)) (ss:subst) + : Tot (list (pattern & bool)) + (decreases ps) + = match ps with + | [] -> ps + | (p, b)::ps -> + let n = binder_offset_pattern p in + let p = subst_pattern p ss in + let ps = subst_patterns ps (shift_subst_n n ss) in + (p,b)::ps + +and subst_pattern (p:pattern) (ss:subst) + : Tot pattern + (decreases p) + = match p with + | Pat_Constant _ -> p + + | Pat_Cons fv us pats -> + let pats = subst_patterns pats ss in + Pat_Cons fv us pats + + | Pat_Var bv s -> + Pat_Var bv s + + | Pat_Dot_Term topt -> + Pat_Dot_Term (match topt with + | None -> None + | Some t -> Some (subst_term t ss)) + + +and subst_branch (br:branch) (ss:subst) + : Tot branch (decreases br) + = let p, t = br in + let p = subst_pattern p ss in + let j = binder_offset_pattern p in + let t = subst_term t (shift_subst_n j ss) in + p, t + +and subst_branches (brs:list branch) (ss:subst) + : Tot (list branch) (decreases brs) + = match brs with + | [] -> [] + | br::brs -> subst_branch br ss :: subst_branches brs ss + +and subst_match_returns (m:match_returns_ascription) (ss:subst) + : Tot match_returns_ascription (decreases m) + = let b, (ret, as_, eq) = m in + let b = subst_binder b ss in + let ret = + match ret with + | Inl t -> Inl (subst_term t (shift_subst ss)) + | Inr c -> Inr (subst_comp c (shift_subst ss)) + in + let as_ = + match as_ with + | None -> None + | Some t -> Some (subst_term t (shift_subst ss)) + in + b, (ret, as_, eq) + +val open_with (t:term) (v:term) : term + +val open_with_spec (t v:term) + : Lemma (open_with t v == + subst_term t [ DT 0 v ]) + +val 
open_term (t:term) (v:var) : term + +val open_term_spec (t:term) (v:var) + : Lemma (open_term t v == + subst_term t (open_with_var v 0)) + +val close_term (t:term) (v:var) : term + +val close_term_spec (t:term) (v:var) + : Lemma (close_term t v == + subst_term t [ ND v 0 ]) + +val rename (t:term) (x y:var) : term + +val rename_spec (t:term) (x y:var) + : Lemma (rename t x y == + subst_term t [ NT x (var_as_term y)]) + +val bv_index_of_make_bv (n:nat) + : Lemma (ensures bv_index (pack_bv (make_bv n)) == n) + [SMTPat (bv_index (pack_bv (make_bv n)))] + +val namedv_uniq_of_make_namedv (n:nat) + : Lemma (ensures namedv_uniq (pack_namedv (make_namedv n)) == n) + [SMTPat (namedv_uniq (pack_namedv (make_namedv n)))] + +let constant_as_term (v:vconst) = pack_ln (Tv_Const v) +let unit_exp = constant_as_term C_Unit +let unit_fv = pack_fv unit_lid +let unit_ty = pack_ln (Tv_FVar unit_fv) +let bool_fv = pack_fv bool_lid +let bool_ty = pack_ln (Tv_FVar bool_fv) + +let u_zero = pack_universe Uv_Zero +let u_max u1 u2 = pack_universe (Uv_Max [u1; u2]) +let u_succ u = pack_universe (Uv_Succ u) +let tm_type u = pack_ln (Tv_Type u) +let tm_prop = + let prop_fv = R.pack_fv R.prop_qn in + R.pack_ln (Tv_FVar prop_fv) +let eqtype_lid : R.name = ["Prims"; "eqtype"] + +let true_bool = pack_ln (Tv_Const C_True) +let false_bool = pack_ln (Tv_Const C_False) +let eq2 (u:universe) (t v0 v1:term) + : term + = let eq2 = pack_fv eq2_qn in + let eq2 = pack_ln (Tv_UInst eq2 [u]) in + let h = pack_ln (Tv_App eq2 (t, Q_Implicit)) in + let h1 = pack_ln (Tv_App h (v0, Q_Explicit)) in + let h2 = pack_ln (Tv_App h1 (v1, Q_Explicit)) in + h2 + +let b2t_lid : R.name = ["Prims"; "b2t"] +let b2t_fv : R.fv = R.pack_fv b2t_lid +let b2t_ty : R.term = R.pack_ln (R.Tv_Arrow (mk_binder (seal "x") bool_ty Q_Explicit) (mk_total (tm_type u_zero))) + + +//////////////////////////////////////////////////////////////////////////////// +// freevars +//////////////////////////////////////////////////////////////////////////////// + + +let rec freevars (e:term) + : FStar.Set.set var + = match inspect_ln e with + | Tv_Uvar _ _ -> Set.complement Set.empty + + | Tv_UInst _ _ + | Tv_FVar _ + | Tv_Type _ + | Tv_Const _ + | Tv_Unknown + | Tv_Unsupp + | Tv_BVar _ -> Set.empty + + | Tv_Var x -> Set.singleton (namedv_uniq x) + + | Tv_App e1 (e2, _) -> + Set.union (freevars e1) (freevars e2) + + | Tv_Abs b body -> + Set.union (freevars_binder b) (freevars body) + + | Tv_Arrow b c -> + Set.union (freevars_binder b) (freevars_comp c) + + | Tv_Refine b f -> + freevars (binder_sort b) `Set.union` + freevars f + + | Tv_Let recf attrs b def body -> + freevars_terms attrs `Set.union` + freevars (binder_sort b) `Set.union` + freevars def `Set.union` + freevars body + + | Tv_Match scr ret brs -> + freevars scr `Set.union` + freevars_opt ret freevars_match_returns `Set.union` + freevars_branches brs + + | Tv_AscribedT e t tac b -> + freevars e `Set.union` + freevars t `Set.union` + freevars_opt tac freevars + + | Tv_AscribedC e c tac b -> + freevars e `Set.union` + freevars_comp c `Set.union` + freevars_opt tac freevars + +and freevars_opt (#a:Type0) (o:option a) (f: (x:a { x << o } -> FStar.Set.set var)) + : FStar.Set.set var + = match o with + | None -> Set.empty + | Some x -> f x + +and freevars_comp (c:comp) + : FStar.Set.set var + = match inspect_comp c with + | C_Total t + | C_GTotal t -> + freevars t + + | C_Lemma pre post pats -> + freevars pre `Set.union` + freevars post `Set.union` + freevars pats + + | C_Eff us eff_name res args decrs -> + freevars res 
`Set.union` + freevars_args args `Set.union` + freevars_terms decrs + +and freevars_args (ts:list argv) + : FStar.Set.set var + = match ts with + | [] -> Set.empty + | (t,q)::ts -> + freevars t `Set.union` + freevars_args ts + +and freevars_terms (ts:list term) + : FStar.Set.set var + = match ts with + | [] -> Set.empty + | t::ts -> + freevars t `Set.union` + freevars_terms ts + +and freevars_binder (b:binder) + : Tot (Set.set var) (decreases b) + = let bndr = inspect_binder b in + freevars bndr.sort `Set.union` + freevars_terms bndr.attrs + +and freevars_pattern (p:pattern) + : Tot (Set.set var) (decreases p) + = match p with + | Pat_Constant _ -> + Set.empty + + | Pat_Cons head univs subpats -> + freevars_patterns subpats + + | Pat_Var bv s -> Set.empty + + | Pat_Dot_Term topt -> + freevars_opt topt freevars + +and freevars_patterns (ps:list (pattern & bool)) + : Tot (Set.set var) (decreases ps) + = match ps with + | [] -> Set.empty + | (p, b)::ps -> + freevars_pattern p `Set.union` + freevars_patterns ps + +and freevars_branch (br:branch) + : Tot (Set.set var) (decreases br) + = let p, t = br in + freevars_pattern p `Set.union` + freevars t + +and freevars_branches (brs:list branch) + : Tot (Set.set var) (decreases brs) + = match brs with + | [] -> Set.empty + | hd::tl -> freevars_branch hd `Set.union` freevars_branches tl + +and freevars_match_returns (m:match_returns_ascription) + : Tot (Set.set var) (decreases m) + = let b, (ret, as_, eq) = m in + let b = freevars_binder b in + let ret = + match ret with + | Inl t -> freevars t + | Inr c -> freevars_comp c + in + let as_ = freevars_opt as_ freevars in + b `Set.union` ret `Set.union` as_ + + +let rec ln' (e:term) (n:int) + : Tot bool (decreases e) + = match inspect_ln e with + | Tv_UInst _ _ + | Tv_FVar _ + | Tv_Type _ + | Tv_Const _ + | Tv_Unknown + | Tv_Unsupp + | Tv_Var _ -> true + | Tv_BVar m -> bv_index m <= n + | Tv_App e1 (e2, _) -> ln' e1 n && ln' e2 n + | Tv_Abs b body -> + ln'_binder b n && + ln' body (n + 1) + + | Tv_Arrow b c -> + ln'_binder b n && + ln'_comp c (n + 1) + + | Tv_Refine b f -> + ln'_binder b n && + ln' f (n + 1) + + | Tv_Uvar _ _ -> + false + + | Tv_Let recf attrs b def body -> + ln'_terms attrs n && + ln'_binder b n && + (if recf then ln' def (n + 1) else ln' def n) && + ln' body (n + 1) + + | Tv_Match scr ret brs -> + ln' scr n && + (match ret with + | None -> true + | Some m -> ln'_match_returns m n) && + ln'_branches brs n + + | Tv_AscribedT e t tac b -> + ln' e n && + ln' t n && + (match tac with + | None -> true + | Some tac -> ln' tac n) + + | Tv_AscribedC e c tac b -> + ln' e n && + ln'_comp c n && + (match tac with + | None -> true + | Some tac -> ln' tac n) + +and ln'_comp (c:comp) (i:int) + : Tot bool (decreases c) + = match inspect_comp c with + | C_Total t + | C_GTotal t -> ln' t i + + | C_Lemma pre post pats -> + ln' pre i && + ln' post i && + ln' pats i + + | C_Eff us eff_name res args decrs -> + ln' res i && + ln'_args args i && + ln'_terms decrs i + +and ln'_args (ts:list argv) (i:int) + : Tot bool (decreases ts) + = match ts with + | [] -> true + | (t,q)::ts -> + ln' t i && + ln'_args ts i + +and ln'_binder (b:binder) (n:int) + : Tot bool (decreases b) + = let bndr = inspect_binder b in + ln' bndr.sort n && + ln'_terms bndr.attrs n + +and ln'_terms (ts:list term) (n:int) + : Tot bool (decreases ts) + = match ts with + | [] -> true + | t::ts -> ln' t n && ln'_terms ts n + +and ln'_patterns (ps:list (pattern & bool)) (i:int) + : Tot bool + (decreases ps) + = match ps with + | [] -> true + | 
(p, b)::ps -> + let b0 = ln'_pattern p i in + let n = binder_offset_pattern p in + let b1 = ln'_patterns ps (i + n) in + b0 && b1 + +and ln'_pattern (p:pattern) (i:int) + : Tot bool + (decreases p) + = match p with + | Pat_Constant _ -> true + + | Pat_Cons head univs subpats -> + ln'_patterns subpats i + + | Pat_Var bv s -> true + + | Pat_Dot_Term topt -> + (match topt with + | None -> true + | Some t -> ln' t i) + +and ln'_branch (br:branch) (i:int) + : Tot bool (decreases br) + = let p, t = br in + let b = ln'_pattern p i in + let j = binder_offset_pattern p in + let b' = ln' t (i + j) in + b&&b' + +and ln'_branches (brs:list branch) (i:int) + : Tot bool (decreases brs) + = match brs with + | [] -> true + | br::brs -> + ln'_branch br i && + ln'_branches brs i + +and ln'_match_returns (m:match_returns_ascription) (i:int) + : Tot bool (decreases m) + = let b, (ret, as_, eq) = m in + let b = ln'_binder b i in + let ret = + match ret with + | Inl t -> ln' t (i + 1) + | Inr c -> ln'_comp c (i + 1) + in + let as_ = + match as_ with + | None -> true + | Some t -> ln' t (i + 1) + in + b && ret && as_ + +let ln (t:term) = ln' t (-1) +let ln_comp (c:comp) = ln'_comp c (-1) + + +// +// term_ctxt is used to define the equiv relation later, +// basically putting two equiv terms in a hole gives equiv terms +// +// The abs, arrow, refine, and let cases don't seem right here, +// since to prove their equiv, we need to extend gamma for their bodies +// +// If this is useful only for app, then may be we should remove it, +// and add app rules to the equiv relation itself + +[@@ no_auto_projectors] +noeq +type term_ctxt = + | Ctxt_hole : term_ctxt + | Ctxt_app_head : term_ctxt -> argv -> term_ctxt + | Ctxt_app_arg : term -> aqualv -> term_ctxt -> term_ctxt + // | Ctxt_abs_binder : binder_ctxt -> term -> term_ctxt + // | Ctxt_abs_body : binder -> term_ctxt -> term_ctxt + // | Ctxt_arrow_binder : binder_ctxt -> comp -> term_ctxt + // | Ctxt_arrow_comp : binder -> comp_ctxt -> term_ctxt + // | Ctxt_refine_sort : bv -> term_ctxt -> term -> term_ctxt + // | Ctxt_refine_ref : bv -> typ -> term_ctxt -> term_ctxt + // | Ctxt_let_sort : bool -> list term -> bv -> term_ctxt -> term -> term -> term_ctxt + // | Ctxt_let_def : bool -> list term -> bv -> term -> term_ctxt -> term -> term_ctxt + // | Ctxt_let_body : bool -> list term -> bv -> term -> term -> term_ctxt -> term_ctxt + // | Ctxt_match_scrutinee : term_ctxt -> option match_returns_ascription -> list branch -> term_ctxt + +// and bv_ctxt = +// | Ctxt_bv : sealed string -> nat -> term_ctxt -> bv_ctxt + +// and binder_ctxt = +// | Ctxt_binder : bv -> aqualv -> list term -> term_ctxt -> binder_ctxt + +// and comp_ctxt = +// | Ctxt_total : term_ctxt -> comp_ctxt +// | Ctxt_gtotal : term_ctxt -> comp_ctxt + +let rec apply_term_ctxt (e:term_ctxt) (t:term) : Tot term (decreases e) = + match e with + | Ctxt_hole -> t + | Ctxt_app_head e arg -> pack_ln (Tv_App (apply_term_ctxt e t) arg) + | Ctxt_app_arg hd q e -> pack_ln (Tv_App hd (apply_term_ctxt e t, q)) +// | Ctxt_abs_binder b body -> pack_ln (Tv_Abs (apply_binder_ctxt b t) body) +// | Ctxt_abs_body b e -> pack_ln (Tv_Abs b (apply_term_ctxt e t)) +// | Ctxt_arrow_binder b c -> pack_ln (Tv_Arrow (apply_binder_ctxt b t) c) +// | Ctxt_arrow_comp b c -> pack_ln (Tv_Arrow b (apply_comp_ctxt c t)) +// | Ctxt_refine_sort b sort phi -> pack_ln (Tv_Refine b (apply_term_ctxt sort t) phi) +// | Ctxt_refine_ref b sort phi -> pack_ln (Tv_Refine b sort (apply_term_ctxt phi t)) + +// | Ctxt_let_sort b attrs bv sort def body -> 
+// pack_ln (Tv_Let b attrs bv (apply_term_ctxt sort t) def body) +// | Ctxt_let_def b attrs bv sort def body -> +// pack_ln (Tv_Let b attrs bv sort (apply_term_ctxt def t) body) +// | Ctxt_let_body b attrs bv sort def body -> +// pack_ln (Tv_Let b attrs bv sort def (apply_term_ctxt body t)) + +// | Ctxt_match_scrutinee sc ret brs -> +// pack_ln (Tv_Match (apply_term_ctxt sc t) ret brs) + +// and apply_binder_ctxt (b:binder_ctxt) (t:term) : Tot binder (decreases b) = +// let Ctxt_binder binder_bv binder_qual binder_attrs ctxt = b in +// pack_binder {binder_bv; binder_qual; binder_attrs; binder_sort=apply_term_ctxt ctxt t} + +// and apply_comp_ctxt (c:comp_ctxt) (t:term) : Tot comp (decreases c) = +// match c with +// | Ctxt_total e -> pack_comp (C_Total (apply_term_ctxt e t)) +// | Ctxt_gtotal e -> pack_comp (C_GTotal (apply_term_ctxt e t)) + +noeq +type constant_typing: vconst -> term -> Type0 = + | CT_Unit: constant_typing C_Unit unit_ty + | CT_True: constant_typing C_True bool_ty + | CT_False: constant_typing C_False bool_ty + +[@@ no_auto_projectors] +noeq +type univ_eq : universe -> universe -> Type0 = + | UN_Refl : + u:universe -> + univ_eq u u + + | UN_MaxCongL : + u:universe -> + u':universe -> + v:universe -> + univ_eq u u' -> + univ_eq (u_max u v) (u_max u' v) + + | UN_MaxCongR : + u:universe -> + v:universe -> + v':universe -> + univ_eq v v' -> + univ_eq (u_max u v) (u_max u v') + + | UN_MaxComm: + u:universe -> + v:universe -> + univ_eq (u_max u v) (u_max v u) + + | UN_MaxLeq: + u:universe -> + v:universe -> + univ_leq u v -> + univ_eq (u_max u v) v + +and univ_leq : universe -> universe -> Type0 = + | UNLEQ_Refl: + u:universe -> + univ_leq u u + + | UNLEQ_Succ: + u:universe -> + v:universe -> + univ_leq u v -> + univ_leq u (u_succ v) + + | UNLEQ_Max: + u:universe -> + v:universe -> + univ_leq u (u_max u v) + +let mk_if (scrutinee then_ else_:R.term) : R.term = + pack_ln (Tv_Match scrutinee None [(Pat_Constant C_True, then_); + (Pat_Constant C_False, else_)]) + + +// effect and type +type comp_typ = FStar.Stubs.Tactics.Types.tot_or_ghost & typ + +let close_comp_typ' (c:comp_typ) (x:var) (i:nat) = + fst c, subst_term (snd c) [ ND x i ] + +let close_comp_typ (c:comp_typ) (x:var) = + close_comp_typ' c x 0 + +let open_comp_typ' (c:comp_typ) (x:var) (i:nat) = + fst c, subst_term (snd c) (open_with_var x i) + +let open_comp_typ (c:comp_typ) (x:var) = + open_comp_typ' c x 0 + +let freevars_comp_typ (c:comp_typ) = freevars (snd c) + +let mk_comp (c:comp_typ) : R.comp = + match fst c with + | E_Total -> mk_total (snd c) + | E_Ghost -> mk_ghost (snd c) + +let mk_arrow_ct ty qual (c:comp_typ) : R.term = + R.pack_ln (R.Tv_Arrow (binder_of_t_q ty qual) (mk_comp c)) + +type relation = + | R_Eq + | R_Sub + +let binding = var & term +let bindings = list binding +let rename_bindings bs x y = FStar.List.Tot.map (fun (v, t) -> (v, rename t x y)) bs + +let rec extend_env_l (g:env) (bs:bindings) + : env + = match bs with + | [] -> g + | (x,t)::bs -> extend_env (extend_env_l g bs) x t + +// +// TODO: support for erasable attribute +// +let is_non_informative_name (l:name) : bool = + l = R.unit_lid || + l = R.squash_qn || + l = ["FStar"; "Ghost"; "erased"] + +let is_non_informative_fv (f:fv) : bool = + is_non_informative_name (inspect_fv f) + +let rec __close_term_vs (i:nat) (vs : list var) (t : term) : Tot term (decreases vs) = + match vs with + | [] -> t + | v::vs -> + subst_term (__close_term_vs (i+1) vs t) [ND v i] + +let close_term_vs (vs : list var) (t : term) : term = + __close_term_vs 0 vs t 
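+ + (* For example, close_term_vs [x; y] t unfolds to + subst_term (subst_term t [ ND y 1 ]) [ ND x 0 ], + so the head of the list receives de Bruijn index 0. *)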
+ +let close_term_bs (bs : list binding) (t : term) : term = + close_term_vs (List.Tot.map fst bs) t + +let bindings_to_refl_bindings (bs : list binding) : list R.binding = + L.map (fun (v, ty) -> {uniq=v; sort=ty; ppname = pp_name_default}) bs + +let refl_bindings_to_bindings (bs : list R.binding) : list binding = + L.map (fun b -> b.uniq, b.sort) bs + +[@@ no_auto_projectors] +noeq +type non_informative : env -> term -> Type0 = + | Non_informative_type: + g:env -> + u:universe -> + non_informative g (pack_ln (Tv_Type u)) + + | Non_informative_fv: + g:env -> + x:fv{is_non_informative_fv x} -> + non_informative g (pack_ln (Tv_FVar x)) + + | Non_informative_uinst: + g:env -> + x:fv{is_non_informative_fv x} -> + us:list universe -> + non_informative g (pack_ln (Tv_UInst x us)) + + | Non_informative_app: + g:env -> + t:term -> + arg:argv -> + non_informative g t -> + non_informative g (pack_ln (Tv_App t arg)) + + | Non_informative_total_arrow: + g:env -> + t0:term -> + q:aqualv -> + t1:term -> + non_informative g t1 -> + non_informative g (mk_arrow_ct t0 q (E_Total, t1)) + + | Non_informative_ghost_arrow: + g:env -> + t0:term -> + q:aqualv -> + t1:term -> + non_informative g (mk_arrow_ct t0 q (E_Ghost, t1)) + + | Non_informative_token: + g:env -> + t:typ -> + squash (non_informative_token g t) -> + non_informative g t + +val bindings_ok_for_pat : env -> list R.binding -> pattern -> Type0 + +val bindings_ok_pat_constant : + g:env -> c:R.vconst -> Lemma (bindings_ok_for_pat g [] (Pat_Constant c)) + +let bindings_ok_for_branch (g:env) (bs : list R.binding) (br : branch) : Type0 = + bindings_ok_for_pat g bs (fst br) + +let bindings_ok_for_branch_N (g:env) (bss : list (list R.binding)) (brs : list branch) = + zip2prop (bindings_ok_for_branch g) bss brs + +let binding_to_namedv (b:R.binding) : Tot namedv = + pack_namedv { + RD.uniq = b.uniq; + RD.sort = seal b.sort; + RD.ppname = b.ppname; + } + +(* Elaborates the p pattern into a term, using the bs bindings for the +pattern variables. The resulting term is properly scoped only on an +environment which contains all of bs. See for instance the branch_typing +judg. Returns an option, since this can fail if e.g. there are not +enough bindings, and returns the list of unused binders as well, which +should be empty if the list of binding was indeed ok. *) +let rec elaborate_pat (p : pattern) (bs : list R.binding) : Tot (option (term & list R.binding)) (decreases p) = + match p, bs with + | Pat_Constant c, _ -> Some (pack_ln (Tv_Const c), bs) + | Pat_Cons fv univs subpats, bs -> + let head = + match univs with + | Some univs -> pack_ln (Tv_UInst fv univs) + | None -> pack_ln (Tv_FVar fv) + in + fold_left_dec + (Some (head, bs)) + subpats + (fun st pi -> + let (p,i) = pi in + match st with | None -> None | Some (head, bs) -> + match elaborate_pat p bs with | None -> None | Some (t, bs') -> Some (pack_ln (Tv_App head (t, (if i then Q_Implicit else Q_Explicit))), bs')) + + | Pat_Var _ _, b::bs -> + Some (pack_ln (Tv_Var (binding_to_namedv b)), bs) + | Pat_Dot_Term (Some t), _ -> Some (t, bs) + | Pat_Dot_Term None, _ -> None + | _ -> None + +[@@ no_auto_projectors] +noeq +type typing : env -> term -> comp_typ -> Type0 = + | T_Token : + g:env -> + e:term -> + c:comp_typ -> + squash (typing_token g e c) -> + typing g e c + + | T_Var : + g:env -> + x:namedv { Some? (lookup_bvar g (namedv_uniq x)) } -> + typing g (pack_ln (Tv_Var x)) (E_Total, Some?.v (lookup_bvar g (namedv_uniq x))) + + | T_FVar : + g:env -> + x:fv { Some? 
(lookup_fvar g x) } -> + typing g (pack_ln (Tv_FVar x)) (E_Total, Some?.v (lookup_fvar g x)) + + | T_UInst : + g:env -> + x:fv -> + us:list universe { Some? (lookup_fvar_uinst g x us) } -> + typing g (pack_ln (Tv_UInst x us)) (E_Total, Some?.v (lookup_fvar_uinst g x us)) + + | T_Const: + g:env -> + v:vconst -> + t:term -> + constant_typing v t -> + typing g (constant_as_term v) (E_Total, t) + + | T_Abs : + g:env -> + x:var { None? (lookup_bvar g x) } -> + ty:term -> + body:term { ~(x `Set.mem` freevars body) } -> + body_c:comp_typ -> + u:universe -> + pp_name:pp_name_t -> + q:aqualv -> + ty_eff:tot_or_ghost -> + typing g ty (ty_eff, tm_type u) -> + typing (extend_env g x ty) (open_term body x) body_c -> + typing g (pack_ln (Tv_Abs (mk_binder pp_name ty q) body)) + (E_Total, + pack_ln (Tv_Arrow (mk_binder pp_name ty q) + (mk_comp (close_comp_typ body_c x)))) + + | T_App : + g:env -> + e1:term -> + e2:term -> + x:binder -> + t:term -> + eff:tot_or_ghost -> + typing g e1 (eff, pack_ln (Tv_Arrow x (mk_comp (eff, t)))) -> + typing g e2 (eff, binder_sort x) -> + typing g (pack_ln (Tv_App e1 (e2, binder_qual x))) + (eff, open_with t e2) + + | T_Let: + g:env -> + x:var { None? (lookup_bvar g x) } -> + e1:term -> + t1:typ -> + e2:term -> + t2:typ -> + eff:tot_or_ghost -> + pp_name:pp_name_t -> + typing g e1 (eff, t1) -> + typing (extend_env g x t1) (open_term e2 x) (eff, t2) -> + typing g (mk_let pp_name e1 t1 e2) (eff, open_with (close_term t2 x) e1) + + | T_Arrow: + g:env -> + x:var { None? (lookup_bvar g x) } -> + t1:term -> + t2:term { ~(x `Set.mem` freevars t2) } -> + u1:universe -> + u2:universe -> + pp_name:pp_name_t -> + q:aqualv -> + eff:tot_or_ghost -> + t1_eff:tot_or_ghost -> + t2_eff:tot_or_ghost -> + typing g t1 (t1_eff, tm_type u1) -> + typing (extend_env g x t1) (open_term t2 x) (t2_eff, tm_type u2) -> + typing g (pack_ln (Tv_Arrow (mk_binder pp_name t1 q) (mk_comp (eff, t2)))) + (E_Total, tm_type (u_max u1 u2)) + + | T_Refine: + g:env -> + x:var { None? (lookup_bvar g x) } -> + t:term -> + e:term { ~(x `Set.mem` freevars e) } -> + u1:universe -> + u2:universe -> + t_eff:tot_or_ghost -> + e_eff:tot_or_ghost -> + typing g t (t_eff, tm_type u1) -> + typing (extend_env g x t) (open_term e x) (e_eff, tm_type u2) -> + typing g (pack_ln (Tv_Refine (mk_simple_binder pp_name_default t) e)) (E_Total, tm_type u1) + + | T_PropIrrelevance: + g:env -> + e:term -> + t:term -> + e_eff:tot_or_ghost -> + t_eff:tot_or_ghost -> + typing g e (e_eff, t) -> + typing g t (t_eff, tm_prop) -> + typing g (`()) (E_Total, t) + + | T_Sub: + g:env -> + e:term -> + c:comp_typ -> + c':comp_typ -> + typing g e c -> + sub_comp g c c' -> + typing g e c' + + | T_If: + g:env -> + scrutinee:term -> + then_:term -> + else_:term -> + ty:term -> + u_ty:universe -> + hyp:var { None? 
(lookup_bvar g hyp) /\ ~(hyp `Set.mem` (freevars then_ `Set.union` freevars else_)) } -> + eff:tot_or_ghost -> + ty_eff:tot_or_ghost -> + typing g scrutinee (eff, bool_ty) -> + typing (extend_env g hyp (eq2 (pack_universe Uv_Zero) bool_ty scrutinee true_bool)) then_ (eff, ty) -> + typing (extend_env g hyp (eq2 (pack_universe Uv_Zero) bool_ty scrutinee false_bool)) else_ (eff, ty) -> + typing g ty (ty_eff, tm_type u_ty) -> //typedness of ty cannot rely on hyp + typing g (mk_if scrutinee then_ else_) (eff, ty) + + | T_Match: + g:env -> + sc_u : universe -> + sc_ty : typ -> + sc : term -> + ty_eff:tot_or_ghost -> + typing g sc_ty (ty_eff, tm_type sc_u) -> + eff:tot_or_ghost -> + typing g sc (eff, sc_ty) -> + branches:list branch -> + ty:comp_typ -> + bnds:list (list R.binding) -> + complet : match_is_complete g sc sc_ty (List.Tot.map fst branches) bnds -> // complete patterns + branches_typing g sc_u sc_ty sc ty branches bnds -> // each branch has proper type + typing g (pack_ln (Tv_Match sc None branches)) ty + +and related : env -> term -> relation -> term -> Type0 = + | Rel_refl: + g:env -> + t:term -> + rel:relation -> + related g t rel t + + | Rel_sym: + g:env -> + t0:term -> + t1:term -> + related g t0 R_Eq t1 -> + related g t1 R_Eq t0 + + | Rel_trans: + g:env -> + t0:term -> + t1:term -> + t2:term -> + rel:relation -> + related g t0 rel t1 -> + related g t1 rel t2 -> + related g t0 rel t2 + + | Rel_univ: + g:env -> + u:universe -> + v:universe -> + univ_eq u v -> + related g (tm_type u) R_Eq (tm_type v) + + | Rel_beta: + g:env -> + t:typ -> + q:aqualv -> + e:term { ln' e 0 } -> + arg:term { ln arg } -> + related g (R.pack_ln (R.Tv_App (mk_abs t q e) (arg, q))) + R_Eq + (subst_term e [ DT 0 arg ]) + + | Rel_eq_token: + g:env -> + t0:term -> + t1:term -> + squash (equiv_token g t0 t1) -> + related g t0 R_Eq t1 + + | Rel_subtyping_token: + g:env -> + t0:term -> + t1:term -> + squash (subtyping_token g t0 t1) -> + related g t0 R_Sub t1 + + | Rel_equiv: + g:env -> + t0:term -> + t1:term -> + rel:relation -> + related g t0 R_Eq t1 -> + related g t0 rel t1 + + | Rel_arrow: + g:env -> + t1:term -> + t2:term -> + q:aqualv -> + c1:comp_typ -> + c2:comp_typ -> + rel:relation -> + x:var{ + None? (lookup_bvar g x) /\ + ~ (x `Set.mem` (freevars_comp_typ c1 `Set.union` freevars_comp_typ c2)) + } -> + related g t2 rel t1 -> + related_comp (extend_env g x t2) + (open_comp_typ c1 x) + rel + (open_comp_typ c2 x) -> + related g (mk_arrow_ct t1 q c1) rel (mk_arrow_ct t2 q c2) + + | Rel_abs: + g:env -> + t1:term -> + t2:term -> + q:aqualv -> + e1:term -> + e2:term -> + x:var{ + None? 
(lookup_bvar g x) /\ ~ (x `Set.mem` (freevars e1 `Set.union` freevars e2)) + } -> + related g t1 R_Eq t2 -> + related (extend_env g x t1) + (subst_term e1 (open_with_var x 0)) + R_Eq + (subst_term e2 (open_with_var x 0)) -> + related g (mk_abs t1 q e1) R_Eq (mk_abs t2 q e2) + + | Rel_ctxt: + g:env -> + t0:term -> + t1:term -> + ctxt:term_ctxt -> + related g t0 R_Eq t1 -> + related g (apply_term_ctxt ctxt t0) R_Eq (apply_term_ctxt ctxt t1) + +and related_comp : env -> comp_typ -> relation -> comp_typ -> Type0 = + | Relc_typ: + g:env -> + t0:term -> + t1:term -> + eff:tot_or_ghost -> + rel:relation -> + related g t0 rel t1 -> + related_comp g (eff, t0) rel (eff, t1) + + | Relc_total_ghost: + g:env -> + t:term -> + related_comp g (E_Total, t) R_Sub (E_Ghost, t) + + | Relc_ghost_total: + g:env -> + t:term -> + non_informative g t -> + related_comp g (E_Ghost, t) R_Sub (E_Total, t) + +and branches_typing (g:env) (sc_u:universe) (sc_ty:typ) (sc:term) (rty:comp_typ) + : (brs:list branch) -> (bnds : list (list R.binding)) -> Type0 += + (* This judgement only enforces that branch_typing holds for every + element of brs and its respective bnds (which must have the same + length). *) + + | BT_Nil : + branches_typing g sc_u sc_ty sc rty [] [] + + | BT_S : + + br : branch -> + bnds : list R.binding -> + pf : branch_typing g sc_u sc_ty sc rty br bnds -> + + rest_br : list branch -> + rest_bnds : list (list R.binding) -> + rest : branches_typing g sc_u sc_ty sc rty rest_br rest_bnds -> + branches_typing g sc_u sc_ty sc rty (br :: rest_br) (bnds :: rest_bnds) + +and branch_typing (g:env) (sc_u:universe) (sc_ty:typ) (sc:term) (rty:comp_typ) + : (br : branch) -> (bnds : list R.binding) -> Type0 += + | BO : + pat : pattern -> + bnds : list R.binding{bindings_ok_for_pat g bnds pat} -> + hyp_var:var{None? (lookup_bvar (extend_env_l g (refl_bindings_to_bindings bnds)) hyp_var)} -> + + body:term -> + + _ : squash (Some? (elaborate_pat pat bnds)) -> + + typing (extend_env + (extend_env_l g (refl_bindings_to_bindings bnds)) + hyp_var (eq2 sc_u sc_ty sc (fst (Some?.v (elaborate_pat pat bnds)))) + ) + body rty -> + + branch_typing g sc_u sc_ty sc rty + (pat, close_term_bs (refl_bindings_to_bindings bnds) body) + bnds + +and match_is_complete : env -> term -> typ -> list pattern -> list (list R.binding) -> Type0 = + | MC_Tok : + env:_ -> + sc:_ -> + ty:_ -> + pats:_ -> + bnds:_ -> + squash (match_complete_token env sc ty pats bnds) -> match_is_complete env sc ty pats bnds + +and sub_typing (g:env) (t1 t2:term) = related g t1 R_Sub t2 + +and sub_comp (g:env) (c1 c2:comp_typ) = related_comp g c1 R_Sub c2 + +and equiv (g:env) (t1 t2:term) = related g t1 R_Eq t2 + +type tot_typing (g:env) (e:term) (t:term) = typing g e (E_Total, t) + +type ghost_typing (g:env) (e:term) (t:term) = typing g e (E_Ghost, t) + +val subtyping_token_renaming (g:env) + (bs0:bindings) + (bs1:bindings) + (x:var { None? (lookup_bvar (extend_env_l g (bs1@bs0)) x) }) + (y:var { None? (lookup_bvar (extend_env_l g (bs1@bs0)) y) }) + (t:term) + (t0 t1:term) + (d:subtyping_token (extend_env_l g (bs1@(x,t)::bs0)) t0 t1) + : subtyping_token (extend_env_l g (rename_bindings bs1 x y@(y,t)::bs0)) + (rename t0 x y) + (rename t1 x y) + +val subtyping_token_weakening (g:env) + (bs0:bindings) + (bs1:bindings) + (x:var { None? 
(lookup_bvar (extend_env_l g (bs1@bs0)) x) }) + (t:term) + (t0 t1:term) + (d:subtyping_token (extend_env_l g (bs1@bs0)) t0 t1) + : subtyping_token (extend_env_l g (bs1@(x,t)::bs0)) t0 t1 + +let simplify_umax (#g:R.env) (#t:R.term) (#u:R.universe) + (d:typing g t (E_Total, tm_type (u_max u u))) + : typing g t (E_Total, tm_type u) + = let ue + : univ_eq (u_max u u) u + = UN_MaxLeq u u (UNLEQ_Refl u) + in + let du : related g (tm_type (u_max u u)) R_Eq (tm_type u) + = Rel_univ g (u_max u u) u ue in + let du : related g (tm_type (u_max u u)) R_Sub (tm_type u) + = Rel_equiv _ _ _ _ du in + T_Sub _ _ _ _ d (Relc_typ _ _ _ E_Total _ du) + +val well_typed_terms_are_ln (g:R.env) (e:R.term) (c:comp_typ) (_:typing g e c) + : Lemma (ensures ln e /\ ln (snd c)) + +val type_correctness (g:R.env) (e:R.term) (c:comp_typ) (_:typing g e c) + : GTot (u:R.universe & typing g (snd c) (E_Total, tm_type u)) + +val binder_offset_pattern_invariant (p:pattern) (ss:subst) + : Lemma (binder_offset_pattern p == binder_offset_pattern (subst_pattern p ss)) + +val binder_offset_patterns_invariant (p:list (pattern & bool)) (ss:subst) + : Lemma (binder_offset_patterns p == binder_offset_patterns (subst_patterns p ss)) + +val open_close_inverse' (i:nat) (t:term { ln' t (i - 1) }) (x:var) + : Lemma + (ensures subst_term + (subst_term t [ ND x i ]) + (open_with_var x i) + == t) + +val open_close_inverse'_binder (i:nat) (b:binder { ln'_binder b (i - 1) }) (x:var) + : Lemma (ensures subst_binder + (subst_binder b [ ND x i ]) + (open_with_var x i) + == b) + +val open_close_inverse'_terms (i:nat) (ts:list term { ln'_terms ts (i - 1) }) (x:var) + : Lemma (ensures subst_terms + (subst_terms ts [ ND x i ]) + (open_with_var x i) + == ts) + +val open_close_inverse'_comp (i:nat) (c:comp { ln'_comp c (i - 1) }) (x:var) + : Lemma + (ensures subst_comp + (subst_comp c [ ND x i ]) + (open_with_var x i) + == c) + +val open_close_inverse'_args (i:nat) + (ts:list argv { ln'_args ts (i - 1) }) + (x:var) + : Lemma + (ensures subst_args + (subst_args ts [ ND x i ]) + (open_with_var x i) + == ts) + +val open_close_inverse'_patterns (i:nat) + (ps:list (pattern & bool) { ln'_patterns ps (i - 1) }) + (x:var) + : Lemma + (ensures subst_patterns + (subst_patterns ps [ ND x i ]) + (open_with_var x i) + == ps) + +val open_close_inverse'_pattern (i:nat) (p:pattern{ln'_pattern p (i - 1)}) (x:var) + : Lemma + (ensures subst_pattern + (subst_pattern p [ ND x i ]) + (open_with_var x i) + == p) + +val open_close_inverse'_branch (i:nat) (br:branch{ln'_branch br (i - 1)}) (x:var) + : Lemma + (ensures subst_branch + (subst_branch br [ ND x i ]) + (open_with_var x i) + == br) + +val open_close_inverse'_branches (i:nat) + (brs:list branch { ln'_branches brs (i - 1) }) + (x:var) + : Lemma + (ensures subst_branches + (subst_branches brs [ ND x i ]) + (open_with_var x i) + == brs) + +val open_close_inverse'_match_returns (i:nat) + (m:match_returns_ascription { ln'_match_returns m (i - 1) }) + (x:var) + : Lemma + (ensures subst_match_returns + (subst_match_returns m [ ND x i ]) + (open_with_var x i) + == m) + +val open_close_inverse (e:R.term { ln e }) (x:var) + : Lemma (open_term (close_term e x) x == e) + + +val close_open_inverse' (i:nat) + (t:term) + (x:var { ~(x `Set.mem` freevars t) }) + : Lemma + (ensures subst_term + (subst_term t (open_with_var x i)) + [ ND x i ] + == t) + +val close_open_inverse'_comp (i:nat) + (c:comp) + (x:var{ ~(x `Set.mem` freevars_comp c) }) + : Lemma + (ensures subst_comp + (subst_comp c (open_with_var x i)) + [ ND x i ] + == c) + +val 
close_open_inverse'_args (i:nat) (args:list argv) (x:var{ ~(x `Set.mem` freevars_args args) }) + : Lemma + (ensures subst_args + (subst_args args (open_with_var x i)) + [ ND x i] + == args) + +val close_open_inverse'_binder (i:nat) (b:binder) (x:var{ ~(x `Set.mem` freevars_binder b) }) + : Lemma + (ensures subst_binder + (subst_binder b (open_with_var x i)) + [ ND x i ] + == b) + +val close_open_inverse'_terms (i:nat) (ts:list term) (x:var{ ~(x `Set.mem` freevars_terms ts) }) + : Lemma + (ensures subst_terms + (subst_terms ts (open_with_var x i)) + [ ND x i ] + == ts) + +val close_open_inverse'_branches (i:nat) (brs:list branch) + (x:var{ ~(x `Set.mem` freevars_branches brs) }) + : Lemma + (ensures subst_branches + (subst_branches brs (open_with_var x i)) + [ ND x i ] + == brs) + +val close_open_inverse'_branch (i:nat) + (br:branch) + (x:var{ ~(x `Set.mem` freevars_branch br) }) + : Lemma + (ensures subst_branch + (subst_branch br (open_with_var x i)) + [ ND x i ] + == br) + +val close_open_inverse'_pattern (i:nat) + (p:pattern) + (x:var{ ~(x `Set.mem` freevars_pattern p) }) + : Lemma + (ensures subst_pattern + (subst_pattern p (open_with_var x i)) + [ ND x i ] + == p) + +val close_open_inverse'_patterns (i:nat) + (ps:list (pattern & bool)) + (x:var {~ (x `Set.mem` freevars_patterns ps) }) + : Lemma + (ensures subst_patterns + (subst_patterns ps (open_with_var x i)) + [ ND x i ] + == ps) + +val close_open_inverse'_match_returns (i:nat) (m:match_returns_ascription) + (x:var{ ~(x `Set.mem` freevars_match_returns m) }) + : Lemma + (ensures subst_match_returns + (subst_match_returns m (open_with_var x i)) + [ ND x i ] + == m) + +val close_open_inverse (e:R.term) (x:var {~ (x `Set.mem` freevars e) }) + : Lemma (close_term (open_term e x) x == e) + +// +// fst has corresponding lemmas for other syntax classes +// +val close_with_not_free_var (t:R.term) (x:var) (i:nat) + : Lemma + (requires ~ (Set.mem x (freevars t))) + (ensures subst_term t [ ND x i ] == t) + +// this also requires x to be not in freevars e1 `Set.union` freevars e2 +val equiv_arrow (#g:R.env) (#e1 #e2:R.term) (ty:R.typ) (q:R.aqualv) + (x:var { None? (lookup_bvar g x) }) + (eq:equiv (extend_env g x ty) + (subst_term e1 (open_with_var x 0)) + (subst_term e2 (open_with_var x 0))) + : equiv g (mk_arrow ty q e1) + (mk_arrow ty q e2) + + +// the proof for this requires e1 and e2 to be ln +val equiv_abs_close (#g:R.env) (#e1 #e2:R.term) (ty:R.typ) (q:R.aqualv) + (x:var{None? (lookup_bvar g x)}) + (eq:equiv (extend_env g x ty) e1 e2) + : equiv g (mk_abs ty q (subst_term e1 [ ND x 0 ])) + (mk_abs ty q (subst_term e2 [ ND x 0 ])) + +val open_with_gt_ln (e:term) (i:nat) (t:term) (j:nat) + : Lemma + (requires ln' e i /\ i < j) + (ensures subst_term e [ DT j t ] == e) + [SMTPat (ln' e i); SMTPat (subst_term e [ DT j t ])] + +// +// Type of the top-level tactic that would splice-in the definitions +// + +let fstar_env_fvs (g:R.env) = + lookup_fvar g unit_fv == Some (tm_type u_zero) /\ + lookup_fvar g bool_fv == Some (tm_type u_zero) /\ + lookup_fvar g b2t_fv == Some b2t_ty + +type fstar_env = g:R.env{fstar_env_fvs g} + +type fstar_top_env = g:fstar_env { + forall x. None? 
(lookup_bvar g x )
+}
+
+//
+// No universe polymorphism yet
+//
+noeq
+type sigelt_typing : env -> sigelt -> Type0 =
+  | ST_Let :
+    g : env ->
+    fv : R.fv ->
+    ty : R.typ ->
+    tm : R.term ->
+    squash (typing g tm (E_Total, ty)) ->
+    sigelt_typing g (pack_sigelt (Sg_Let false [pack_lb ({ lb_fv = fv; lb_us = []; lb_typ = ty; lb_def = tm })]))
+
+  | ST_Let_Opaque :
+    g : env ->
+    fv : R.fv ->
+    ty : R.typ ->
+    (* no tm: only a proof of existence *)
+    squash (exists (tm:R.term). typing g tm (E_Total, ty)) ->
+    sigelt_typing g (pack_sigelt (Sg_Let false [pack_lb ({ lb_fv = fv; lb_us = []; lb_typ = ty; lb_def = (`_) })]))
+
+(**
+ * The type of the top-level tactic that would splice-in the definitions.
+ *
+ * The tactic takes as input a type environment and an optional expected type.
+ *
+ * It returns (sigelts_before, sigelt, sigelts_after)
+ * where sigelts_before and sigelts_after are lists of sigelts.
+ *
+ * All the returned sigelts indicate via a boolean flag whether they are well-typed
+ * in the judgment above.
+ *
+ * If the flag is not set, the F* typechecker typechecks the returned sigelts.
+ *
+ * The sigelt in the middle, if well-typed, has the input expected type.
+ *
+ * In addition, each sigelt can have a 'blob' attached with a given name.
+ * The blob can be used later, e.g., during extraction, and passed back to the
+ * extension to perform additional processing.
+ *
+ * The blob is stored in the sigmeta_extension_data field of the enclosing sigelt.
+ *)
+
+let blob = string & R.term
+
+//
+// t is the optional expected type
+//
+let sigelt_has_type (s:R.sigelt) (t:option R.term) : prop =
+  let open R in
+  match t with
+  | None -> True
+  | Some t ->
+    match inspect_sigelt s with
+    | Sg_Let false [lb] -> begin
+      let {lb_typ} = inspect_lb lb in
+      lb_typ == t
+      end
+
+    | _ -> False
+
+//
+// If checked is true, this sigelt is properly typed for the environment
+// If not, we don't know and let F* re-typecheck the sigelt.
+//
+
+let sigelt_for (g:env) (t:option R.typ) =
+  tup:(bool & sigelt & option blob) {
+    let (checked, se, _) = tup in
+    checked ==> (sigelt_typing g se /\ sigelt_has_type se t)
+  }
+
+//
+// sigelts_before, sigelt, sigelts_after
+//
+let dsl_tac_result_t (g:env) (t:option R.typ) =
+  list (sigelt_for g None) &
+  (sigelt_for g t) &
+  list (sigelt_for g None)
+
+//
+// The input option R.typ is the expected type
+//
+type dsl_tac_t =
+  gt:(fstar_top_env & option R.typ) ->
+  Tac (dsl_tac_result_t (fst gt) (snd gt))
+
+val if_complete_match (g:env) (t:term)
+  : match_complete_token g t bool_ty [
+      Pat_Constant C_True;
+      Pat_Constant C_False;
+    ] [[]; []]
+
+// Derived rule for if
+
+val mkif
+  (g:fstar_env)
+  (scrutinee:term)
+  (then_:term)
+  (else_:term)
+  (ty:term)
+  (u_ty:universe)
+  (hyp:var { None? (lookup_bvar g hyp) /\ ~(hyp `Set.mem` (freevars then_ `Set.union` freevars else_)) })
+  (eff:tot_or_ghost)
+  (ty_eff:tot_or_ghost)
+  (ts : typing g scrutinee (eff, bool_ty))
+  (tt : typing (extend_env g hyp (eq2 (pack_universe Uv_Zero) bool_ty scrutinee true_bool)) then_ (eff, ty))
+  (te : typing (extend_env g hyp (eq2 (pack_universe Uv_Zero) bool_ty scrutinee false_bool)) else_ (eff, ty))
+  (tr : typing g ty (ty_eff, tm_type u_ty))
+: typing g (mk_if scrutinee then_ else_) (eff, ty)
+
+(* Helper to return a single let definition in a splice_t tactic. 
*) +let mk_checked_let (g:R.env) (cur_module:name) (nm:string) (tm:R.term) (ty:R.typ{typing g tm (E_Total, ty)}) + : sigelt_for g (Some ty) = + let fv = pack_fv (cur_module @ [nm]) in + let lb = R.pack_lb ({ lb_fv = fv; lb_us = []; lb_typ = ty; lb_def = tm }) in + let se = R.pack_sigelt (R.Sg_Let false [lb]) in + let pf : sigelt_typing g se = + ST_Let g fv ty tm () + in + ( true, se, None ) + +let mk_unchecked_let (g:R.env) (cur_module:name) (nm:string) (tm:R.term) (ty:R.typ) + : bool & sigelt & option blob = + let fv = pack_fv (cur_module @ [nm]) in + let lb = R.pack_lb ({ lb_fv = fv; lb_us = []; lb_typ = ty; lb_def = tm }) in + let se = R.pack_sigelt (R.Sg_Let false [lb]) in + ( false, se, None ) + +(* Turn a typing derivation into a token. This is useful +to call primitives that require a proof of typing, like +`call_subtac`, since they do not take derivations nor can +they even be mentioned in that module due to dependencies. +Probably the right thing to do is refactor and avoid this, though. *) +val typing_to_token (#g:env) (#e:term) (#c:comp_typ) + : typing g e c -> typing_token g e c diff --git a/stage0/ulib/experimental/FStar.Sequence.Ambient.fst b/stage0/ulib/experimental/FStar.Sequence.Ambient.fst new file mode 100644 index 00000000000..1d024da716b --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Ambient.fst @@ -0,0 +1,43 @@ +(* + Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan + Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module brings properties about sequences ambiently into the +context. The properties are modeled after those in the Dafny sequence +axioms, with patterns for quantifiers chosen as in those axioms. + +@summary Puts properties of sequences into the ambient context +*) +module FStar.Sequence.Ambient + +open FStar.Sequence.Base + +let all_seq_facts_ambient : (squash all_seq_facts) = + all_seq_facts_lemma () diff --git a/stage0/ulib/experimental/FStar.Sequence.Base.fst b/stage0/ulib/experimental/FStar.Sequence.Base.fst new file mode 100644 index 00000000000..eddce94dc9c --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Base.fst @@ -0,0 +1,755 @@ +(* + Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan + Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module declares a type and functions used for modeling sequences +as they're modeled in Dafny. It also states and proves some properties +about sequences, and provides a lemma `all_seq_facts_lemma` one +can call to bring them into context. The properties are modeled after +those in the Dafny sequence axioms, with patterns for quantifiers +chosen as in those axioms. + +@summary Type, functions, and properties of sequences +*) +module FStar.Sequence.Base + +module FLT = FStar.List.Tot + +/// Internally, we represent a sequence as a list. + +type seq (ty: Type) = list ty + +/// We represent the Dafny function `Seq#Length` with `length`: +/// +/// function Seq#Length(Seq T): int; + +let length = FLT.length + +/// We represent the Dafny function `Seq#Empty` with `empty`: +/// +/// function Seq#Empty(): Seq T; + +let empty (#ty: Type) : seq ty = [] + +/// We represent the Dafny function `Seq#Singleton` with `singleton`: +/// +/// function Seq#Singleton(T): Seq T; + +let singleton (#ty: Type) (v: ty) : seq ty = + [v] + +/// We represent the Dafny function `Seq#Index` with `index`: +/// +/// function Seq#Index(Seq T, int): T; + +let index (#ty: Type) (s: seq ty) (i: nat{i < length s}) : ty = + FLT.index s i + +/// We represent the Dafny function `Seq#Build` with `build`: +/// +/// function Seq#Build(s: Seq T, val: T): Seq T; + +let build (#ty: Type) (s: seq ty) (v: ty) : seq ty = + FLT.append s [v] + +/// We represent the Dafny function `Seq#Append` with `append`: +/// +/// function Seq#Append(Seq T, Seq T): Seq T; + +let append = FLT.append + +/// We represent the Dafny function `Seq#Update` with `update`: +/// +/// function Seq#Update(Seq T, int, T): Seq T; + +let update (#ty: Type) (s: seq ty) (i: nat{i < length s}) (v: ty) : seq ty = + let s1, _, s2 = FLT.split3 s i in + append s1 (append [v] s2) + +/// We represent the Dafny function `Seq#Contains` with `contains`: +/// +/// function Seq#Contains(Seq T, T): bool; + +let contains (#ty: Type) (s: seq ty) (v: ty) : Type0 = + FLT.memP v s + +/// We represent the Dafny function `Seq#Take` with `take`: +/// +/// function Seq#Take(s: Seq T, howMany: int): Seq T; + +let take (#ty: Type) (s: seq ty) (howMany: nat{howMany <= length s}) : seq ty = + let result, _ = FLT.splitAt howMany s in + result + +/// We represent the Dafny function `Seq#Drop` with `drop`: +/// +/// function Seq#Drop(s: Seq T, howMany: int): Seq T; + +let drop (#ty: Type) (s: seq ty) (howMany: nat{howMany <= length s}) : seq ty = + let _, result = FLT.splitAt howMany s in + result + +/// We represent the Dafny function `Seq#Equal` with `equal`. 
+/// +/// function Seq#Equal(Seq T, Seq T): bool; + +let equal (#ty: Type) (s0: seq ty) (s1: seq ty) : Type0 = + length s0 == length s1 /\ + (forall j.{:pattern index s0 j \/ index s1 j} + 0 <= j && j < length s0 ==> index s0 j == index s1 j) + +/// Instead of representing the Dafny function `Seq#SameUntil`, which +/// is only ever used in Dafny to represent prefix relations, we +/// instead use `is_prefix`. +/// +/// function Seq#SameUntil(Seq T, Seq T, int): bool; + +let is_prefix (#ty: Type) (s0: seq ty) (s1: seq ty) : Type0 = + length s0 <= length s1 + /\ (forall (j: nat).{:pattern index s0 j \/ index s1 j} + j < length s0 ==> index s0 j == index s1 j) + +/// We represent the Dafny function `Seq#Rank` with `rank`. +/// +/// function Seq#Rank(Seq T): int; + +let rank (#ty: Type) (v: ty) = v + +/// We now prove each of the facts that comprise `all_seq_facts`. +/// For fact `xxx_fact`, we prove it with `xxx_lemma`. Sometimes, that +/// requires a helper lemma, which we call `xxx_helper`. In some cases, +/// we need multiple helpers, so we suffix their names with integers. + +private let length_of_empty_is_zero_lemma () : Lemma (length_of_empty_is_zero_fact) = + () + +private let length_zero_implies_empty_lemma () : Lemma (length_zero_implies_empty_fact) = + () + +private let singleton_length_one_lemma () : Lemma (singleton_length_one_fact) = + () + +private let build_increments_length_lemma () : Lemma (build_increments_length_fact) = + introduce forall (ty: Type) (s: seq ty) (v: ty). length (build s v) = 1 + length s + with ( + FLT.append_length s [v] + ) + +private let rec index_into_build_helper (#ty: Type) (s: list ty) (v: ty) (i: nat{i < length (append s [v])}) + : Lemma (requires i <= length s) + (ensures index (append s [v]) i == (if i = length s then v else index s i)) = + FLT.append_length s [v]; + match s with + | [] -> () + | hd :: tl -> + if i = 0 then () else index_into_build_helper tl v (i - 1) + +private let index_into_build_lemma () + : Lemma (requires build_increments_length_fact u#a) + (ensures index_into_build_fact u#a ()) = + introduce forall (ty: Type) (s: seq ty) (v: ty) (i: nat{i < length (build s v)}). + (i = length s ==> index (build s v) i == v) + /\ (i <> length s ==> index (build s v) i == index s i) + with ( + index_into_build_helper u#a s v i + ) + +private let append_sums_lengths_lemma () : Lemma (append_sums_lengths_fact) = + introduce forall (ty: Type) (s0: seq ty) (s1: seq ty). length (append s0 s1) = length s0 + length s1 + with ( + FLT.append_length s0 s1 + ) + +private let index_into_singleton_lemma (_: squash (singleton_length_one_fact u#a)) : Lemma (index_into_singleton_fact u#a ()) = + () + +private let rec index_after_append_helper (ty: Type) (s0: list ty) (s1: list ty) (n: nat) + : Lemma (requires n < length (append s0 s1) && length (append s0 s1) = length s0 + length s1) + (ensures index (append s0 s1) n == (if n < length s0 then index s0 n else index s1 (n - length s0))) = + match s0 with + | [] -> () + | hd :: tl -> if n = 0 then () else index_after_append_helper ty tl s1 (n - 1) + +private let index_after_append_lemma (_: squash (append_sums_lengths_fact u#a)) : Lemma (index_after_append_fact u#a ()) = + introduce + forall (ty: Type) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)}). 
+ (n < length s0 ==> index (append s0 s1) n == index s0 n) + /\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0)) + with ( + index_after_append_helper ty s0 s1 n + ) + +private let rec lemma_splitAt_fst_length (#a:Type) (n:nat) (l:list a) : + Lemma + (requires (n <= length l)) + (ensures (length (fst (FLT.splitAt n l)) = n)) = + match n, l with + | 0, _ -> () + | _, [] -> () + | _, _ :: l' -> lemma_splitAt_fst_length (n - 1) l' + +private let update_maintains_length_helper (#ty: Type) (s: list ty) (i: nat{i < length s}) (v: ty) + : Lemma (length (update s i v) = length s) = + let s1, _, s2 = FLT.split3 s i in + lemma_splitAt_fst_length i s; + FLT.lemma_splitAt_snd_length i s; + FLT.append_length [v] s2; + FLT.append_length s1 (append [v] s2) + +private let update_maintains_length_lemma () : Lemma (update_maintains_length_fact) = + introduce forall (ty: Type) (s: seq ty) (i: nat{i < length s}) (v: ty). + length (update s i v) = length s + with ( + update_maintains_length_helper s i v + ) + +#restart-solver +#push-options "--z3rlimit_factor 4" +private let rec update_then_index_helper + (#ty: Type) + (s: list ty) + (i: nat{i < length s}) + (v: ty) + (n: nat{n < length (update s i v)}) + : Lemma (requires n < length s) + (ensures index (update s i v) n == (if i = n then v else index s n)) = + match s with + | hd :: tl -> + if i = 0 || n = 0 then () + else update_then_index_helper tl (i - 1) v (n - 1) +#pop-options + +private let update_then_index_lemma () : Lemma (update_then_index_fact) = + update_maintains_length_lemma (); + introduce + forall (ty: Type) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)}). + n < length s ==> + (i = n ==> index (update s i v) n == v) + /\ (i <> n ==> index (update s i v) n == index s n) + with + introduce _ ==> _ + with given_antecedent. ( + update_then_index_helper s i v n + ) + +private let contains_iff_exists_index_lemma () : Lemma (contains_iff_exists_index_fact) = + introduce + forall (ty: Type) (s: seq ty) (x: ty). + contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x) + with ( + introduce contains s x ==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x) + with given_antecedent. ( + introduce exists (i: nat). i < length s /\ index s i == x + with (FLT.index_of s x) and () + ); + introduce (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x) ==> contains s x + with given_antecedent. ( + eliminate exists (i: nat). i < length s /\ index s i == x + returns _ + with _. FLT.lemma_index_memP s i + ) + ) + +private let empty_doesnt_contain_anything_lemma () : Lemma (empty_doesnt_contain_anything_fact) = + () + +private let rec build_contains_equiv_helper (ty: Type) (s: list ty) (v: ty) (x: ty) + : Lemma (FLT.memP x (append s [v]) <==> (v == x \/ FLT.memP x s)) = + match s with + | [] -> () + | hd :: tl -> + eliminate x == hd \/ ~(x == hd) + returns FLT.memP x (append s [v]) <==> (v == x \/ FLT.memP x s) + with _. () + and _. build_contains_equiv_helper ty tl v x + +private let build_contains_equiv_lemma () : Lemma (build_contains_equiv_fact) = + introduce + forall (ty: Type) (s: seq ty) (v: ty) (x: ty). 
+ contains (build s v) x <==> (v == x \/ contains s x) + with ( + build_contains_equiv_helper ty s v x + ) + +private let rec take_contains_equiv_exists_helper1 (ty: Type) (s: list ty) (n: nat{n <= length s}) (x: ty) + : Lemma (requires FLT.memP x (take s n)) + (ensures (exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)) = + match s with + | hd :: tl -> + eliminate x == hd \/ ~(x == hd) + returns exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x + with case_x_eq_hd. + assert(index s 0 == x) + and case_x_ne_hd. ( + take_contains_equiv_exists_helper1 ty tl (n - 1) x; + eliminate exists (i_tl: nat). i_tl < n - 1 /\ i_tl < length tl /\ index tl i_tl == x + returns exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x + with _. + introduce exists (i: nat). i < n /\ i < length s /\ index s i == x + with (i_tl + 1) + and ()) + +private let rec take_contains_equiv_exists_helper2 (ty: Type) (s: list ty) (n: nat{n <= length s}) (x: ty) (i: nat) + : Lemma (requires (i < n /\ i < length s /\ index s i == x)) + (ensures FLT.memP x (take s n)) = + match s with + | hd :: tl -> + eliminate x == hd \/ ~(x == hd) + returns FLT.memP x (take s n) + with case_x_eq_hd. () + and case_x_ne_hd. take_contains_equiv_exists_helper2 ty tl (n - 1) x (i - 1) + +private let take_contains_equiv_exists_helper3 (ty: Type) (s: list ty) (n: nat{n <= length s}) (x: ty) + : Lemma (FLT.memP x (take s n) <==> + (exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)) = + introduce FLT.memP x (take s n) ==> + (exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x) + with given_antecedent. (take_contains_equiv_exists_helper1 ty s n x); + introduce (exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x) ==> + FLT.memP x (take s n) + with given_antecedent. ( + eliminate exists (i: nat). i < n /\ i < length s /\ index s i == x + returns _ + with _. take_contains_equiv_exists_helper2 ty s n x i + ) + +private let take_contains_equiv_exists_lemma () : Lemma (take_contains_equiv_exists_fact) = + introduce forall (ty: Type) (s: seq ty) (n: nat{n <= length s}) (x: ty). + contains (take s n) x <==> + (exists (i: nat). i < n /\ i < length s /\ index s i == x) + with ( + take_contains_equiv_exists_helper3 ty s n x + ) + +#push-options "--z3rlimit_factor 10 --fuel 1 --ifuel 1" +private let rec drop_contains_equiv_exists_helper1 (ty: Type) (s: list ty) (n: nat{n <= length s}) (x: ty) + : Lemma (requires FLT.memP x (drop s n)) + (ensures (exists (i: nat).{:pattern index s i} n <= i /\ i < length s /\ index s i == x)) = + match s with + | hd :: tl -> + eliminate n == 0 \/ n <> 0 + returns exists (i: nat).{:pattern index s i} n <= i /\ i < length s /\ index s i == x + with case_n_eq_0. ( + eliminate x == hd \/ ~(x == hd) + returns _ + with _. assert(index s 0 == x) + and _. ( + drop_contains_equiv_exists_helper1 ty tl n x; + eliminate exists (i_tl: nat). (n <= i_tl /\ i_tl < length tl /\ index tl i_tl == x) + returns _ + with _. introduce exists i. n <= i /\ i < length s /\ index s i == x with (i_tl + 1) and () + )) + and case_n_ne_0. ( + drop_contains_equiv_exists_helper1 ty tl (n - 1) x; + eliminate exists (i_tl: nat). n - 1 <= i_tl /\ i_tl < length tl /\ index tl i_tl == x + returns _ + with _. introduce exists i. 
n <= i /\ i < length s /\ index s i == x with (i_tl + 1) and ()) +#pop-options + +private let rec drop_contains_equiv_exists_helper2 (ty: Type) (s: list ty) (n: nat{n <= length s}) (x: ty) (i: nat) + : Lemma (requires (n <= i /\ i < length s /\ index s i == x)) + (ensures FLT.memP x (drop s n)) = + match s with + | hd :: tl -> + eliminate n == 0 \/ n <> 0 + returns FLT.memP x (drop s n) + with _. FLT.lemma_index_memP s i + and _. ( + drop_contains_equiv_exists_helper2 ty tl (n - 1) x (i - 1); + eliminate exists (i_tl: nat). n - 1 <= i_tl /\ i_tl < length tl /\ index tl i_tl == x + returns _ + with _. + introduce exists i. n <= i /\ i < length s /\ index s i == x with (i_tl + 1) and ()) + +private let drop_contains_equiv_exists_helper3 (ty: Type) (s: list ty) (n: nat{n <= length s}) (x: ty) + : Lemma (FLT.memP x (drop s n) <==> + (exists (i: nat).{:pattern index s i} n <= i /\ i < length s /\ index s i == x)) = + introduce FLT.memP x (drop s n) ==> + (exists (i: nat).{:pattern index s i} n <= i /\ i < length s /\ index s i == x) + with given_antecedent. ( + drop_contains_equiv_exists_helper1 ty s n x); + introduce (exists (i: nat).{:pattern index s i} n <= i /\ i < length s /\ index s i == x) ==> + FLT.memP x (drop s n) + with given_antecedent. ( + eliminate exists (i: nat). n <= i /\ i < length s /\ index s i == x + returns _ + with _. drop_contains_equiv_exists_helper2 ty s n x i + ) + +private let drop_contains_equiv_exists_lemma () : Lemma (drop_contains_equiv_exists_fact) = + introduce + forall (ty: Type) (s: seq ty) (n: nat{n <= length s}) (x: ty). + contains (drop s n) x <==> + (exists (i: nat).{:pattern index s i} n <= i /\ i < length s /\ index s i == x) + with ( + drop_contains_equiv_exists_helper3 ty s n x; + assert (FLT.memP x (drop s n) <==> + (exists (i: nat). n <= i /\ i < length s /\ index s i == x)) + ) + +private let equal_def_lemma () : Lemma (equal_def_fact) = + () + +private let extensionality_lemma () : Lemma (extensionality_fact) = + introduce forall (ty: Type) (a: seq ty) (b: seq ty). equal a b ==> a == b + with + introduce _ ==> _ + with given_antecedent. ( + introduce forall (i: nat) . i < length a ==> index a i == index b i + with + introduce _ ==> _ + with given_antecedent. ( + assert (index a i == index b i) // needed to trigger + ); + FStar.List.Tot.Properties.index_extensionality a b + ) + +private let is_prefix_def_lemma () : Lemma (is_prefix_def_fact) = + () + +private let take_length_lemma () : Lemma (take_length_fact) = + introduce forall (ty: Type) (s: seq ty) (n: nat). + n <= length s ==> length (take s n) = n + with + introduce _ ==> _ + with given_antecedent. ( + lemma_splitAt_fst_length n s + ) + +private let rec index_into_take_helper (#ty: Type) (s: list ty) (n: nat) (j: nat) + : Lemma (requires j < n && n <= length s /\ length (take s n) = n) + (ensures index (take s n) j == index s j) = + match s with + | hd :: tl -> if j = 0 || n = 0 then () else index_into_take_helper tl (n - 1) (j - 1) + +private let index_into_take_lemma () + : Lemma (requires take_length_fact u#a) (ensures index_into_take_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (n: nat) (j: nat). + j < n && n <= length s ==> index (take s n) j == index s j + with + introduce _ ==> _ + with given_antecedent. ( + assert (length (take s n) == n); // triggers take_length_fact + index_into_take_helper s n j + ) + +private let drop_length_lemma () : Lemma (drop_length_fact) = + introduce forall (ty: Type) (s: seq ty) (n: nat). 
+ n <= length s ==> length (drop s n) = length s - n + with + introduce _ ==> _ + with given_antecedent. ( + FLT.lemma_splitAt_snd_length n s + ) + +private let rec index_into_drop_helper (#ty: Type) (s: list ty) (n: nat) (j: nat) + : Lemma (requires j < length s - n /\ length (drop s n) = length s - n) + (ensures index (drop s n) j == index s (j + n)) = + match s with + | hd :: tl -> if n = 0 then () else index_into_drop_helper tl (n - 1) j + +private let index_into_drop_lemma () + : Lemma (requires drop_length_fact u#a) (ensures index_into_drop_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (n: nat) (j: nat). + j < length s - n ==> index (drop s n) j == index s (j + n) + with + introduce _ ==> _ + with given_antecedent. ( + assert (length (drop s n) = length s - n); // triggers drop_length_fact + index_into_drop_helper s n j + ) + +private let drop_index_offset_lemma () + : Lemma (requires drop_length_fact u#a) (ensures drop_index_offset_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (n: nat) (k: nat). + n <= k && k < length s ==> index (drop s n) (k - n) == index s k + with + introduce _ ==> _ + with given_antecedent. ( + assert (length (drop s n) = length s - n); // triggers drop_length_fact + index_into_drop_helper s n (k - n) + ) + +private let rec append_then_take_or_drop_helper (#ty: Type) (s: list ty) (t: list ty) (n: nat) + : Lemma (requires n = length s /\ length (append s t) = length s + length t) + (ensures take (append s t) n == s /\ drop (append s t) n == t) = + match s with + | [] -> () + | hd :: tl -> append_then_take_or_drop_helper tl t (n - 1) + +private let append_then_take_or_drop_lemma () + : Lemma (requires append_sums_lengths_fact u#a) (ensures append_then_take_or_drop_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (t: seq ty) (n: nat). + n = length s ==> take (append s t) n == s /\ drop (append s t) n == t + with + introduce _ ==> _ + with given_antecedent. ( + append_then_take_or_drop_helper s t n + ) + +#push-options "--z3rlimit 20" +private let rec take_commutes_with_in_range_update_helper (#ty: Type) (s: list ty) (i: nat) (v: ty) (n: nat) + : Lemma (requires i < n + /\ n <= length s + /\ length (update s i v) = length s + /\ length (take s n) = n) + (ensures take (update s i v) n == update (take s n) i v) = + match s with + | hd :: tl -> if i = 0 then () else (update_maintains_length_lemma() ; take_commutes_with_in_range_update_helper tl (i - 1) v (n - 1)) +#pop-options + +private let take_commutes_with_in_range_update_lemma () + : Lemma (requires update_maintains_length_fact u#a /\ take_length_fact u#a) + (ensures take_commutes_with_in_range_update_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (i: nat) (v: ty) (n: nat). + i < n && n <= length s ==> + take (update s i v) n == update (take s n) i v + with + introduce _ ==> _ + with given_antecedent. 
( + assert (length (update s i v) = length s); // triggers update_maintains_length_fact + assert (length (take s n) = n); // triggers take_length_fact + take_commutes_with_in_range_update_helper s i v n + ) + +private let rec take_ignores_out_of_range_update_helper (#ty: Type) (s: list ty) (i: nat) (v: ty) (n: nat) + : Lemma (requires n <= i + /\ i < length s + /\ length (update s i v) = length s) + (ensures take (update s i v) n == take s n) = + match s with + | hd :: tl -> if n = 0 then () else take_ignores_out_of_range_update_helper tl (i - 1) v (n - 1) + +private let take_ignores_out_of_range_update_lemma () + : Lemma (requires update_maintains_length_fact u#a) + (ensures take_ignores_out_of_range_update_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (i: nat) (v: ty) (n: nat). + n <= i && i < length s ==> + take (update s i v) n == take s n + with + introduce _ ==> _ + with given_antecedent. ( + assert (length (update s i v) = length s); // triggers update_maintains_length_fact + take_ignores_out_of_range_update_helper s i v n + ) + +#push-options "--fuel 2 --ifuel 1 --z3rlimit_factor 4" +private let rec drop_commutes_with_in_range_update_helper (#ty: Type) (s: list ty) (i: nat) (v: ty) (n: nat) + : Lemma (requires n <= i + /\ i < length s + /\ length (update s i v) = length s + /\ length (drop s n) = length s - n) + (ensures drop (update s i v) n == + update (drop s n) (i - n) v) = + match s with + | hd :: tl -> + if n = 0 + then () + else ( + update_maintains_length_lemma (); + drop_length_lemma (); + drop_commutes_with_in_range_update_helper tl (i - 1) v (n - 1) + ) +#pop-options + +private let drop_commutes_with_in_range_update_lemma () + : Lemma (requires update_maintains_length_fact u#a /\ drop_length_fact u#a) + (ensures drop_commutes_with_in_range_update_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (i: nat) (v: ty) (n: nat). + n <= i && i < length s ==> + drop (update s i v) n == update (drop s n) (i - n) v + with + introduce _ ==> _ + with given_antecedent. ( + assert (length (update s i v) = length s); // triggers update_maintains_length_fact + assert (length (drop s n) = length s - n); // triggers drop_length_fact + drop_commutes_with_in_range_update_helper s i v n + ) + +private let rec drop_ignores_out_of_range_update_helper (#ty: Type) (s: list ty) (i: nat) (v: ty) (n: nat) + : Lemma (requires i < n + /\ n <= length s + /\ length (update s i v) = length s) + (ensures drop (update s i v) n == drop s n) = + match s with + | hd :: tl -> if i = 0 then () else drop_ignores_out_of_range_update_helper tl (i - 1) v (n - 1) + +private let drop_ignores_out_of_range_update_lemma () + : Lemma (requires update_maintains_length_fact u#a) + (ensures drop_ignores_out_of_range_update_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (i: nat) (v: ty) (n: nat). + i < n && n <= length s ==> + drop (update s i v) n == drop s n + with + introduce _ ==> _ + with given_antecedent. 
( + assert (length (update s i v) = length s); // triggers update_maintains_length_fact + drop_ignores_out_of_range_update_helper s i v n + ) + +private let rec drop_commutes_with_build_helper (#ty: Type) (s: list ty) (v: ty) (n: nat) + : Lemma (requires n <= length s /\ length (append s [v]) = 1 + length s) + (ensures drop (append s [v]) n == append (drop s n) [v]) = + match s with + | [] -> + assert (append s [v] == [v]); + assert (n == 0); + () + | hd :: tl -> if n = 0 then () else drop_commutes_with_build_helper tl v (n - 1) + +private let drop_commutes_with_build_lemma () + : Lemma (requires build_increments_length_fact u#a) + (ensures drop_commutes_with_build_fact u#a ()) = + introduce + forall (ty: Type) (s: seq ty) (v: ty) (n: nat). + n <= length s ==> drop (build s v) n == build (drop s n) v + with + introduce _ ==> _ + with given_antecedent. ( + assert (length (build s v) = 1 + length s); // triggers build_increments_length_fact + drop_commutes_with_build_helper s v n + ) + +private let rank_def_lemma () : Lemma (rank_def_fact) = + () + +private let element_ranks_less_lemma () : Lemma (element_ranks_less_fact) = + introduce forall (ty: Type) (s: seq ty) (i: nat). i < length s ==> rank (index s i) << rank s + with + introduce _ ==> _ + with given_antecedent. ( + contains_iff_exists_index_lemma (); + assert (contains s (index s i)); + FLT.memP_precedes (index s i) s + ) + +private let rec drop_ranks_less_helper (ty: Type) (s: list ty) (i: nat) + : Lemma (requires 0 < i && i <= length s) + (ensures drop s i << s) = + match s with + | [] -> () + | hd :: tl -> if i = 1 then () else drop_ranks_less_helper ty tl (i - 1) + +private let drop_ranks_less_lemma () : Lemma (drop_ranks_less_fact) = + introduce forall (ty: Type) (s: seq ty) (i: nat). + 0 < i && i <= length s ==> rank (drop s i) << rank s + with + introduce _ ==> _ + with given_antecedent. ( + drop_ranks_less_helper ty s i + ) + +private let take_ranks_less_lemma () : Lemma (take_ranks_less_fact) = + take_length_lemma () + +private let append_take_drop_ranks_less_lemma () : Lemma (append_take_drop_ranks_less_fact) = + take_length_lemma (); + drop_length_lemma (); + append_sums_lengths_lemma () + +private let drop_zero_lemma () : Lemma (drop_zero_fact) = + () + +private let take_zero_lemma () : Lemma (take_zero_fact) = + () + +private let rec drop_then_drop_helper (#ty: Type) (s: seq ty) (m: nat) (n: nat) + : Lemma (requires m + n <= length s /\ length (drop s m) = length s - m) + (ensures drop (drop s m) n == drop s (m + n)) = + match s with + | [] -> () + | hd :: tl -> + if m = 0 + then () + else ( + drop_length_lemma (); + drop_then_drop_helper tl (m - 1) n + ) + +private let drop_then_drop_lemma () : Lemma (requires drop_length_fact u#a) (ensures drop_then_drop_fact u#a ()) = + introduce forall (ty: Type) (s: seq ty) (m: nat) (n: nat). + m + n <= length s ==> drop (drop s m) n == drop s (m + n) + with + introduce _ ==> _ + with given_antecedent. ( + assert (length (drop s m) = length s - m); // triggers drop_length_fact + drop_then_drop_helper s m n + ) + +/// Finally, we use all the lemmas for all the facts to establish +/// `all_seq_facts`. To get all those facts in scope, one can +/// invoke `all_seq_facts_lemma`. 
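+///
+/// As a usage sketch, a client module can bring all of these facts into its
+/// ambient context with a single top-level binding; this is exactly what the
+/// companion module `FStar.Sequence.Ambient` does:
+///
+///   let all_seq_facts_ambient : (squash all_seq_facts) = all_seq_facts_lemma ()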
+ +let all_seq_facts_lemma () : Lemma (all_seq_facts u#a) = + length_of_empty_is_zero_lemma u#a (); + length_zero_implies_empty_lemma u#a (); + singleton_length_one_lemma u#a (); + build_increments_length_lemma u#a (); + index_into_build_lemma u#a (); + append_sums_lengths_lemma u#a (); + index_into_singleton_lemma u#a (); + index_after_append_lemma u#a (); + update_maintains_length_lemma u#a (); + update_then_index_lemma u#a (); + contains_iff_exists_index_lemma u#a (); + empty_doesnt_contain_anything_lemma u#a (); + build_contains_equiv_lemma u#a (); + take_contains_equiv_exists_lemma u#a (); + drop_contains_equiv_exists_lemma u#a (); + equal_def_lemma u#a (); + extensionality_lemma u#a (); + is_prefix_def_lemma u#a (); + take_length_lemma u#a (); + index_into_take_lemma u#a (); + drop_length_lemma u#a (); + index_into_drop_lemma u#a (); + drop_index_offset_lemma u#a (); + append_then_take_or_drop_lemma u#a (); + take_commutes_with_in_range_update_lemma u#a (); + take_ignores_out_of_range_update_lemma u#a (); + drop_commutes_with_in_range_update_lemma u#a (); + drop_ignores_out_of_range_update_lemma u#a (); + drop_commutes_with_build_lemma u#a (); + rank_def_lemma u#a (); + element_ranks_less_lemma u#a (); + drop_ranks_less_lemma u#a (); + take_ranks_less_lemma u#a (); + append_take_drop_ranks_less_lemma u#a (); + drop_zero_lemma u#a (); + take_zero_lemma u#a (); + drop_then_drop_lemma u#a () diff --git a/stage0/ulib/experimental/FStar.Sequence.Base.fsti b/stage0/ulib/experimental/FStar.Sequence.Base.fsti new file mode 100644 index 00000000000..0c4246ccbb1 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Base.fsti @@ -0,0 +1,603 @@ +(* + Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan + Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module declares a type and functions used for modeling +sequences as they're modeled in Dafny. + +@summary Type and functions for modeling sequences +*) +module FStar.Sequence.Base + +new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a + +(** + We translate each Dafny sequence function prefixed with `Seq#` + into an F* function. +**) + +/// We represent the Dafny function `Seq#Length` with `length`: +/// +/// function Seq#Length(Seq T): int; + +val length : #ty: Type -> seq ty -> nat + +/// We represent the Dafny function `Seq#Empty` with `empty`: +/// +/// function Seq#Empty(): Seq T; +/// +/// We also provide an alias `nil` for it. 
+ +val empty : #ty: Type -> seq ty + +/// We represent the Dafny function `Seq#Singleton` with `singleton`: +/// +/// function Seq#Singleton(T): Seq T; + +val singleton : #ty: Type -> ty -> seq ty + +/// We represent the Dafny function `Seq#Index` with `index`: +/// +/// function Seq#Index(Seq T, int): T; +/// +/// We also provide the infix symbol `$@` for it. + +val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty +let ($@) = index + +/// We represent the Dafny function `Seq#Build` with `build`: +/// +/// function Seq#Build(s: Seq T, val: T): Seq T; +/// +/// We also provide the infix symbol `$::` for it. + +val build: #ty: Type -> seq ty -> ty -> seq ty +let ($::) = build + +/// We represent the Dafny function `Seq#Append` with `append`: +/// +/// function Seq#Append(Seq T, Seq T): Seq T; +/// +/// We also provide the infix notation `$+` for it. + +val append: #ty: Type -> seq ty -> seq ty -> seq ty +let ($+) = append + +/// We represent the Dafny function `Seq#Update` with `update`: +/// +/// function Seq#Update(Seq T, int, T): Seq T; + +val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty + +/// We represent the Dafny function `Seq#Contains` with `contains`: +/// +/// function Seq#Contains(Seq T, T): bool; + +val contains: #ty: Type -> seq ty -> ty -> Type0 + +/// We represent the Dafny function `Seq#Take` with `take`: +/// +/// function Seq#Take(s: Seq T, howMany: int): Seq T; + +val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty + +/// We represent the Dafny function `Seq#Drop` with `drop`: +/// +/// function Seq#Drop(s: Seq T, howMany: int): Seq T; + +val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty + +/// We represent the Dafny function `Seq#Equal` with `equal`. +/// +/// function Seq#Equal(Seq T, Seq T): bool; +/// +/// We also provide the infix symbol `$==` for it. + +val equal: #ty: Type -> seq ty -> seq ty -> Type0 +let ($==) = equal + +/// Instead of representing the Dafny function `Seq#SameUntil`, which +/// is only ever used in Dafny to represent prefix relations, we +/// instead use `is_prefix`. +/// +/// function Seq#SameUntil(Seq T, Seq T, int): bool; +/// +/// We also provide the infix notation `$<=` for it. + +val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0 +let ($<=) = is_prefix + +/// We represent the Dafny function `Seq#Rank` with `rank`. +/// +/// function Seq#Rank(Seq T): int; + +val rank: #ty: Type -> ty -> ty + +(** + We translate each sequence axiom from the Dafny prelude into an F* + predicate ending in `_fact`. 
+**) + +/// We don't need the following axiom since we return a nat from length: +/// +/// axiom (forall s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s)); + +/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`: +/// +/// axiom (forall :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0); + +private let length_of_empty_is_zero_fact = + forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0 + +/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`: +/// +/// axiom (forall s: Seq T :: { Seq#Length(s) } +/// (Seq#Length(s) == 0 ==> s == Seq#Empty()) + +private let length_zero_implies_empty_fact = + forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty + +/// We represent the following Dafny axiom with `singleton_length_one_fact`: +/// +/// axiom (forall t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1); + +private let singleton_length_one_fact = + forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1 + +/// We represent the following Dafny axiom with `build_increments_length_fact`: +/// +/// axiom (forall s: Seq T, v: T :: +/// { Seq#Build(s,v) } +/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s)); + +private let build_increments_length_fact = + forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v} + length (build s v) = 1 + length s + +/// We represent the following Dafny axiom with `index_into_build_fact`: +/// +/// axiom (forall s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) } +/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) && +/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i))); + +private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)}) + .{:pattern index (build s v) i} + (i = length s ==> index (build s v) i == v) + /\ (i <> length s ==> index (build s v) i == index s i) + +/// We represent the following Dafny axiom with `append_sums_lengths_fact`: +/// +/// axiom (forall s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) } +/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1)); + +private let append_sums_lengths_fact = + forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)} + length (append s0 s1) = length s0 + length s1 + +/// We represent the following Dafny axiom with `index_into_singleton_fact`: +/// +/// axiom (forall t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t); + +private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) = + forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0} + index (singleton v) 0 == v + +/// We represent the following axiom with `index_after_append_fact`: +/// +/// axiom (forall s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) } +/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) && +/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0)))); + +private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) = + forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)}) + .{:pattern index (append s0 s1) n} + (n < length s0 ==> index (append s0 s1) n == index s0 n) + /\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0)) + +/// We represent the following Dafny axiom with 
`update_maintains_length`: +/// +/// axiom (forall s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) } +/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s)); + +private let update_maintains_length_fact = + forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)} + length (update s i v) = length s + +/// We represent the following Dafny axiom with `update_then_index_fact`: +/// +/// axiom (forall s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) } +/// 0 <= n && n < Seq#Length(s) ==> +/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) && +/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n))); + +private let update_then_index_fact = + forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)}) + .{:pattern index (update s i v) n} + n < length s ==> + (i = n ==> index (update s i v) n == v) + /\ (i <> n ==> index (update s i v) n == index s n) + +/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`: +/// +/// axiom (forall s: Seq T, x: T :: { Seq#Contains(s,x) } +/// Seq#Contains(s,x) <==> +/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x)); + +private let contains_iff_exists_index_fact = + forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x} + contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x) + +/// We represent the following Dafny axiom with `empty_doesnt_contain_fact`: +/// +/// axiom (forall x: T :: +/// { Seq#Contains(Seq#Empty(), x) } +/// !Seq#Contains(Seq#Empty(), x)); + +private let empty_doesnt_contain_anything_fact = + forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x) + +/// We represent the following Dafny axiom with `build_contains_equiv_fact`: +/// +/// axiom (forall s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy +/// { Seq#Contains(Seq#Build(s, v), x) } +/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x))); + +private let build_contains_equiv_fact = + forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x} + contains (build s v) x <==> (v == x \/ contains s x) + +/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`: +/// +/// axiom (forall s: Seq T, n: int, x: T :: +/// { Seq#Contains(Seq#Take(s, n), x) } +/// Seq#Contains(Seq#Take(s, n), x) <==> +/// (exists i: int :: { Seq#Index(s, i) } +/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x)); + +private let take_contains_equiv_exists_fact = + forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x} + contains (take s n) x <==> + (exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x) + +/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`: +/// +/// axiom (forall s: Seq T, n: int, x: T :: +/// { Seq#Contains(Seq#Drop(s, n), x) } +/// Seq#Contains(Seq#Drop(s, n), x) <==> +/// (exists i: int :: { Seq#Index(s, i) } +/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x)); + +private let drop_contains_equiv_exists_fact = + forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x} + contains (drop s n) x <==> + (exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x) + +/// We represent the following Dafny axiom 
with `equal_def_fact`: +/// +/// axiom (forall s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) } +/// Seq#Equal(s0,s1) <==> +/// Seq#Length(s0) == Seq#Length(s1) && +/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) } +/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j))); + +private let equal_def_fact = + forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1} + equal s0 s1 <==> + length s0 == length s1 /\ + (forall j.{:pattern index s0 j \/ index s1 j} + 0 <= j && j < length s0 ==> index s0 j == index s1 j) + +/// We represent the following Dafny axiom with `extensionality_fact`: +/// +/// axiom (forall a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences +/// Seq#Equal(a,b) ==> a == b); + +private let extensionality_fact = + forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b} + equal a b ==> a == b + +/// We represent an analog of the following Dafny axiom with +/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead +/// of `Seq#SameUntil`. +/// +/// axiom (forall s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) } +/// Seq#SameUntil(s0,s1,n) <==> +/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) } +/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j))); + +private let is_prefix_def_fact = + forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1} + is_prefix s0 s1 <==> + length s0 <= length s1 + /\ (forall (j: nat).{:pattern index s0 j \/ index s1 j} + j < length s0 ==> index s0 j == index s1 j) + +/// We represent the following Dafny axiom with `take_length_fact`: +/// +/// axiom (forall s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) } +/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n); + +private let take_length_fact = + forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)} + n <= length s ==> length (take s n) = n + +/// We represent the following Dafny axiom with `index_into_take_fact`. +/// +/// axiom (forall s: Seq T, n: int, j: int :: +/// {:weight 25} +/// { Seq#Index(Seq#Take(s,n), j) } +/// { Seq#Index(s, j), Seq#Take(s,n) } +/// 0 <= j && j < n && j < Seq#Length(s) ==> +/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j)); + +private let index_into_take_fact (_ : squash (take_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat). + {:pattern index (take s n) j \/ index s j ; take s n} + j < n && n <= length s ==> index (take s n) j == index s j + +/// We represent the following Dafny axiom with `drop_length_fact`. +/// +/// axiom (forall s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) } +/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n); + +private let drop_length_fact = + forall (ty: Type u#a) (s: seq ty) (n: nat). + {:pattern length (drop s n)} + n <= length s ==> length (drop s n) = length s - n + +/// We represent the following Dafny axiom with `index_into_drop_fact`. +/// +/// axiom (forall s: Seq T, n: int, j: int :: +/// {:weight 25} +/// { Seq#Index(Seq#Drop(s,n), j) } +/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==> +/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n)); + +private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat). + {:pattern index (drop s n) j} + j < length s - n ==> index (drop s n) j == index s (j + n) + +/// We represent the following Dafny axiom with `drop_index_offset_fact`. 
+/// +/// axiom (forall s: Seq T, n: int, k: int :: +/// {:weight 25} +/// { Seq#Index(s, k), Seq#Drop(s,n) } +/// 0 <= n && n <= k && k < Seq#Length(s) ==> +/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k)); + +private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat). + {:pattern index s k; drop s n} + n <= k && k < length s ==> index (drop s n) (k - n) == index s k + +/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`. +/// +/// axiom (forall s, t: Seq T, n: int :: +/// { Seq#Take(Seq#Append(s, t), n) } +/// { Seq#Drop(Seq#Append(s, t), n) } +/// n == Seq#Length(s) +/// ==> +/// Seq#Take(Seq#Append(s, t), n) == s && +/// Seq#Drop(Seq#Append(s, t), n) == t); + +private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat). + {:pattern take (append s t) n \/ drop (append s t) n} + n = length s ==> take (append s t) n == s /\ drop (append s t) n == t + +/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`. +/// +/// axiom (forall s: Seq T, i: int, v: T, n: int :: +/// { Seq#Take(Seq#Update(s, i, v), n) } +/// 0 <= i && i < n && n <= Seq#Length(s) ==> +/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) ); + +private let take_commutes_with_in_range_update_fact + (_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n} + i < n && n <= length s ==> + take (update s i v) n == update (take s n) i v + +/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`. +/// +/// axiom (forall s: Seq T, i: int, v: T, n: int :: +/// { Seq#Take(Seq#Update(s, i, v), n) } +/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n)); + +private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n} + n <= i && i < length s ==> + take (update s i v) n == take s n + +/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`. +/// +/// axiom (forall s: Seq T, i: int, v: T, n: int :: +/// { Seq#Drop(Seq#Update(s, i, v), n) } +/// 0 <= n && n <= i && i < Seq#Length(s) ==> +/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) ); + +private let drop_commutes_with_in_range_update_fact + (_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n} + n <= i && i < length s ==> + drop (update s i v) n == update (drop s n) (i - n) v + +/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`. +/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as +/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here. 
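+/// As quoted below, the axiom uses the bound `n < Seq#Length(s)`, whereas the
+/// F* fact uses the relaxed precondition `n <= length s`, so it also covers
+/// the case where `n` equals the length of the sequence.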
+/// +/// axiom (forall s: Seq T, i: int, v: T, n: int :: +/// { Seq#Drop(Seq#Update(s, i, v), n) } +/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n)); + +private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n} + i < n && n <= length s ==> + drop (update s i v) n == drop s n + +/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`. +/// +/// axiom (forall s: Seq T, v: T, n: int :: +/// { Seq#Drop(Seq#Build(s, v), n) } +/// 0 <= n && n <= Seq#Length(s) ==> +/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) ); + +private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n} + n <= length s ==> drop (build s v) n == build (drop s n) v + +/// We include the definition of `rank` among our facts. + +private let rank_def_fact = + forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v + +/// We represent the following Dafny axiom with `element_ranks_less_fact`. +/// +/// axiom (forall s: Seq Box, i: int :: +/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) } +/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) ); + +private let element_ranks_less_fact = + forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)} + i < length s ==> rank (index s i) << rank s + +/// We represent the following Dafny axiom with `drop_ranks_less_fact`. +/// +/// axiom (forall s: Seq T, i: int :: +/// { Seq#Rank(Seq#Drop(s, i)) } +/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) ); + +private let drop_ranks_less_fact = + forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)} + 0 < i && i <= length s ==> rank (drop s i) << rank s + +/// We represent the following Dafny axiom with +/// `take_ranks_less_fact`. However, since it isn't true in F* (which +/// has strong requirements for <<), we instead substitute length, +/// requiring decreases clauses to use length in this case. +/// +/// axiom (forall s: Seq T, i: int :: +/// { Seq#Rank(Seq#Take(s, i)) } +/// 0 <= i && i < Seq#Length(s) ==> Seq#Rank(Seq#Take(s, i)) < Seq#Rank(s) ); + +private let take_ranks_less_fact = + forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern length (take s i)} + i < length s ==> length (take s i) << length s + +/// We represent the following Dafny axiom with +/// `append_take_drop_ranks_less_fact`. However, since it isn't true +/// in F* (which has strong requirements for <<), we instead +/// substitute length, requiring decreases clauses to use +/// length in this case. +/// +/// axiom (forall s: Seq T, i: int, j: int :: +/// { Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) } +/// 0 <= i && i < j && j <= Seq#Length(s) ==> +/// Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) < Seq#Rank(s) ); + +private let append_take_drop_ranks_less_fact = + forall (ty: Type u#a) (s: seq ty) (i: nat) (j: nat).{:pattern length (append (take s i) (drop s j))} + i < j && j <= length s ==> length (append (take s i) (drop s j)) << length s + +/// We represent the following Dafny axiom with `drop_zero_fact`. 
+/// +/// axiom (forall s: Seq T, n: int :: { Seq#Drop(s, n) } +/// n == 0 ==> Seq#Drop(s, n) == s); + +private let drop_zero_fact = + forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern drop s n} + n = 0 ==> drop s n == s + +/// We represent the following Dafny axiom with `take_zero_fact`. +/// +/// axiom (forall s: Seq T, n: int :: { Seq#Take(s, n) } +/// n == 0 ==> Seq#Take(s, n) == Seq#Empty()); + +private let take_zero_fact = + forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern take s n} + n = 0 ==> take s n == empty + +/// We represent the following Dafny axiom with `drop_then_drop_fact`. +/// +/// axiom (forall s: Seq T, m, n: int :: { Seq#Drop(Seq#Drop(s, m), n) } +/// 0 <= m && 0 <= n && m+n <= Seq#Length(s) ==> +/// Seq#Drop(Seq#Drop(s, m), n) == Seq#Drop(s, m+n)); + +private let drop_then_drop_fact (_: squash (drop_length_fact u#a)) = + forall (ty: Type u#a) (s: seq ty) (m: nat) (n: nat).{:pattern drop (drop s m) n} + m + n <= length s ==> drop (drop s m) n == drop s (m + n) + +(** + The predicate `all_dafny_seq_facts` collects all the Dafny sequence axioms. + One can bring all these facts into scope with `all_dafny_seq_facts_lemma ()`. +**) + +let all_seq_facts = + length_of_empty_is_zero_fact u#a + /\ length_zero_implies_empty_fact u#a + /\ singleton_length_one_fact u#a + /\ build_increments_length_fact u#a + /\ index_into_build_fact u#a () + /\ append_sums_lengths_fact u#a + /\ index_into_singleton_fact u#a () + /\ index_after_append_fact u#a () + /\ update_maintains_length_fact u#a + /\ update_then_index_fact u#a + /\ contains_iff_exists_index_fact u#a + /\ empty_doesnt_contain_anything_fact u#a + /\ build_contains_equiv_fact u#a + /\ take_contains_equiv_exists_fact u#a + /\ drop_contains_equiv_exists_fact u#a + /\ equal_def_fact u#a + /\ extensionality_fact u#a + /\ is_prefix_def_fact u#a + /\ take_length_fact u#a + /\ index_into_take_fact u#a () + /\ drop_length_fact u#a + /\ index_into_drop_fact u#a () + /\ drop_index_offset_fact u#a () + /\ append_then_take_or_drop_fact u#a () + /\ take_commutes_with_in_range_update_fact u#a () + /\ take_ignores_out_of_range_update_fact u#a () + /\ drop_commutes_with_in_range_update_fact u#a () + /\ drop_ignores_out_of_range_update_fact u#a () + /\ drop_commutes_with_build_fact u#a () + /\ rank_def_fact u#a + /\ element_ranks_less_fact u#a + /\ drop_ranks_less_fact u#a + /\ take_ranks_less_fact u#a + /\ append_take_drop_ranks_less_fact u#a + /\ drop_zero_fact u#a + /\ take_zero_fact u#a + /\ drop_then_drop_fact u#a () + +val all_seq_facts_lemma : unit -> Lemma (all_seq_facts u#a) diff --git a/stage0/ulib/experimental/FStar.Sequence.Permutation.fst b/stage0/ulib/experimental/FStar.Sequence.Permutation.fst new file mode 100644 index 00000000000..5d721c380b9 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Permutation.fst @@ -0,0 +1,367 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: N. 
Swamy +*) +module FStar.Sequence.Permutation +open FStar.Sequence +open FStar.Calc +open FStar.Sequence.Util +module S = FStar.Sequence + +//////////////////////////////////////////////////////////////////////////////// +[@@"opaque_to_smt"] +let is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) = + S.length s0 == S.length s1 /\ + (forall x y. // {:pattern f x; f y} + x <> y ==> f x <> f y) /\ + (forall (i:nat{i < S.length s0}). // {:pattern (S.index s1 (f i))} + S.index s0 i == S.index s1 (f i)) + +let reveal_is_permutation #a (s0 s1:seq a) (f:index_fun s0) + = reveal_opaque (`%is_permutation) (is_permutation s0 s1 f) + +let reveal_is_permutation_nopats (#a:Type) (s0 s1:seq a) (f:index_fun s0) + : Lemma (is_permutation s0 s1 f <==> + + S.length s0 == S.length s1 /\ + + (forall x y. x <> y ==> f x <> f y) /\ + + (forall (i:nat{i < S.length s0}). + S.index s0 i == S.index s1 (f i))) + = reveal_is_permutation s0 s1 f + + +let split3_index (#a:eqtype) (s0:seq a) (x:a) (s1:seq a) (j:nat) + : Lemma + (requires j < S.length (S.append s0 s1)) + (ensures ( + let s = S.append s0 (cons x s1) in + let s' = S.append s0 s1 in + let n = S.length s0 in + if j < n then S.index s' j == S.index s j + else S.index s' j == S.index s (j + 1) + )) + = let n = S.length (S.append s0 s1) in + if j < n then () + else () + +let rec find (#a:eqtype) (x:a) (s:seq a{ count x s > 0 }) + : Tot (frags:(seq a & seq a) { + let s' = S.append (fst frags) (snd frags) in + let n = S.length (fst frags) in + s `S.equal` S.append (fst frags) (cons x (snd frags)) + }) (decreases (S.length s)) + = reveal_opaque (`%count) (count #a); + if head s = x + then S.empty, tail s + else ( + let pfx, sfx = find x (tail s) in + assert (S.equal (tail s) + (S.append pfx (cons x sfx))); + assert (S.equal s + (cons (head s) (tail s))); + cons (head s) pfx, sfx + ) + +let count_singleton_one (#a:eqtype) (x:a) + : Lemma (count x (singleton x) == 1) + = reveal_opaque (`%count) (count #a) +let count_singleton_zero (#a:eqtype) (x y:a) + : Lemma (x =!= y ==> count x (singleton y) == 0) + = reveal_opaque (`%count) (count #a) +let equal_counts_empty (#a:eqtype) (s0 s1:seq a) + : Lemma + (requires S.length s0 == 0 /\ (forall x. count x s0 == count x s1)) + (ensures S.length s1 == 0) + = reveal_opaque (`%count) (count #a); + if S.length s1 > 0 then + assert (count (head s1) s1 > 0) +let count_head (#a:eqtype) (x:seq a{ S.length x > 0 }) + : Lemma (count (head x) x > 0) + = reveal_opaque (`%count) (count #a) + +#restart-solver +#push-options "--fuel 0 --ifuel 0 --z3rlimit_factor 4" +let rec permutation_from_equal_counts (#a:eqtype) (s0:seq a) (s1:seq a{(forall x. count x s0 == count x s1)}) + : Tot (seqperm s0 s1) + (decreases (S.length s0)) + = if S.length s0 = 0 + then ( + count_empty s0; + assert (forall x. count x s0 = 0); + let f : index_fun s0 = fun i -> i in + reveal_is_permutation_nopats s0 s1 f; + equal_counts_empty s0 s1; + f + ) + else ( + count_head s0; + let pfx, sfx = find (head s0) s1 in + introduce forall x. 
count x (tail s0) == count x (S.append pfx sfx) + with + ( + if x = head s0 + then ( + calc (eq2 #int) { + count x (tail s0) <: int; + (==) { + assert (s0 `S.equal` cons (head s0) (tail s0)); + lemma_append_count_aux (head s0) (S.singleton (head s0)) (tail s0); + count_singleton_one x + } + count x s0 - 1 <: int; + (==) {} + count x s1 - 1 <: int; + (==) {} + count x (S.append pfx (cons (head s0) sfx)) - 1 <: int; + (==) { lemma_append_count_aux x pfx (cons (head s0) sfx) } + count x pfx + count x (cons (head s0) sfx) - 1 <: int; + (==) { lemma_append_count_aux x (S.singleton (head s0)) sfx } + count x pfx + (count x (S.singleton (head s0)) + count x sfx) - 1 <: int; + (==) { count_singleton_one x } + count x pfx + count x sfx <: int; + (==) { lemma_append_count_aux x pfx sfx } + count x (S.append pfx sfx) <: int; + } + ) + else ( + calc (==) { + count x (tail s0); + (==) { + assert (s0 `S.equal` cons (head s0) (tail s0)); + lemma_append_count_aux x (S.singleton (head s0)) (tail s0); + count_singleton_zero x (head s0) + } + count x s0; + (==) { } + count x s1; + (==) { } + count x (S.append pfx (cons (head s0) sfx)); + (==) { lemma_append_count_aux x pfx (cons (head s0) sfx) } + count x pfx + count x (cons (head s0) sfx); + (==) { lemma_append_count_aux x (S.singleton (head s0)) sfx } + count x pfx + (count x (S.singleton (head s0)) + count x sfx) ; + (==) { count_singleton_zero x (head s0) } + count x pfx + count x sfx; + (==) { lemma_append_count_aux x pfx sfx } + count x (S.append pfx sfx); + } + ) + ); + let s1' = (S.append pfx sfx) in + let f' = permutation_from_equal_counts (tail s0) s1' in + reveal_is_permutation_nopats (tail s0) s1' f'; + let n = S.length pfx in + let f : index_fun s0 = + fun i -> if i = 0 + then n + else if f' (i - 1) < n + then f' (i - 1) + else f' (i - 1) + 1 + in + assert (S.length s0 == S.length s1); + assert (forall x y. x <> y ==> f' x <> f' y); + introduce forall x y. x <> y ==> f x <> f y + with (introduce _ ==> _ + with _. ( + if f x = n || f y = n + then () + else if f' (x - 1) < n + then ( + assert (f x == f' (x - 1)); + if f' (y - 1) < n + then assert (f y == f' (y - 1)) + else assert (f y == f' (y - 1) + 1) + ) + else ( + assert (f x == f' (x - 1) + 1); + if f' (y - 1) < n + then assert (f y == f' (y - 1)) + else assert (f y == f' (y - 1) + 1) + ) + ) + ); + reveal_is_permutation_nopats s0 s1 f; f) +#pop-options + +#restart-solver + +module CM = FStar.Algebra.CommMonoid +let elim_monoid_laws #a (m:CM.cm a) + : Lemma ( + (forall x y. {:pattern m.mult x y} m.mult x y == m.mult y x) /\ + (forall x y z.{:pattern (m.mult x (m.mult y z))} m.mult x (m.mult y z) == m.mult (m.mult x y) z) /\ + (forall x.{:pattern (m.mult x m.unit)} m.mult x m.unit == x) + ) + = introduce forall x y. m.mult x y == m.mult y x + with ( m.commutativity x y ); + + introduce forall x y z. m.mult x (m.mult y z) == m.mult (m.mult x y) z + with ( m.associativity x y z ); + + introduce forall x. 
m.mult x m.unit == x + with ( m.identity x ) + +#restart-solver +#push-options "--fuel 1 --ifuel 0" +let rec foldm_back_append #a (m:CM.cm a) (s1 s2: seq a) + : Lemma + (ensures foldm_back m (append s1 s2) == m.mult (foldm_back m s1) (foldm_back m s2)) + (decreases (S.length s2)) + = elim_monoid_laws m; + if S.length s2 = 0 + then ( + assert (S.append s1 s2 `S.equal` s1); + assert (foldm_back m s2 == m.unit) + ) + else ( + let s2', last = un_build s2 in + calc (==) + { + foldm_back m (append s1 s2); + (==) { assert (S.equal (append s1 s2) + (S.build (append s1 s2') last)) } + foldm_back m (S.build (append s1 s2') last); + (==) {} + fold_back m.mult (S.build (append s1 s2') last) m.unit; + (==) {} + m.mult ((S.build (append s1 s2') last) $@ (length s1 + length s2')) + (fold_back m.mult (take (S.build (append s1 s2') last) (length s1 + length s2')) m.unit); + (==) { } + m.mult last + (fold_back m.mult (take (S.build (append s1 s2') last) (length s1 + length s2')) m.unit); + (==) { + assert (S.equal (take (S.build (append s1 s2') last) (length s1 + length s2')) (append s1 s2')) + } + m.mult last (foldm_back m (append s1 s2')); + (==) { foldm_back_append m s1 s2' } + m.mult last (m.mult (foldm_back m s1) (foldm_back m s2')); + (==) { } + m.mult (foldm_back m s1) (m.mult last (foldm_back m s2')); + (==) { } + m.mult (foldm_back m s1) (foldm_back m s2); + } + ) +#pop-options + +let foldm_back_sym #a (m:CM.cm a) (s1 s2: seq a) + : Lemma + (ensures foldm_back m (append s1 s2) == foldm_back m (append s2 s1)) + = elim_monoid_laws m; + foldm_back_append m s1 s2; + foldm_back_append m s2 s1 + +#push-options "--fuel 2" +let foldm_back_singleton (#a:_) (m:CM.cm a) (x:a) + : Lemma (foldm_back m (singleton x) == x) + = elim_monoid_laws m +#pop-options + +#push-options "--fuel 0" +let foldm_back3 #a (m:CM.cm a) (s1:seq a) (x:a) (s2:seq a) + : Lemma (foldm_back m (S.append s1 (cons x s2)) == + m.mult x (foldm_back m (S.append s1 s2))) + = calc (==) + { + foldm_back m (S.append s1 (cons x s2)); + (==) { foldm_back_append m s1 (cons x s2) } + m.mult (foldm_back m s1) (foldm_back m (cons x s2)); + (==) { foldm_back_append m (singleton x) s2 } + m.mult (foldm_back m s1) (m.mult (foldm_back m (singleton x)) (foldm_back m s2)); + (==) { foldm_back_singleton m x } + m.mult (foldm_back m s1) (m.mult x (foldm_back m s2)); + (==) { elim_monoid_laws m } + m.mult x (m.mult (foldm_back m s1) (foldm_back m s2)); + (==) { foldm_back_append m s1 s2 } + m.mult x (foldm_back m (S.append s1 s2)); + } +#pop-options + + +let remove_i #a (s:seq a) (i:nat{i < S.length s}) + : a & seq a + = let s0, s1 = split s i in + head s1, append s0 (tail s1) + +let shift_perm' #a + (s0 s1:seq a) + (_:squash (S.length s0 == S.length s1 /\ S.length s0 > 0)) + (p:seqperm s0 s1) + : Tot (seqperm (fst (un_build s0)) + (snd (remove_i s1 (p (S.length s0 - 1))))) + = reveal_is_permutation s0 s1 p; + let s0', last = un_build s0 in + let n = S.length s0' in + let p' (i:nat{ i < n }) + : j:nat{ j < n } + = if p i < p n then p i else p i - 1 + in + let _, s1' = remove_i s1 (p n) in + reveal_is_permutation_nopats s0' s1' p'; + p' + +let shift_perm #a + (s0 s1:seq a) + (_:squash (S.length s0 == S.length s1 /\ S.length s0 > 0)) + (p:seqperm s0 s1) + : Pure (seqperm (fst (un_build s0)) + (snd (remove_i s1 (p (S.length s0 - 1))))) + (requires True) + (ensures fun _ -> let n = S.length s0 - 1 in + S.index s1 (p n) == + S.index s0 n) + = reveal_is_permutation s0 s1 p; + shift_perm' s0 s1 () p + +let seqperm_len #a (s0 s1:seq a) + (p:seqperm s0 s1) + : Lemma + 
(ensures S.length s0 == S.length s1) + = reveal_is_permutation s0 s1 p + +let rec foldm_back_perm #a m s0 s1 p + : Lemma + (ensures foldm_back m s0 == foldm_back m s1) + (decreases (S.length s0)) + = seqperm_len s0 s1 p; + if S.length s0 = 0 then ( + assert (S.equal s0 s1) + ) + else ( + let n0 = S.length s0 - 1 in + let prefix, last = un_build s0 in + let prefix', suffix' = split s1 (p n0) in + let last', suffix' = suffix' $@ 0, drop suffix' 1 in + let s1' = snd (remove_i s1 (p n0)) in + let p' : seqperm prefix s1' = shift_perm s0 s1 () p in + assert (last == last'); + calc + (==) + { + foldm_back m s1; + (==) { assert (s1 `S.equal` S.append prefix' (cons last' suffix')) } + foldm_back m (S.append prefix' (cons last' suffix')); + (==) { foldm_back3 m prefix' last' suffix' } + m.mult last' (foldm_back m (append prefix' suffix')); + (==) { assert (S.equal (append prefix' suffix') s1') } + m.mult last' (foldm_back m s1'); + (==) { foldm_back_perm m prefix s1' p' } + m.mult last' (foldm_back m prefix); + (==) { } + foldm_back m s0; + } + ) diff --git a/stage0/ulib/experimental/FStar.Sequence.Permutation.fsti b/stage0/ulib/experimental/FStar.Sequence.Permutation.fsti new file mode 100644 index 00000000000..ed82d570ed5 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Permutation.fsti @@ -0,0 +1,94 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: N. Swamy +*) +module FStar.Sequence.Permutation +open FStar.Sequence +open FStar.Sequence.Util +module S = FStar.Sequence +(* This module defines a permutation on sequences as a bijection among + the sequence indices relating equal elements. + + It defines a few utilities to work with such permutations. + + Notably: + + 1. Given two sequence with equal element counts, it constructs a + permutation. + + 2. Folding the multiplication of a commutative monoid over a + sequence and its permutation produces the same result +*) + +(* A bounded natural number *) +let nat_at_most (n:nat) = m:nat { m < n } + +(* A function from the indices of `s` to itself *) +let index_fun #a (s:seq a) = nat_at_most (S.length s) -> nat_at_most (S.length s) + +(* An abstract predicate defining when an index_fun is a permutation *) +val is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) : prop + +(* Revealing the interpretation of is_permutation *) +val reveal_is_permutation (#a:Type) (s0 s1:seq a) (f:index_fun s0) + : Lemma (is_permutation s0 s1 f <==> + (* lengths of the sequences are the same *) + S.length s0 == S.length s1 /\ + (* f is injective *) + (forall x y. 
{:pattern f x; f y} + x <> y ==> f x <> f y) /\ + (* and f relates equal items in s0 and s1 *) + (forall (i:nat{i < S.length s0}).{:pattern (S.index s1 (f i))} + S.index s0 i == S.index s1 (f i))) + +(* A seqperm is an index_fun that is also a permutation *) +let seqperm (#a:Type) (s0:seq a) (s1:seq a) = + f:index_fun s0 { is_permutation s0 s1 f } + +(* We can construct a permutation from +// sequences whose element counts are the same *) +val permutation_from_equal_counts + (#a:eqtype) + (s0:seq a) (s1:seq a{(forall x. count x s0 == count x s1)}) + : Tot (seqperm s0 s1) + +(** Now, some utilities related to commutative monoids and permutations *) + +module CM = FStar.Algebra.CommMonoid + +(* folding a m.mult over a sequence *) +let foldm_back (#a:Type) (m:CM.cm a) (s:seq a) = fold_back m.mult s m.unit + +(* folding m over the concatenation of s1 and s2 + can be decomposed into a fold over s1 and a fold over s2 *) +val foldm_back_append (#a:Type) (m:CM.cm a) (s1 s2: seq a) + : Lemma + (ensures foldm_back m (append s1 s2) == m.mult (foldm_back m s1) (foldm_back m s2)) + +(* folds over concatenated lists can is symmetric *) +val foldm_back_sym (#a:Type) (m:CM.cm a) (s1 s2: seq a) + : Lemma + (ensures foldm_back m (append s1 s2) == foldm_back m (append s2 s1)) + +(* And, finally, if s0 and s1 are permutations, + then folding m over them is identical *) +val foldm_back_perm (#a:_) + (m:CM.cm a) + (s0:seq a) + (s1:seq a) + (p:seqperm s0 s1) + : Lemma + (ensures foldm_back m s0 == foldm_back m s1) diff --git a/stage0/ulib/experimental/FStar.Sequence.Seq.fst b/stage0/ulib/experimental/FStar.Sequence.Seq.fst new file mode 100644 index 00000000000..8c81bb4d876 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Seq.fst @@ -0,0 +1,88 @@ +(* + Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan + Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module relates FStar.Seq.seq to FStar.Sequence.seq and provides +a bijection between the two. 
As such, it provides a path for migrating developments based on FStar.Seq to FStar.Sequence, or vice versa +*) + +module FStar.Sequence.Seq +module Seq = FStar.Seq +module Sequence = FStar.Sequence +open FStar.Sequence + +let rec sequence_of_seq (#a:Type) (s:Seq.seq a) + : Tot (Sequence.seq a) + (decreases (Seq.length s)) + = if Seq.length s = 0 + then Sequence.empty + else let prefix, last = Seq.un_snoc s in + sequence_of_seq prefix $:: last + +let rec seq_of_sequence (#a:Type) (s:Sequence.seq a) + : Tot (Seq.seq a) + (decreases (Sequence.length s)) + = if Sequence.length s = 0 + then Seq.empty + else let prefix = Sequence.take s (Sequence.length s - 1) in + Seq.snoc (seq_of_sequence prefix) + (s$@(Sequence.length s - 1)) + +let rec related_sequence_of_seq (#a:Type) (s:Seq.seq a) + : Lemma + (ensures related s (sequence_of_seq s)) + (decreases (Seq.length s)) + = if Seq.length s = 0 then () + else ( + let prefix, last = Seq.un_snoc s in + related_sequence_of_seq prefix + ) + +let rec related_seq_of_sequence (#a:Type) (s:Sequence.seq a) + : Lemma + (ensures related (seq_of_sequence s) s) + (decreases (Sequence.length s)) + = if Sequence.length s = 0 + then () + else ( + related_seq_of_sequence (Sequence.take s (Sequence.length s - 1)) + ) + +let seq_of_sequence_of_seq (#a:Type) (s:Seq.seq a) + : Lemma (seq_of_sequence (sequence_of_seq s) == s) + = related_sequence_of_seq s; + related_seq_of_sequence (sequence_of_seq s); + assert (Seq.equal (seq_of_sequence (sequence_of_seq s)) s) + +let sequence_of_seq_of_sequence (#a:Type) (s:Sequence.seq a) + : Lemma (sequence_of_seq (seq_of_sequence s) == s) + = related_seq_of_sequence s; + related_sequence_of_seq (seq_of_sequence s); + assert (Sequence.equal (sequence_of_seq (seq_of_sequence s)) s) diff --git a/stage0/ulib/experimental/FStar.Sequence.Seq.fsti b/stage0/ulib/experimental/FStar.Sequence.Seq.fsti new file mode 100644 index 00000000000..fc78067e319 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Seq.fsti @@ -0,0 +1,58 @@ +(* + Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan + Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) + +(** +This module relates FStar.Seq.seq to FStar.Sequence.seq and provides +a bijection between the two. 
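+The bijection is given by `sequence_of_seq` and `seq_of_sequence` below,
+together with lemmas showing that the two maps are mutual inverses.
+As a small sketch of how facts can be transported across the relation
+(the name `length_roundtrip` is ours and not part of this interface):
+
+  // hypothetical client sketch; relies only on `related_sequence_of_seq` below
+  let length_roundtrip (#a:Type) (s: Seq.seq a)
+    : Lemma (Sequence.length (sequence_of_seq s) == Seq.length s)
+    = related_sequence_of_seq s
+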
As such, it provides a path for migrating developments based on FStar.Seq to FStar.Sequence, or vice versa +*) +module FStar.Sequence.Seq +module Seq = FStar.Seq +module Sequence = FStar.Sequence.Base + +val sequence_of_seq (#a:Type) (s:Seq.seq a) : Sequence.seq a + +val seq_of_sequence (#a:Type) (s:Sequence.seq a) : Seq.seq a + +let related #a (s:Seq.seq a) (s':Sequence.seq a) = + Seq.length s == Sequence.length s' /\ + (forall i.{:pattern (Seq.index s i) \/ (Sequence.index s' i)} + Seq.index s i == Sequence.index s' i) + +val related_sequence_of_seq (#a:Type) (s:Seq.seq a) + : Lemma (related s (sequence_of_seq s)) + +val related_seq_of_sequence (#a:Type) (s:Sequence.seq a) + : Lemma (related (seq_of_sequence s) s) + +val seq_of_sequence_of_seq (#a:Type) (s:Seq.seq a) + : Lemma (seq_of_sequence (sequence_of_seq s) == s) + +val sequence_of_seq_of_sequence (#a:Type) (s:Sequence.seq a) + : Lemma (sequence_of_seq (seq_of_sequence s) == s) diff --git a/stage0/ulib/experimental/FStar.Sequence.Util.fst b/stage0/ulib/experimental/FStar.Sequence.Util.fst new file mode 100644 index 00000000000..95dcf91f01c --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.Util.fst @@ -0,0 +1,102 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: N. Swamy +*) + +(** This module provides some utilities on top of FStar.Sequence *) +module FStar.Sequence.Util +open FStar.Sequence.Base + + +/// For convenience, we define `slice` to represent Dafny sequence slices. +let slice (#ty: Type) (s: seq ty) (i: nat) (j: nat{j >= i && j <= length s}) + : seq ty + = all_seq_facts_lemma(); + drop (take s j) i + +let cons #a (x:a) (s:seq a) = singleton x `append` s + +let head #a (s:seq a{length s > 0}) = s $@ 0 + +let tail #a (s:seq a{length s > 0}) = drop s 1 + +/// Split a sequences into a prefix and the last element +/// This is the inverse of the Sequence.build +let un_build (#a:_) (s:seq a{length s > 0}) + : seq a & a + = take s (length s - 1), + s $@ (length s - 1) + +let split #a (s:seq a) (i:nat{ i <= length s}) + : seq a & seq a + = take s i, + drop s i + +/// Counts the number of elements of `s` that +/// satisfy the predicate [f] +let rec count_matches (#a:Type) (f:a -> bool) (s:seq a) + : Tot nat (decreases (length s)) + = all_seq_facts_lemma(); + if length s = 0 then 0 + else if f (head s) then 1 + count_matches f (tail s) + else count_matches f (tail s) + +/// count_matches on an empty sequence is always zero +let count_matches_empty (a:Type) (f:a -> bool) (s:seq a{length s = 0}) + : Lemma (count_matches f s = 0) + = () + +/// count is a specialization of count_matches +/// to count the number of occurrences of a given element `x` in `s`. +/// +/// It is opaque to give control over its unrollings in specific proofs +[@@"opaque_to_smt"] +let count (#a:eqtype) (x:a) (s:seq a) = count_matches (fun y -> x = y) s + +/// A specializtion of count_matches_empty +let count_empty (#a:eqtype) (s:seq a{length s = 0}) + : Lemma (forall x. 
count x s = 0) + = reveal_opaque (`%count) (count #a) + +/// The head element always occurs in a non-empty list +let count_head (#a:eqtype) (s:seq a{length s > 0}) + : Lemma (count (head s) s > 0) + = reveal_opaque (`%count) (count #a) + +/// count sums over append +let rec lemma_append_count_aux (#a:eqtype) (x:a) (lo hi:seq a) + : Lemma + (ensures (count x (append lo hi) = (count x lo + count x hi))) + (decreases (length lo)) + = all_seq_facts_lemma(); + reveal_opaque (`%count) (count #a); + if length lo = 0 + then assert (append lo hi `equal` hi) + else ( + lemma_append_count_aux x (tail lo) hi; + assert (append (tail lo) hi `equal` tail (append lo hi)) + ) + + +/// Folding a function over a sequence, starting from its +/// last element, hence fold_back +let rec fold_back (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a) + : Tot a (decreases (length s)) + = all_seq_facts_lemma(); + if length s = 0 then init + else let last = s $@ (length s - 1) in + let s = take s (length s - 1) in + f last (fold_back f s init) diff --git a/stage0/ulib/experimental/FStar.Sequence.fst b/stage0/ulib/experimental/FStar.Sequence.fst new file mode 100644 index 00000000000..181f01e5613 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Sequence.fst @@ -0,0 +1,32 @@ +(* + Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan + Rosen, Nikhil Swamy, Microsoft Research, and contributors to + the Dafny Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Includes material from the Dafny project + (https://github.com/dafny-lang/dafny) which carries this license + information: + + Created 9 February 2008 by Rustan Leino. + Converted to Boogie 2 on 28 June 2008. + Edited sequence axioms 20 October 2009 by Alex Summers. + Modified 2014 by Dan Rosen. + Copyright (c) 2008-2014, Microsoft. + Copyright by the contributors to the Dafny Project + SPDX-License-Identifier: MIT +*) +module FStar.Sequence +include FStar.Sequence.Base +include FStar.Sequence.Ambient diff --git a/stage0/ulib/experimental/FStar.Universe.PCM.fst b/stage0/ulib/experimental/FStar.Universe.PCM.fst new file mode 100644 index 00000000000..b3002af04e8 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Universe.PCM.fst @@ -0,0 +1,46 @@ +(* + Copyright 2021 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Author: N. 
Swamy +*) +module FStar.Universe.PCM +(* Lift a PCM to a higher universe, including its frame-preserving updates *) +open FStar.PCM +open FStar.Universe +open FStar.Classical.Sugar + +let raise (#a:Type) (p:pcm a) + : pcm (raise_t u#a u#b a) + = { + p = { + composable = (fun x y -> p.p.composable (downgrade_val x) (downgrade_val y)); + op = (fun x y -> raise_val (p.p.op (downgrade_val x) (downgrade_val y))); + one = raise_val p.p.one; + }; + comm = (fun x y -> p.comm (downgrade_val x) (downgrade_val y)); + assoc = (fun x y z -> p.assoc (downgrade_val x) (downgrade_val y) (downgrade_val z)); + assoc_r = (fun x y z -> p.assoc_r (downgrade_val x) (downgrade_val y) (downgrade_val z)); + is_unit = (fun x -> p.is_unit (downgrade_val x)); + refine = (fun x -> p.refine (downgrade_val x)); + } + +let raise_frame_preserving_upd #a (#p:pcm a) (#x #y:a) (f:frame_preserving_upd p x y) + : frame_preserving_upd (raise p) (raise_val x) (raise_val y) + = fun v -> + let u = f (downgrade_val v) in + let v_new = raise_val u in + assert (forall frame. composable p y frame ==> composable (raise p) (raise_val y) (raise_val frame)); + assert (forall frame. composable (raise p) (raise_val x) frame ==> composable p x (downgrade_val frame)); + v_new diff --git a/stage0/ulib/experimental/FStar.Witnessed.Core.fst b/stage0/ulib/experimental/FStar.Witnessed.Core.fst new file mode 100644 index 00000000000..f51a82b105b --- /dev/null +++ b/stage0/ulib/experimental/FStar.Witnessed.Core.fst @@ -0,0 +1,19 @@ +module FStar.Witnessed.Core +module P = FStar.Preorder + +(* This is just to give definitions to the witnessed type for extraction. + It is NOT a semantic model of the witnessed modality *) + +let witnessed (state:Type u#a) + (rel:P.preorder state) + (p:s_predicate state) + : Type0 + = unit + +let witnessed_proof_irrelevant + (state:Type u#a) + (rel:P.preorder state) + (p:s_predicate state) + (w0 w1:witnessed state rel p) + : Lemma (w0 == w1) + = () diff --git a/stage0/ulib/experimental/FStar.Witnessed.Core.fsti b/stage0/ulib/experimental/FStar.Witnessed.Core.fsti new file mode 100644 index 00000000000..50fcaa61ce9 --- /dev/null +++ b/stage0/ulib/experimental/FStar.Witnessed.Core.fsti @@ -0,0 +1,22 @@ +module FStar.Witnessed.Core +module P = FStar.Preorder + +let s_predicate (state:Type u#a) = state -> Type0 + +let stable (state:Type u#a) + (rel:P.preorder state) + (p:s_predicate state) = + forall s0 s1. 
(p s0 /\ rel s0 s1) ==> p s1 + +val witnessed (state:Type u#a) + (rel:P.preorder state) + (p:s_predicate state) + : Type0 + +val witnessed_proof_irrelevant + (state:Type u#a) + (rel:P.preorder state) + (p:s_predicate state) + (w0 w1:witnessed state rel p) + : Lemma (w0 == w1) + diff --git a/stage0/ulib/fs/FStar_All.fs b/stage0/ulib/fs/FStar_All.fs new file mode 100644 index 00000000000..d17e5bfd367 --- /dev/null +++ b/stage0/ulib/fs/FStar_All.fs @@ -0,0 +1,7 @@ +#light "off" +module FStar_All + let failwith x = failwith x + let exit i = exit i + let pipe_right a f = f a + let pipe_left f a = f a + let try_with f1 f2 = try f1 () with | e -> f2 e diff --git a/stage0/ulib/fs/FStar_Char.fs b/stage0/ulib/fs/FStar_Char.fs new file mode 100644 index 00000000000..362dc52b204 --- /dev/null +++ b/stage0/ulib/fs/FStar_Char.fs @@ -0,0 +1,9 @@ +module FStar_Char +open Prims + +type char = FSharp.Core.char + +let lowercase = System.Char.ToLower +let uppercase = System.Char.ToUpper +let int_of_char (x:char) : int = Microsoft.FSharp.Core.Operators.int x |> System.Numerics.BigInteger.op_Implicit +let char_of_int (x:int) : char = Microsoft.FSharp.Core.Operators.int x |> Microsoft.FSharp.Core.Operators.char diff --git a/stage0/ulib/fs/FStar_CommonST.fs b/stage0/ulib/fs/FStar_CommonST.fs new file mode 100644 index 00000000000..cc470325dd3 --- /dev/null +++ b/stage0/ulib/fs/FStar_CommonST.fs @@ -0,0 +1,26 @@ +module FStar_CommonST + +open FStar_Monotonic_Heap + +let read x = x.contents + +let op_Bang x = read x + +let write x y = x.contents <- y + +let op_Colon_Equals x y = write x y + +let uid = ref 0 + +let alloc contents = + let id = incr uid; !uid in + let r = { id = id; contents = contents } in + r + +let recall = (fun r -> ()) +let get () = () + +type 'a witnessed = | C + +let gst_witness = (fun r -> ()) +let gst_recall = (fun r -> ()) diff --git a/stage0/ulib/fs/FStar_Dyn.fs b/stage0/ulib/fs/FStar_Dyn.fs new file mode 100644 index 00000000000..a1b4bd8be6c --- /dev/null +++ b/stage0/ulib/fs/FStar_Dyn.fs @@ -0,0 +1,7 @@ +module FStar_Dyn + +type dyn = obj + +let mkdyn (x:'a) : dyn = box x + +let undyn (d:dyn) : 'a = unbox<'a> d diff --git a/stage0/ulib/fs/FStar_Exn.fs b/stage0/ulib/fs/FStar_Exn.fs new file mode 100644 index 00000000000..b931f23bae9 --- /dev/null +++ b/stage0/ulib/fs/FStar_Exn.fs @@ -0,0 +1,3 @@ +module FStar_Exn + +let raise = raise diff --git a/stage0/ulib/fs/FStar_Float.fs b/stage0/ulib/fs/FStar_Float.fs new file mode 100644 index 00000000000..b95942563b5 --- /dev/null +++ b/stage0/ulib/fs/FStar_Float.fs @@ -0,0 +1,5 @@ +module FStar_Float +open Prims + +type float = FSharp.Core.float +type double = float diff --git a/stage0/ulib/fs/FStar_Ghost.fs b/stage0/ulib/fs/FStar_Ghost.fs new file mode 100644 index 00000000000..dca2089cef3 --- /dev/null +++ b/stage0/ulib/fs/FStar_Ghost.fs @@ -0,0 +1,12 @@ +module FStar_Ghost + +type erased = unit +let reveal : erased -> unit = fun _ -> () +let hide : unit -> erased = fun _ -> () +let hide_reveal : erased -> unit = fun _ -> () +let reveal_hide : unit -> unit = fun _ -> () +let elift1 : (unit -> unit) -> erased -> erased = fun _ _ -> () +let elift2 : (unit -> unit -> unit) -> erased -> erased -> erased = fun _ _ _ -> () +let elift3 : (unit -> unit -> unit -> unit) -> erased -> erased -> erased -> erased = fun _ _ _ _ -> () +let elift1_p : (unit -> unit) -> erased -> erased = fun _ _ -> () +let elift2_p : (unit -> unit -> unit) -> erased -> erased -> erased = fun _ _ _ -> () diff --git a/stage0/ulib/fs/FStar_Heap.fs b/stage0/ulib/fs/FStar_Heap.fs 
new file mode 100644 index 00000000000..1321304f226 --- /dev/null +++ b/stage0/ulib/fs/FStar_Heap.fs @@ -0,0 +1,7 @@ +module FStar_Heap + +open FStar_Monotonic_Heap + +type 'a ref = 'a FStar_Monotonic_Heap.ref +type trivial_rel = Prims.l_True +type trivial_preorder = trivial_rel diff --git a/stage0/ulib/fs/FStar_HyperStack_All.fs b/stage0/ulib/fs/FStar_HyperStack_All.fs new file mode 100644 index 00000000000..d83edab144e --- /dev/null +++ b/stage0/ulib/fs/FStar_HyperStack_All.fs @@ -0,0 +1,8 @@ +module FStar_HyperStack_All + +let failwith x = failwith x +let exit i = exit (Microsoft.FSharp.Core.Operators.int i) +let pipe_right a f = f a +let pipe_left f a = f a +let try_with f1 f2 = try f1 () with | e -> f2 e + diff --git a/stage0/ulib/fs/FStar_HyperStack_IO.fs b/stage0/ulib/fs/FStar_HyperStack_IO.fs new file mode 100644 index 00000000000..64f17eef943 --- /dev/null +++ b/stage0/ulib/fs/FStar_HyperStack_IO.fs @@ -0,0 +1,6 @@ +module FStar_HyperStack_IO + +open Prims + +let print_string : Prims.string -> Prims.unit = + FStar_IO.print_string diff --git a/stage0/ulib/fs/FStar_HyperStack_ST.fs b/stage0/ulib/fs/FStar_HyperStack_ST.fs new file mode 100644 index 00000000000..eccb680f1a8 --- /dev/null +++ b/stage0/ulib/fs/FStar_HyperStack_ST.fs @@ -0,0 +1,90 @@ +module FStar_HyperStack_ST + +open FStar_CommonST + +open FStar_Monotonic_HyperHeap + +(* TODO: There are issues with removing unused parameters in (Monotonic_)Hyper_Stack modules *) +open FStar_Monotonic_HyperStack + +open FStar_HyperStack + +let push_frame () = () +let pop_frame () = () + +let root = () + +let def_rid = root + +let salloc (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (root, r) + +let salloc_mm (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (root, r) + +let sfree r = () + +let new_region = (fun r0 -> def_rid) +let new_colored_region = (fun r0 c -> def_rid) + +let ralloc i (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (i, r) + +let ralloc_mm i (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (i, r) + +let rfree r = () + +let op_Colon_Equals r v = match r with + | MkRef (_, r) -> op_Colon_Equals r v + +let op_Bang r = match r with + | MkRef (_, r) -> op_Bang r + +let read = op_Bang + +let write = op_Colon_Equals + +let get () = HS (Prims.parse_int "0", FStar_Map.const1 FStar_Monotonic_Heap.emp, def_rid) + +let recall = (fun r -> ()) + +let recall_region = (fun r -> ()) +let witness_region _ = () +let witness_hsref _ = () +type erid = rid + +type ('a, 'rel) mreference = ('a, 'rel) FStar_Monotonic_HyperStack.mreference +type ('a, 'rel) mref = ('a, 'rel) FStar_Monotonic_HyperStack.mref +type 'a reference = ('a, unit) mreference +type 'a ref = ('a, unit) mref +type ('a, 'b) m_rref = ('a, 'b) mref + +//type 'a ref = 'a FStar_HyperStack.reference +//type 'a mreference = 'a ref +//type 'a reference = 'a ref +let alloc = salloc +//type 'a mref = 'a ref +//type 'b m_rref = 'b ref +type stable_on_t = unit +let mr_witness _ _ _ _ _ = () +let testify _ = () +let testify_forall _ = () +let testify_forall_region_contains_pred _ _ = () + +type ex_rid = erid +type 'a witnessed = 'a FStar_CommonST.witnessed +type stable_on = unit +type token = unit +let witness_p _ _ = () +let recall_p _ _ = () + +type drgn = rid +let new_drgn _ = () +let free_drgn _ = () +let ralloc_drgn = ralloc +let ralloc_drgn_mm = ralloc_mm diff --git a/stage0/ulib/fs/FStar_IO.fs b/stage0/ulib/fs/FStar_IO.fs new file mode 100644 
index 00000000000..b060b1aac7e --- /dev/null +++ b/stage0/ulib/fs/FStar_IO.fs @@ -0,0 +1,27 @@ +module FStar_IO +exception EOF +open System +open System.IO +type fd_read = TextReader +type fd_write = TextWriter + +let print_newline _ = Printf.printf "\n" +let print_string x = Printf.printf "%s" x +let print_uint8 x = Printf.printf "%02x" x +let print_uint32 x = Printf.printf "%04x" x +let print_uint64 x = Printf.printf "%08x" x +let print_any x = Printf.printf "%A" x +let input_line () = System.Console.ReadLine() +let input_int () = Int32.Parse(System.Console.ReadLine()) +let input_float () = Single.Parse(System.Console.ReadLine(), System.Globalization.CultureInfo.InvariantCulture); +let open_read_file (x:string) = new StreamReader(x) +let open_write_file (x:string) = File.CreateText(x) +let close_read_file (x:fd_read) = x.Close() +let close_write_file (x:fd_write) = x.Close() +let read_line (fd:fd_read) = + let x = fd.ReadLine() in + if x=null + then raise EOF + else x +let write_string (fd:fd_write) (x:string) = + fd.Write(x) diff --git a/stage0/ulib/fs/FStar_Int16.fs b/stage0/ulib/fs/FStar_Int16.fs new file mode 100644 index 00000000000..b837d19c0e3 --- /dev/null +++ b/stage0/ulib/fs/FStar_Int16.fs @@ -0,0 +1,85 @@ +module FStar_Int16 +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) +(* THIS FILE IS BASED ON AUTOGENERATED ml/FStar_Int16.ml FILE! *) +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) + +type int16 = System.Int16 +type t = System.Int16 +let n = Prims.of_int 16 + +let int_to_t x = System.Int16.Parse((string x)) +let __int_to_t = int_to_t + +let v (x:t) : Prims.int = Prims.parse_int (string x) + +let zero = 0s +let one = 1s +let ones = System.Int16.MaxValue + +(* Reexport add, plus aliases *) +let add : t -> t -> t = (+) +let add_underspec : t -> t -> t = (+) +let add_mod : t -> t -> t = (+) + +(* Reexport sub, plus aliases *) +let sub : t -> t -> t = (-) +let sub_underspec : t -> t -> t = (-) +let sub_mod : t -> t -> t = (-) + +(* Reexport mul, plus aliases *) +let mul : t -> t -> t = (*) +let mul_underspec : t -> t -> t = (*) +let mul_mod : t -> t -> t = (*) + +(* Just reexport these *) +let div : t -> t -> t = (/) +let rem : t -> t -> t = (%) +let logand : t -> t -> t = (&&&) +let logxor : t -> t -> t = (^^^) +let logor : t -> t -> t = (|||) +let lognot : t -> t = (~~~) +let to_string : t -> string = string +let of_string : string -> t = System.Int16.Parse + +let to_string_hex (x : t) = "0x" + (x.ToString("X")) + +let to_string_hex_pad (i : t) = i.ToString("X4") + +(* The shifts take a uint32 argument, so we need to convert *) +let shift_right (n : t) (i : System.UInt32) : t = n >>> (int32 i) +let shift_left (n : t) (i : System.UInt32) : t = n <<< (int32 i) +let shift_arithmetic_right = shift_right + +(* Comparison operators *) +let eq (a:t) (b:t) : bool = a = b +let gt (a:t) (b:t) : bool = a > b +let gte (a:t) (b:t) : bool = a >= b +let lt (a:t) (b:t) : bool = a < b +let lte (a:t) (b:t) : bool = a <= b + +(* NOT Constant time operators *) +let eq_mask (a:t) (b:t) : t = if a = b then ones else zero +let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let 
op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte diff --git a/stage0/ulib/fs/FStar_Int32.fs b/stage0/ulib/fs/FStar_Int32.fs new file mode 100644 index 00000000000..b35b9fee7b3 --- /dev/null +++ b/stage0/ulib/fs/FStar_Int32.fs @@ -0,0 +1,85 @@ +module FStar_Int32 +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) +(* THIS FILE IS BASED ON AUTOGENERATED ml/FStar_Int32.ml FILE! *) +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) + +type int32 = System.Int32 +type t = System.Int32 +let n = Prims.of_int 32 + +let int_to_t x = System.Int32.Parse((string x)) +let __int_to_t = int_to_t + +let v (x:t) : Prims.int = Prims.parse_int (string x) + +let zero = 0 +let one = 1 +let ones = System.Int32.MaxValue + +(* Reexport add, plus aliases *) +let add : t -> t -> t = (+) +let add_underspec : t -> t -> t = (+) +let add_mod : t -> t -> t = (+) + +(* Reexport sub, plus aliases *) +let sub : t -> t -> t = (-) +let sub_underspec : t -> t -> t = (-) +let sub_mod : t -> t -> t = (-) + +(* Reexport mul, plus aliases *) +let mul : t -> t -> t = (*) +let mul_underspec : t -> t -> t = (*) +let mul_mod : t -> t -> t = (*) + +(* Just reexport these *) +let div : t -> t -> t = (/) +let rem : t -> t -> t = (%) +let logand : t -> t -> t = (&&&) +let logxor : t -> t -> t = (^^^) +let logor : t -> t -> t = (|||) +let lognot : t -> t = (~~~) +let to_string : t -> string = string +let of_string : string -> t = System.Int32.Parse + +let to_string_hex (x : t) = "0x" + (x.ToString("X")) + +let to_string_hex_pad (i : t) = i.ToString("X8") + +(* The shifts take a uint32 argument, so we need to convert *) +let shift_right (n : t) (i : System.UInt32) : t = n >>> (FSharp.Core.Operators.int32 i) +let shift_left (n : t) (i : System.UInt32) : t = n <<< (FSharp.Core.Operators.int32 i) +let shift_arithmetic_right = shift_right + +(* Comparison operators *) +let eq (a:t) (b:t) : bool = a = b +let gt (a:t) (b:t) : bool = a > b +let gte (a:t) (b:t) : bool = a >= b +let lt (a:t) (b:t) : bool = a < b +let lte (a:t) (b:t) : bool = a <= b + +(* NOT Constant time operators *) +let eq_mask (a:t) (b:t) : t = if a = b then ones else zero +let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte diff --git a/stage0/ulib/fs/FStar_Int64.fs b/stage0/ulib/fs/FStar_Int64.fs new file mode 100644 index 00000000000..734d148f02d --- /dev/null +++ b/stage0/ulib/fs/FStar_Int64.fs @@ -0,0 +1,85 @@ +module FStar_Int64 +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) +(* THIS FILE IS BASED ON AUTOGENERATED ml/FStar_Int64.ml FILE! 
*)
+(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *)
+
+type int64 = System.Int64
+type t = System.Int64
+let n = Prims.of_int 64
+
+let int_to_t x = System.Int64.Parse((string x))
+let __int_to_t = int_to_t
+
+let v (x:t) : Prims.int = Prims.parse_int (string x)
+
+let zero = 0L
+let one = 1L
+let ones = System.Int64.MaxValue
+
+(* Reexport add, plus aliases *)
+let add : t -> t -> t = (+)
+let add_underspec : t -> t -> t = (+)
+let add_mod : t -> t -> t = (+)
+
+(* Reexport sub, plus aliases *)
+let sub : t -> t -> t = (-)
+let sub_underspec : t -> t -> t = (-)
+let sub_mod : t -> t -> t = (-)
+
+(* Reexport mul, plus aliases *)
+let mul : t -> t -> t = (*)
+let mul_underspec : t -> t -> t = (*)
+let mul_mod : t -> t -> t = (*)
+
+(* Just reexport these *)
+let div : t -> t -> t = (/)
+let rem : t -> t -> t = (%)
+let logand : t -> t -> t = (&&&)
+let logxor : t -> t -> t = (^^^)
+let logor : t -> t -> t = (|||)
+let lognot : t -> t = (~~~)
+let to_string : t -> string = string
+let of_string : string -> t = System.Int64.Parse
+
+let to_string_hex (x : t) = "0x" + (x.ToString("X"))
+
+let to_string_hex_pad (i : t) = i.ToString("X16")
+
+(* The shifts take a uint32 argument, so we need to convert *)
+let shift_right (n : t) (i : System.UInt32) : t = n >>> (int32 i)
+let shift_left (n : t) (i : System.UInt32) : t = n <<< (int32 i)
+let shift_arithmetic_right = shift_right
+
+(* Comparison operators *)
+let eq (a:t) (b:t) : bool = a = b
+let gt (a:t) (b:t) : bool = a > b
+let gte (a:t) (b:t) : bool = a >= b
+let lt (a:t) (b:t) : bool = a < b
+let lte (a:t) (b:t) : bool = a <= b
+
+(* NOT Constant time operators *)
+let eq_mask (a:t) (b:t) : t = if a = b then ones else zero
+let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero
+
+(* Infix notations *)
+let op_Plus_Hat = add
+let op_Plus_Question_Hat = add_underspec
+let op_Plus_Percent_Hat = add_mod
+let op_Subtraction_Hat = sub
+let op_Subtraction_Question_Hat = sub_underspec
+let op_Subtraction_Percent_Hat = sub_mod
+let op_Star_Hat = mul
+let op_Star_Question_Hat = mul_underspec
+let op_Star_Percent_Hat = mul_mod
+let op_Slash_Hat = div
+let op_Percent_Hat = rem
+let op_Hat_Hat = logxor
+let op_Amp_Hat = logand
+let op_Bar_Hat = logor
+let op_Less_Less_Hat = shift_left
+let op_Greater_Greater_Hat = shift_right
+let op_Equals_Hat = eq
+let op_Greater_Hat = gt
+let op_Greater_Equals_Hat = gte
+let op_Less_Hat = lt
+let op_Less_Equals_Hat = lte
diff --git a/stage0/ulib/fs/FStar_Int8.fs b/stage0/ulib/fs/FStar_Int8.fs
new file mode 100644
index 00000000000..fcfe9607068
--- /dev/null
+++ b/stage0/ulib/fs/FStar_Int8.fs
@@ -0,0 +1,85 @@
+module FStar_Int8
+(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *)
+(* THIS FILE IS BASED ON AUTOGENERATED ml/FStar_Int8.ml FILE! *)
+(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
*) + +type int8 = System.SByte +type t = System.SByte +let n = Prims.of_int 8 + +let int_to_t x = System.SByte.Parse((string x)) +let __int_to_t = int_to_t + +let v (x:t) : Prims.int = Prims.parse_int (string x) + +let zero = 0y +let one = 1y +let ones = System.SByte.MaxValue + +(* Reexport add, plus aliases *) +let add : t -> t -> t = (+) +let add_underspec : t -> t -> t = (+) +let add_mod : t -> t -> t = (+) + +(* Reexport sub, plus aliases *) +let sub : t -> t -> t = (-) +let sub_underspec : t -> t -> t = (-) +let sub_mod : t -> t -> t = (-) + +(* Reexport mul, plus aliases *) +let mul : t -> t -> t = (*) +let mul_underspec : t -> t -> t = (*) +let mul_mod : t -> t -> t = (*) + +(* Just reexport these *) +let div : t -> t -> t = (/) +let rem : t -> t -> t = (%) +let logand : t -> t -> t = (&&&) +let logxor : t -> t -> t = (^^^) +let logor : t -> t -> t = (|||) +let lognot : t -> t = (~~~) +let to_string : t -> string = string +let of_string : string -> t = System.SByte.Parse + +let to_string_hex (x : t) = "0x" + (x.ToString("X")) + +let to_string_hex_pad (i : t) = i.ToString("X2") + +(* The shifts take a uint32 argument, so we need to convert *) +let shift_right (n : t) (i : System.UInt32) : t = n >>> (int32 i) +let shift_left (n : t) (i : System.UInt32) : t = n <<< (int32 i) +let shift_arithmetic_right = shift_right + +(* Comparison operators *) +let eq (a:t) (b:t) : bool = a = b +let gt (a:t) (b:t) : bool = a > b +let gte (a:t) (b:t) : bool = a >= b +let lt (a:t) (b:t) : bool = a < b +let lte (a:t) (b:t) : bool = a <= b + +(* NOT Constant time operators *) +let eq_mask (a:t) (b:t) : t = if a = b then ones else zero +let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte diff --git a/stage0/ulib/fs/FStar_List.fs b/stage0/ulib/fs/FStar_List.fs new file mode 100644 index 00000000000..29d4e065659 --- /dev/null +++ b/stage0/ulib/fs/FStar_List.fs @@ -0,0 +1,54 @@ +module FStar_List +open Prims +//open FStar.List.Tot.Base + +let isEmpty l = List.isEmpty l +let mem = List.contains +let memT = mem +let hd = List.head +let tail = List.tail +let tl = List.tail + +let nth l i = List.nth l (Microsoft.FSharp.Core.Operators.int i) +let length l : int = List.length l |> System.Numerics.BigInteger.op_Implicit +let rev = List.rev +let map = List.map +let mapT = map +let mapi f l = List.mapi (fun i x -> f (System.Numerics.BigInteger.op_Implicit i) x) l +let map2 = List.map2 +let rec map3 = List.map3 +let iter = List.iter +let iter2 = List.iter2 +let iteri_aux _ _ _ = failwith "FStar.List.fs: Not implemented: iteri_aux" +let iteri f l = List.iteri (fun i x -> f (System.Numerics.BigInteger.op_Implicit i) x) l +let partition = List.partition +let append = List.append +let rev_append _ _ = failwith "FStar.List.fs: Not implemented: rev_append" +let fold_left = List.fold +let fold_right = List.foldBack +let fold_left2 = List.fold2 +let 
fold_right2 = List.foldBack2 +let collect = List.collect +let unzip = List.unzip +let unzip3 = List.unzip3 +let filter = List.filter +let sortWith f l = List.sortWith (fun x y -> Microsoft.FSharp.Core.Operators.int (f x y)) l +let for_all = List.forall +let forall2 = List.forall2 +let tryFind f l = List.tryFind f l +let tryFindT = tryFind +let find = tryFind +let tryPick f l = List.tryPick f l +let flatten = List.concat +let split = unzip +let choose = List.choose +let existsb f l = List.exists f l +let existsML f l = List.exists f l +let contains x l = List.exists (fun y -> x = y) l +let zip = List.zip +let splitAt x l = List.splitAt ( Microsoft.FSharp.Core.Operators.int x) l +let filter_map = List.choose +let index f l = System.Numerics.BigInteger.op_Implicit (List.findIndex f l) +let zip3 = List.zip3 +let unique _ _ = failwith "FStar.List.fs: Not implemented: unique" +let map_flatten f l = flatten (map f l) diff --git a/stage0/ulib/fs/FStar_List_Tot_Base.fs b/stage0/ulib/fs/FStar_List_Tot_Base.fs new file mode 100644 index 00000000000..1edaa3e6926 --- /dev/null +++ b/stage0/ulib/fs/FStar_List_Tot_Base.fs @@ -0,0 +1,48 @@ +module FStar_List_Tot_Base +open Prims + +let isEmpty l = List.isEmpty l +let hd = List.head +let tail = List.tail +let tl = List.tail +let length l : int = List.length l |> System.Numerics.BigInteger.op_Implicit +let nth l (i : Prims.nat) = try Some (List.nth l (Microsoft.FSharp.Core.Operators.int i)) with _ -> None +let index l (i : Prims.nat) = List.nth l (Microsoft.FSharp.Core.Operators.int i) +let count _ _ = failwith "FStar_List.Tot.Base.fs: Not implemented: count" +let rev_acc l r = List.fold (fun xs x -> x :: xs) r l +let rev = List.rev +let append = List.append +let op_At = append +let snoc (x, y) = append x [y] +let flatten = List.concat +let map = List.map +let mapi_init _ _ _ = failwith "FStar.List.Tot.Base.fs: Not implemented: mapi_init" +let mapi f l = List.mapi (fun i x -> f (System.Numerics.BigInteger.op_Implicit i) x) l +let concatMap f l = List.collect f l +let fold_left = List.fold +let fold_right = List.foldBack +let fold_left2 = List.fold2 +let mem = List.contains +//type ('a, 'b, 'c) memP = NOT IMPLEMENTED +let contains x l = List.exists (fun y -> x = y) l +let existsb f l = List.exists f l +let find f l = List.tryFind f l +let filter = List.filter +let for_all = List.forall +let collect f l = List.collect f l +let tryFind = find +let tryPick f l = List.tryPick f l +let choose = List.choose +let partition = List.partition +let subset _ _ = failwith "FStar.List.Tot.Base.fs: Not implemented: subset" +let noRepeats _ = failwith "FStar.List.Tot.Base.fs: Not implemented: noRepeats" +let rec assoc x l = l |> List.tryFind (fun (h, _) -> h = x) |> Option.map snd +let split = List.unzip +let splitAt = List.splitAt +let unzip = List.unzip + +let unzip3 = List.unzip3 +let bool_of_compare _ _ _ = failwith "FStar.List.Tot.Base.fs: Not implemented: bool_of_compare" +let compare_of_bool _ _ _ = failwith "FStar.List.Tot.Base.fs: Not implemented: compare_of_bool" +let sortWith (f : 'a -> 'a -> Prims.int) l = List.sortWith (fun x y -> Microsoft.FSharp.Core.Operators.int (f x y)) l +let list_unref l = l diff --git a/stage0/ulib/fs/FStar_Map.fs b/stage0/ulib/fs/FStar_Map.fs new file mode 100644 index 00000000000..140d8d0a704 --- /dev/null +++ b/stage0/ulib/fs/FStar_Map.fs @@ -0,0 +1,135 @@ +#light "off" +module FStar_Map +open Prims +open FStar_Pervasives +(* TODO: The extracted version of this file doesn't include the when 'key : comparison constraint which is 
required for F# *) +type t<'key, 'value when 'key : comparison> = +{mappings : ('key, 'value) FStar_FunctionalExtensionality.restricted_t; domain : 'key FStar_Set.set} + + +let __proj__Mkt__item__mappings = (fun ( projectee : ('key, 'value) t ) -> (match (projectee) with +| {mappings = mappings; domain = domain} -> begin +mappings +end)) + + +let __proj__Mkt__item__domain = (fun ( projectee : ('key, 'value) t ) -> (match (projectee) with +| {mappings = mappings; domain = domain} -> begin +domain +end)) + + +let sel = (fun ( m : ('key, 'value) t ) ( k : 'key ) -> (m.mappings k)) + + +let upd = (fun ( m : ('key, 'value) t ) ( k : 'key ) ( v : 'value ) -> {mappings = (FStar_FunctionalExtensionality.on_domain (fun ( x : 'key ) -> (match ((Prims.op_Equality x k)) with +| true -> begin +v +end +| uu____5020 -> begin +(m.mappings x) +end))); domain = (FStar_Set.union m.domain (FStar_Set.singleton k))}) + + +let const1 = (fun ( v : 'value ) -> {mappings = (FStar_FunctionalExtensionality.on_domain (fun ( uu____5049 : 'key ) -> v)); domain = (FStar_Set.complement (FStar_Set.empty ()))}) + + +let domain = (fun ( m : ('key, 'value) t ) -> m.domain) + + +let contains = (fun ( m : ('key, 'value) t ) ( k : 'key ) -> (FStar_Set.mem k m.domain)) + + +let concat = (fun ( m1 : ('key, 'value) t ) ( m2 : ('key, 'value) t ) -> {mappings = (FStar_FunctionalExtensionality.on_domain (fun ( x : 'key ) -> (match ((FStar_Set.mem x m2.domain)) with +| true -> begin +(m2.mappings x) +end +| uu____5174 -> begin +(m1.mappings x) +end))); domain = (FStar_Set.union m1.domain m2.domain)}) + +(* TODO: Only implicit arguments at the start of a function are erased, whereas the others are extracted to unit and obj + which makes extracted function unusable. See examples/hello/TestFSharp for a minimal example. + + Here, key should be a generic argument with a comparison constraint instead of obj/unit. + + A simple workaround would be to change the declaration of map_val in the FStar.Map.fsti so that + '#key:eqtype' parameter is moved before any non-implicit parameters (i.e. before 'f'). +*) +let map_val = (fun ( f : 'uuuuuu5195 -> 'uuuuuu5196 ) ( key : 'key ) ( m : ('key, 'uuuuuu5195) t ) -> {mappings = (FStar_FunctionalExtensionality.on_domain (fun ( x : 'key ) -> (f (m.mappings x)))); domain = m.domain}) + + +let restrict = (fun ( s : 'key FStar_Set.set ) ( m : ('key, 'value) t ) -> {mappings = m.mappings; domain = (FStar_Set.intersect s m.domain)}) + + +let const_on = (fun ( dom : 'key FStar_Set.set ) ( v : 'value ) -> (restrict dom (const1 v))) + + +type disjoint_dom = +unit + + +type has_dom = +unit + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +type equal = +unit + + + + + + + + + + + + + diff --git a/stage0/ulib/fs/FStar_Monotonic_Heap.fs b/stage0/ulib/fs/FStar_Monotonic_Heap.fs new file mode 100644 index 00000000000..6c92edde7ea --- /dev/null +++ b/stage0/ulib/fs/FStar_Monotonic_Heap.fs @@ -0,0 +1,46 @@ +module FStar_Monotonic_Heap + +type heap = unit + +(* Following OCaml implementation we want reference (physical) equality for ref. 
+ https://www.lexifi.com/blog/references-physical-equality *)
+[<ReferenceEquality>]
+type 'a ref = {
+ mutable contents: 'a;
+ id: int
+}
+
+type 'a mref = 'a ref
+
+let emp =
+ ()
+
+(* Logical functions on heap *)
+(* TODO : complete the functions to have the same interface as in FStar.Heap.fsti *)
+
+let addr_of _ = unbox (box ())
+let is_mm _ = unbox (box ())
+
+(* let compare_addrs *)
+
+// HACK: We need to somehow make the implementation agree with the interface. Those types seem to be used only
+// in lemmas, so they shouldn't matter.
+type ('a, 'b, 'c, 'd) contains = 'a * 'b * 'c * 'd
+type ('a, 'b) addr_unused_in = 'a * 'b
+type ('a, 'b, 'c, 'd) unused_in = 'a * 'b * 'c * 'd
+let fresh _ _ _ = unbox (box ())
+
+let sel _ _ = unbox (box ())
+let upd _ _ _ = unbox (box ())
+let alloc _ _ _ = unbox (box ())
+
+let free_mm _ _ = unbox (box ())
+let sel_tot = sel
+let upd_tot = upd
+
+(* Untyped view of references *)
+type aref =
+ | Ref of (unit * unit)
+let dummy_aref = Ref ((), ())
+let aref_of _ = dummy_aref
+let ref_of _ _ = unbox (box ())
diff --git a/ulib/fs/FStar_Mul.fs b/stage0/ulib/fs/FStar_Mul.fs
similarity index 100%
rename from ulib/fs/FStar_Mul.fs
rename to stage0/ulib/fs/FStar_Mul.fs
diff --git a/stage0/ulib/fs/FStar_Option.fs b/stage0/ulib/fs/FStar_Option.fs
new file mode 100644
index 00000000000..2a74f4f9c0b
--- /dev/null
+++ b/stage0/ulib/fs/FStar_Option.fs
@@ -0,0 +1,11 @@
+module FStar_Option
+let isSome = function
+ | Some _ -> true
+ | None -> false
+let isNone o = not (isSome o)
+let map f = function
+ | Some x -> Some (f x)
+ | None -> None
+let get = function
+ | Some x -> x
+ | None -> failwith "Option.get called on None"
diff --git a/stage0/ulib/fs/FStar_Pervasives_Native.fs b/stage0/ulib/fs/FStar_Pervasives_Native.fs
new file mode 100644
index 00000000000..c37bfcc2aa4
--- /dev/null
+++ b/stage0/ulib/fs/FStar_Pervasives_Native.fs
@@ -0,0 +1,271 @@
+#light "off"
+module FStar_Pervasives_Native
+open Prims
+type 'Aa option =
+| None
+| Some of 'Aa
+
+
+let uu___is_None = function None -> true | _ -> false
+let uu___is_Some = function Some _ -> true | _ -> false
+let __proj__Some__item__v = function Some x -> x | _ -> failwith "Option value not available"
+
+type ('a,'b) tuple2 = 'a * 'b
+
+let fst = Microsoft.FSharp.Core.Operators.fst
+let snd = Microsoft.FSharp.Core.Operators.snd
+
+let __proj__Mktuple2__1 = fst
+let __proj__Mktuple2__2 = snd
+
+type ('a,'b,'c) tuple3 =
+ 'a* 'b* 'c
+let uu___is_Mktuple3 projectee = true
+let __proj__Mktuple3__item___1 projectee =
+ match projectee with | (_1,_2,_3) -> _1
+let __proj__Mktuple3__item___2 projectee =
+ match projectee with | (_1,_2,_3) -> _2
+let __proj__Mktuple3__item___3 projectee =
+ match projectee with | (_1,_2,_3) -> _3
+
+type ('a,'b,'c,'d) tuple4 =
+ 'a* 'b* 'c* 'd
+let uu___is_Mktuple4 projectee = true
+let __proj__Mktuple4__item___1 projectee =
+ match projectee with | (_1,_2,_3,_4) -> _1
+let __proj__Mktuple4__item___2 projectee =
+ match projectee with | (_1,_2,_3,_4) -> _2
+let __proj__Mktuple4__item___3 projectee =
+ match projectee with | (_1,_2,_3,_4) -> _3
+let __proj__Mktuple4__item___4 projectee =
+ match projectee with | (_1,_2,_3,_4) -> _4
+
+type ('a,'b,'c,'d,'e) tuple5 =
+ 'a* 'b* 'c* 'd* 'e
+let uu___is_Mktuple5 projectee = true
+let __proj__Mktuple5__item___1 projectee =
+ match projectee with | (_1,_2,_3,_4,_5) -> _1
+let __proj__Mktuple5__item___2 projectee =
+ match projectee with | (_1,_2,_3,_4,_5) -> _2
+let __proj__Mktuple5__item___3 projectee =
+ match projectee with | (_1,_2,_3,_4,_5) -> _3
+let 
__proj__Mktuple5__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5) -> _4 +let __proj__Mktuple5__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5) -> _5 + +type ('a,'b,'c,'d,'e,'f) tuple6 = + 'a* 'b* 'c* 'd* 'e* 'f +let uu___is_Mktuple6 projectee = true +let __proj__Mktuple6__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _1 +let __proj__Mktuple6__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _2 +let __proj__Mktuple6__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _3 +let __proj__Mktuple6__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _4 +let __proj__Mktuple6__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _5 +let __proj__Mktuple6__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _6 + +type ('a,'b,'c,'d,'e,'f,'g) tuple7 = + 'a* 'b* 'c* 'd* 'e* 'f* 'g +let uu___is_Mktuple7 projectee = true +let __proj__Mktuple7__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _1 +let __proj__Mktuple7__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _2 +let __proj__Mktuple7__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _3 +let __proj__Mktuple7__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _4 +let __proj__Mktuple7__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _5 +let __proj__Mktuple7__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _6 +let __proj__Mktuple7__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _7 + +type ('a,'b,'c,'d,'e,'f,'g,'h) tuple8 = + 'a* 'b* 'c* 'd* 'e* 'f* 'g* 'h +let uu___is_Mktuple8 projectee = true +let __proj__Mktuple8__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _1 +let __proj__Mktuple8__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _2 +let __proj__Mktuple8__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _3 +let __proj__Mktuple8__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _4 +let __proj__Mktuple8__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _5 +let __proj__Mktuple8__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _6 +let __proj__Mktuple8__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _7 +let __proj__Mktuple8__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _8 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i) tuple9 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i +let uu___is_Mktuple9 projectee = true +let __proj__Mktuple9__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _1 +let __proj__Mktuple9__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _2 +let __proj__Mktuple9__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _3 +let __proj__Mktuple9__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _4 +let __proj__Mktuple9__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _5 +let __proj__Mktuple9__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _6 +let __proj__Mktuple9__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _7 +let __proj__Mktuple9__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _8 +let __proj__Mktuple9__item___9 
projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _9 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j) tuple10 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j +let uu___is_Mktuple10 projectee = true +let __proj__Mktuple10__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _1 +let __proj__Mktuple10__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _2 +let __proj__Mktuple10__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _3 +let __proj__Mktuple10__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _4 +let __proj__Mktuple10__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _5 +let __proj__Mktuple10__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _6 +let __proj__Mktuple10__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _7 +let __proj__Mktuple10__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _8 +let __proj__Mktuple10__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _9 +let __proj__Mktuple10__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _10 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k) tuple11 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k +let uu___is_Mktuple11 projectee = true +let __proj__Mktuple11__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _1 +let __proj__Mktuple11__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _2 +let __proj__Mktuple11__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _3 +let __proj__Mktuple11__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _4 +let __proj__Mktuple11__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _5 +let __proj__Mktuple11__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _6 +let __proj__Mktuple11__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _7 +let __proj__Mktuple11__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _8 +let __proj__Mktuple11__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _9 +let __proj__Mktuple11__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _10 +let __proj__Mktuple11__item___11 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _11 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k,'l) tuple12 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k *'l +let uu___is_Mktuple12 projectee = true +let __proj__Mktuple12__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _1 +let __proj__Mktuple12__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _2 +let __proj__Mktuple12__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _3 +let __proj__Mktuple12__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _4 +let __proj__Mktuple12__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _5 +let __proj__Mktuple12__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _6 +let 
__proj__Mktuple12__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _7 +let __proj__Mktuple12__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _8 +let __proj__Mktuple12__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _9 +let __proj__Mktuple12__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _10 +let __proj__Mktuple12__item___11 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _11 +let __proj__Mktuple12__item___12 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _12 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k,'l,'m) tuple13 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k *'l *'m +let uu___is_Mktuple13 projectee = true +let __proj__Mktuple13__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _1 +let __proj__Mktuple13__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _2 +let __proj__Mktuple13__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _3 +let __proj__Mktuple13__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _4 +let __proj__Mktuple13__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _5 +let __proj__Mktuple13__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _6 +let __proj__Mktuple13__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _7 +let __proj__Mktuple13__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _8 +let __proj__Mktuple13__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _9 +let __proj__Mktuple13__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _10 +let __proj__Mktuple13__item___11 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _11 +let __proj__Mktuple13__item___12 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _12 +let __proj__Mktuple13__item___13 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _13 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k,'l,'m,'n) tuple14 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k *'l *'m *'n +let uu___is_Mktuple14 projectee = true +let __proj__Mktuple14__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _1 +let __proj__Mktuple14__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _2 +let __proj__Mktuple14__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _3 +let __proj__Mktuple14__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _4 +let __proj__Mktuple14__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _5 +let __proj__Mktuple14__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _6 +let __proj__Mktuple14__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _7 +let __proj__Mktuple14__item___8 projectee = + match projectee with | 
(_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _8
+let __proj__Mktuple14__item___9 projectee =
+ match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _9
+let __proj__Mktuple14__item___10 projectee =
+ match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _10
+let __proj__Mktuple14__item___11 projectee =
+ match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _11
+let __proj__Mktuple14__item___12 projectee =
+ match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _12
+let __proj__Mktuple14__item___13 projectee =
+ match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _13
+let __proj__Mktuple14__item___14 projectee =
+ match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _14
diff --git a/stage0/ulib/fs/FStar_ST.fs b/stage0/ulib/fs/FStar_ST.fs
new file mode 100644
index 00000000000..79da3351dea
--- /dev/null
+++ b/stage0/ulib/fs/FStar_ST.fs
@@ -0,0 +1,23 @@
+#light "off"
+module FStar_ST
+
+open FStar_CommonST
+
+type 'a mref = 'a FStar_Monotonic_Heap.mref
+type 'a ref = 'a FStar_Monotonic_Heap.ref
+
+let read = read
+let op_Bang = op_Bang
+
+let write = write
+let op_Colon_Equals = op_Colon_Equals
+
+let alloc x = alloc x
+
+let recall = recall
+let get = get
+
+type 'a witnessed = 'a FStar_CommonST.witnessed
+
+let gst_Witness = gst_witness
+let gst_recall = gst_recall
\ No newline at end of file
diff --git a/stage0/ulib/fs/FStar_Set.fs b/stage0/ulib/fs/FStar_Set.fs
new file mode 100644
index 00000000000..28e1df7bb34
--- /dev/null
+++ b/stage0/ulib/fs/FStar_Set.fs
@@ -0,0 +1,15 @@
+module FStar_Set
+
+type set<'a when 'a : comparison> = Set<'a>
+let empty () = Set.empty
+let singleton = Set.singleton
+let union = Set.union
+let intersect = Set.intersect
+let complement x = Set.empty // TODO
+let mem = Set.contains
+
+(*
+ * F* should not extract Set.equal
+ * We should fix it, adding the following in the meantime
+ *)
+type equal = unit
diff --git a/stage0/ulib/fs/FStar_String.fs b/stage0/ulib/fs/FStar_String.fs
new file mode 100644
index 00000000000..c40710f24dc
--- /dev/null
+++ b/stage0/ulib/fs/FStar_String.fs
@@ -0,0 +1,29 @@
+module FStar_String
+open Prims
+
+let make (i : nat) (c : FStar_Char.char) = String.init (Microsoft.FSharp.Core.Operators.int i) (fun _ -> string c)
+let strcat s t = s ^ t
+let op_Hat s t = strcat s t
+
+let split (seps : FStar_Char.char list) (s : string) = s.Split(Array.ofList seps)
+
+let compare (x : string) (y : string) = Prims.of_int (x.CompareTo(y))
+type char = FStar_Char.char
+let concat = String.concat
+let length s = Prims.of_int (String.length s)
+let strlen s = length s
+
+let substring (s : string) (i : Prims.int) (j : Prims.int) = s.Substring(Microsoft.FSharp.Core.Operators.int i, Microsoft.FSharp.Core.Operators.int j)
+let sub = substring
+
+let get (s : string) (i : Prims.int) = s.[Microsoft.FSharp.Core.Operators.int i]
+let collect (f : char -> string) (s : string) = s |> Array.ofSeq |> Array.map f |> String.concat ""
+let lowercase (s : string) = s.ToLowerInvariant()
+let uppercase (s : string) = s.ToUpperInvariant()
+//let escaped = BatString.escaped
+let index = get
+
+let index_of (s : string) (c : char) = s.IndexOf(c)
+let list_of_string (s : string) = s |> Seq.toList
+let string_of_list (l : char list) = System.String(Array.ofList l)
+let string_of_char (c : char) = System.String(c, 1)
diff --git a/stage0/ulib/fs/FStar_UInt16.fs b/stage0/ulib/fs/FStar_UInt16.fs
new file mode 100644
index 00000000000..9c9c543ce6f
--- 
/dev/null +++ b/stage0/ulib/fs/FStar_UInt16.fs @@ -0,0 +1,85 @@ +module FStar_UInt16 +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) +(* THIS FILE IS BASED ON AUTOGENERATED ml/FStar_UInt16.ml FILE! *) +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) + +type uint16 = System.UInt16 +type t = System.UInt16 +let n = Prims.of_int 16 + +let uint_to_t x = System.UInt16.Parse((string x)) +let __uint_to_t = uint_to_t + +let v (x:t) : Prims.int = Prims.parse_int (string x) + +let zero = 0us +let one = 1us +let ones = System.UInt16.MaxValue + +(* Reexport add, plus aliases *) +let add : t -> t -> t = (+) +let add_underspec : t -> t -> t = (+) +let add_mod : t -> t -> t = (+) + +(* Reexport sub, plus aliases *) +let sub : t -> t -> t = (-) +let sub_underspec : t -> t -> t = (-) +let sub_mod : t -> t -> t = (-) + +(* Reexport mul, plus aliases *) +let mul : t -> t -> t = (*) +let mul_underspec : t -> t -> t = (*) +let mul_mod : t -> t -> t = (*) + +(* Just reexport these *) +let div : t -> t -> t = (/) +let rem : t -> t -> t = (%) +let logand : t -> t -> t = (&&&) +let logxor : t -> t -> t = (^^^) +let logor : t -> t -> t = (|||) +let lognot : t -> t = (~~~) +let to_string : t -> string = string +let of_string : string -> t = System.UInt16.Parse + +let to_string_hex (x : t) = "0x" + (x.ToString("X")) + +let to_string_hex_pad (i : t) = i.ToString("X4") + +(* The shifts take a uint32 argument, so we need to convert *) +let shift_right (n : t) (i : System.UInt32) : t = n >>> (int32 i) +let shift_left (n : t) (i : System.UInt32) : t = n <<< (int32 i) +let shift_arithmetic_right = shift_right + +(* Comparison operators *) +let eq (a:t) (b:t) : bool = a = b +let gt (a:t) (b:t) : bool = a > b +let gte (a:t) (b:t) : bool = a >= b +let lt (a:t) (b:t) : bool = a < b +let lte (a:t) (b:t) : bool = a <= b + +(* NOT Constant time operators *) +let eq_mask (a:t) (b:t) : t = if a = b then ones else zero +let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte diff --git a/stage0/ulib/fs/FStar_UInt32.fs b/stage0/ulib/fs/FStar_UInt32.fs new file mode 100644 index 00000000000..58a4feeb85d --- /dev/null +++ b/stage0/ulib/fs/FStar_UInt32.fs @@ -0,0 +1,85 @@ +module FStar_UInt32 +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) +(* THIS FILE IS BASED ON AUTOGENERATED ml/FStar_UInt32.ml FILE! *) +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
*) + +type uint32 = System.UInt32 +type t = System.UInt32 +let n = Prims.of_int 32 + +let uint_to_t x = System.UInt32.Parse((string x)) +let __uint_to_t = uint_to_t + +let v (x:t) : Prims.int = Prims.parse_int (string x) + +let zero = 0u +let one = 1u +let ones = System.UInt32.MaxValue + +(* Reexport add, plus aliases *) +let add : t -> t -> t = (+) +let add_underspec : t -> t -> t = (+) +let add_mod : t -> t -> t = (+) + +(* Reexport sub, plus aliases *) +let sub : t -> t -> t = (-) +let sub_underspec : t -> t -> t = (-) +let sub_mod : t -> t -> t = (-) + +(* Reexport mul, plus aliases *) +let mul : t -> t -> t = (*) +let mul_underspec : t -> t -> t = (*) +let mul_mod : t -> t -> t = (*) + +(* Just reexport these *) +let div : t -> t -> t = (/) +let rem : t -> t -> t = (%) +let logand : t -> t -> t = (&&&) +let logxor : t -> t -> t = (^^^) +let logor : t -> t -> t = (|||) +let lognot : t -> t = (~~~) +let to_string : t -> string = string +let of_string : string -> t = System.UInt32.Parse + +let to_string_hex (x : t) = "0x" + (x.ToString("X")) + +let to_string_hex_pad (i : t) = i.ToString("X8") + +(* The shifts take a uint32 argument, so we need to convert *) +let shift_right (n : t) (i : System.UInt32) : t = n >>> (int32 i) +let shift_left (n : t) (i : System.UInt32) : t = n <<< (int32 i) +let shift_arithmetic_right = shift_right + +(* Comparison operators *) +let eq (a:t) (b:t) : bool = a = b +let gt (a:t) (b:t) : bool = a > b +let gte (a:t) (b:t) : bool = a >= b +let lt (a:t) (b:t) : bool = a < b +let lte (a:t) (b:t) : bool = a <= b + +(* NOT Constant time operators *) +let eq_mask (a:t) (b:t) : t = if a = b then ones else zero +let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte diff --git a/stage0/ulib/fs/FStar_UInt64.fs b/stage0/ulib/fs/FStar_UInt64.fs new file mode 100644 index 00000000000..1b0a40070e3 --- /dev/null +++ b/stage0/ulib/fs/FStar_UInt64.fs @@ -0,0 +1,85 @@ +module FStar_UInt64 +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! *) +(* THIS FILE IS BASED ON AUTOGENERATED ml/FStar_UInt64.ml FILE! *) +(* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
*) + +type uint64 = System.UInt64 +type t = System.UInt64 +let n = Prims.of_int 64 + +let uint_to_t x = System.UInt64.Parse((string x)) +let __uint_to_t = uint_to_t + +let v (x:t) : Prims.int = Prims.parse_int (string x) + +let zero = 0UL +let one = 1UL +let ones = System.UInt64.MaxValue + +(* Reexport add, plus aliases *) +let add : t -> t -> t = (+) +let add_underspec : t -> t -> t = (+) +let add_mod : t -> t -> t = (+) + +(* Reexport sub, plus aliases *) +let sub : t -> t -> t = (-) +let sub_underspec : t -> t -> t = (-) +let sub_mod : t -> t -> t = (-) + +(* Reexport mul, plus aliases *) +let mul : t -> t -> t = (*) +let mul_underspec : t -> t -> t = (*) +let mul_mod : t -> t -> t = (*) + +(* Just reexport these *) +let div : t -> t -> t = (/) +let rem : t -> t -> t = (%) +let logand : t -> t -> t = (&&&) +let logxor : t -> t -> t = (^^^) +let logor : t -> t -> t = (|||) +let lognot : t -> t = (~~~) +let to_string : t -> string = string +let of_string : string -> t = System.UInt64.Parse + +let to_string_hex (x : t) = "0x" + (x.ToString("X")) + +let to_string_hex_pad (i : t) = i.ToString("X16") + +(* The shifts take a uint32 argument, so we need to convert *) +let shift_right (n : t) (i : System.UInt32) : t = n >>> (int32 i) +let shift_left (n : t) (i : System.UInt32) : t = n <<< (int32 i) +let shift_arithmetic_right = shift_right + +(* Comparison operators *) +let eq (a:t) (b:t) : bool = a = b +let gt (a:t) (b:t) : bool = a > b +let gte (a:t) (b:t) : bool = a >= b +let lt (a:t) (b:t) : bool = a < b +let lte (a:t) (b:t) : bool = a <= b + +(* NOT Constant time operators *) +let eq_mask (a:t) (b:t) : t = if a = b then ones else zero +let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte diff --git a/stage0/ulib/fs/FStar_UInt8.fs b/stage0/ulib/fs/FStar_UInt8.fs new file mode 100644 index 00000000000..4d08b53a04c --- /dev/null +++ b/stage0/ulib/fs/FStar_UInt8.fs @@ -0,0 +1,84 @@ +module FStar_UInt8 + +// TODO: Would it make sense to use .net byte here? 
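+// Sketch of the tradeoff behind the TODO above: Prims.int is System.Numerics.BigInteger,
+// so arithmetic never overflows and the *_underspec/*_mod operations below restore the
+// 8-bit range explicitly by masking with &&& 255I (e.g. add_underspec 250I 10I gives
+// 260I &&& 255I = 4I, i.e. modulo-256 wraparound). A System.Byte representation would
+// wrap natively, but would need conversions to and from Prims.int (see v and int_to_uint8)
+// and the same operator surface as the other FStar_UIntN modules.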
+type uint8 = Prims.int +type byte = uint8 +type t = uint8 +type t' = t + +let n = Prims.parse_int "8" +let v (x:uint8) : Prims.int = Prims.parse_int (string x) + +let zero = 0 +let one = 1 +let ones = 255 + +let add (a:uint8) (b:uint8) : uint8 = a + b +let add_underspec a b = (add a b) &&& 255I +let add_mod = add_underspec + +let sub (a:uint8) (b:uint8) : uint8 = a - b +let sub_underspec a b = (sub a b) &&& 255I +let sub_mod = sub_underspec + +let mul (a:uint8) (b:uint8) : uint8 = a * b +let mul_underspec a b = (mul a b) &&& 255I +let mul_mod = mul_underspec + +let div (a:uint8) (b:uint8) : uint8 = Prims.(/) a b + +let rem (a:uint8) (b:uint8) : uint8 = Prims.(mod) a b + +let logand (a:uint8) (b:uint8) : uint8 = a &&& b +let logxor (a:uint8) (b:uint8) : uint8 = a ^^^ b +let logor (a:uint8) (b:uint8) : uint8 = a ||| b +let lognot (a:uint8) : uint8 = bigint.op_OnesComplement a + +let int_to_uint8 (x:Prims.int) : uint8 = x % 256I + +let shift_right (a:uint8) (b:System.UInt32) : uint8 = a >>> (int32 b) +let shift_left (a:uint8) (b:System.UInt32) : uint8 = (a <<< (int32 b)) &&& 255I + +(* Comparison operators *) +let eq (a:uint8) (b:uint8) : bool = a = b +let gt (a:uint8) (b:uint8) : bool = a > b +let gte (a:uint8) (b:uint8) : bool = a >= b +let lt (a:uint8) (b:uint8) : bool = a < b +let lte (a:uint8) (b:uint8) : bool = a <= b + +(* NOT Constant time comparison operators *) +let gte_mask (a:uint8) (b:uint8) : uint8 = if a >= b then 255I else 0I +let eq_mask (a:uint8) (b:uint8) : uint8 = if a = b then 255I else 0I + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte + +let of_string s = Prims.parse_int s +let to_string s = Prims.to_string s +// The hex printing for BigInteger in .NET is a bit non-standard as it +// prints an extra leading '0' for positive numbers +let to_string_hex (s : t) = "0x" + (s.ToString("X").TrimStart([| '0' |])) +let to_string_hex_pad (s : t) = s.ToString("X").TrimStart([| '0' |]).PadLeft(2, '0') +let uint_to_t s = int_to_uint8 s +let to_int s = s +let __uint_to_t = uint_to_t diff --git a/stage0/ulib/fs/VS/.gitignore b/stage0/ulib/fs/VS/.gitignore new file mode 100644 index 00000000000..57a74cff201 --- /dev/null +++ b/stage0/ulib/fs/VS/.gitignore @@ -0,0 +1 @@ +packages \ No newline at end of file diff --git a/ulib/fs/VS/Makefile b/stage0/ulib/fs/VS/Makefile similarity index 100% rename from ulib/fs/VS/Makefile rename to stage0/ulib/fs/VS/Makefile diff --git a/stage0/ulib/fs/VS/README.md b/stage0/ulib/fs/VS/README.md new file mode 100644 index 00000000000..bd6f346d14d --- /dev/null +++ b/stage0/ulib/fs/VS/README.md @@ -0,0 +1,4 @@ +ulibfs +====== + +Runtime library for exported code exported from F* to F#. 
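+
+Code extracted from F* to F# compiles against the modules in this directory; for
+instance (an illustrative fragment, not part of the library itself), arithmetic on
+`Prims.int` goes through the big-integer wrappers defined in `prims.fs`:
+
+    open Prims
+    let three : Prims.int = of_int 1 + of_int 2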
diff --git a/stage0/ulib/fs/VS/UlibFS.sln b/stage0/ulib/fs/VS/UlibFS.sln
new file mode 100644
index 00000000000..7fda5cd0cd0
--- /dev/null
+++ b/stage0/ulib/fs/VS/UlibFS.sln
@@ -0,0 +1,27 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.30114.105
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{82253A13-6BB4-4C9F-8198-50F638EFB3EE}"
+EndProject
+Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "ulibfs", "..\ulibfs.fsproj", "{E3E96B71-22AB-4518-B08E-9C55C4D256DB}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {E3E96B71-22AB-4518-B08E-9C55C4D256DB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E3E96B71-22AB-4518-B08E-9C55C4D256DB}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E3E96B71-22AB-4518-B08E-9C55C4D256DB}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E3E96B71-22AB-4518-B08E-9C55C4D256DB}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {F7F50713-93CD-494F-80BA-C050D87A05B2}
+ EndGlobalSection
+EndGlobal
diff --git a/stage0/ulib/fs/VS/fstar-new.png b/stage0/ulib/fs/VS/fstar-new.png
new file mode 100644
index 00000000000..e6bf8a6b330
Binary files /dev/null and b/stage0/ulib/fs/VS/fstar-new.png differ
diff --git a/stage0/ulib/fs/VS/global.json b/stage0/ulib/fs/VS/global.json
new file mode 100644
index 00000000000..ff28d518391
--- /dev/null
+++ b/stage0/ulib/fs/VS/global.json
@@ -0,0 +1,6 @@
+{
+ "sdk": {
+ "version": "6.0.400",
+ "rollForward": "latestFeature"
+ }
+}
diff --git a/ulib/fs/native_int/prims.fs b/stage0/ulib/fs/native_int/prims.fs
similarity index 100%
rename from ulib/fs/native_int/prims.fs
rename to stage0/ulib/fs/native_int/prims.fs
diff --git a/stage0/ulib/fs/prims.fs b/stage0/ulib/fs/prims.fs
new file mode 100644
index 00000000000..a2ce47f4484
--- /dev/null
+++ b/stage0/ulib/fs/prims.fs
@@ -0,0 +1,140 @@
+#light "off"
+module Prims
+open System.Numerics
+
+(* Euclidean division and remainder:
+ Inefficient implementation based on the naive version at
+ https://en.wikipedia.org/wiki/Division_algorithm
+
+ Note, in OCaml, we use ZArith's ediv and erem
+*)
+let rec ediv_rem (n:bigint) (d:bigint) : bigint * bigint =
+ if d < 0I then
+ let q, r = ediv_rem n (-d) in
+ -q, r
+ else if n < 0I then
+ let q, r = ediv_rem (-n) d in
+ if r = 0I then
+ -q, 0I
+ else
+ (-q) - 1I,
+ d - r
+ else BigInteger.DivRem (n, d)
+
+type int = bigint
+type nonzero = int
+let ( + ) (x:bigint) (y:int) = x + y
+let ( - ) (x:int) (y:int) = x - y
+let ( * ) (x:int) (y:int) = x * y
+let ( / ) (x:int) (y:int) = fst (ediv_rem x y)
+let ( <= ) (x:int) (y:int) = x <= y
+let ( >= ) (x:int) (y:int) = x >= y
+let ( < ) (x:int) (y:int) = x < y
+let ( > ) (x:int) (y:int) = x > y
+let (mod) (x:int) (y:int) = snd (ediv_rem x y)
+let ( ~- ) (x:int) = -x
+let abs (x:int) = BigInteger.Abs x
+let of_int (x:FSharp.Core.int) = BigInteger x
+let int_zero = of_int 0
+let int_one = of_int 1
+let parse_int = BigInteger.Parse
+let to_string (x:int) = x.ToString()
+
+type unit = Microsoft.FSharp.Core.unit
+type bool = Microsoft.FSharp.Core.bool
+type string = Microsoft.FSharp.Core.string
+type 'a array = 'a 
Microsoft.FSharp.Core.array +type exn = Microsoft.FSharp.Core.exn +type 'a list' = 'a list +type 'a list = 'a Microsoft.FSharp.Collections.list +type 'a option = 'a Microsoft.FSharp.Core.option + +type nat = int +type pos = int +type 'd b2t = B2t of unit + +type 'a squash = Squash of unit + +type (' p, ' q) sum = + | Left of ' p + | Right of ' q + +type (' p, ' q) l_or = ('p, 'q) sum squash + +let uu___is_Left = function Left _ -> true | Right _ -> false + +let uu___is_Right = function Left _ -> false | Right _ -> true + +type (' p, ' q) pair = +| Pair of ' p * ' q + +type (' p, ' q) l_and = ('p, 'q) pair squash + +let uu___is_Pair _ = true + + +type trivial = + | T + +type l_True = trivial squash + +let uu___is_T _ = true + +type empty = unit +(*This is how Coq extracts Inductive void := . Our extraction needs to be fixed to recognize when there + are no constructors and generate this type abbreviation*) +type l_False = empty squash + +type (' p, ' q) l_imp = ('p -> 'q) squash + +type (' p, ' q) l_iff = ((' p, ' q) l_imp, (' q, ' p) l_imp) l_and + +type ' p l_not = (' p, l_False) l_imp + +type (' a, ' p) l_Forall = L_forall of unit + +type (' a, ' p) l_Exists = L_exists of unit + + +type (' p, ' q, 'dummyP) eq2 = Eq2 of unit +type (' p, ' q, 'dummyP, 'dummyQ) op_Equals_Equals_Equals = Eq3 of unit + +type prop = obj + +let cut = () +let admit () = failwith "no admits" +let _assume () = () +let _assert x = () +let magic () = failwith "no magic" +let unsafe_coerce x = unbox (box x) +let op_Negation x = not x + +let op_Equality x y = x = y +let op_disEquality x y = x<>y +let op_AmpAmp x y = x && y +let op_BarBar x y = x || y +let uu___is_Nil l = l = [] (*consider redefining List.isEmpty as this function*) +let uu___is_Cons l = not (uu___is_Nil l) +let strcat x y = x ^ y + +let string_of_bool (b:bool) = b.ToString() +let string_of_int (i:int) = i.ToString() + +type ('a, 'b) dtuple2 = + | Mkdtuple2 of 'a * 'b + +let __proj__Mkdtuple2__item___1 x = match x with + | Mkdtuple2 (x, _) -> x +let __proj__Mkdtuple2__item___2 x = match x with + | Mkdtuple2 (_, x) -> x + +let rec pow2 (n:int) = if n = bigint 0 then + bigint 1 + else + (bigint 2) * pow2 (n - (bigint 1)) + +let __proj__Cons__item__tl = function + | _::tl -> tl + | _ -> failwith "Impossible" + +let min = min diff --git a/ulib/fs/ulibfs.fsproj b/stage0/ulib/fs/ulibfs.fsproj similarity index 100% rename from ulib/fs/ulibfs.fsproj rename to stage0/ulib/fs/ulibfs.fsproj diff --git a/stage0/ulib/fstar.include b/stage0/ulib/fstar.include new file mode 100644 index 00000000000..05c2d2a67a2 --- /dev/null +++ b/stage0/ulib/fstar.include @@ -0,0 +1,3 @@ +legacy +experimental +.cache diff --git a/stage0/ulib/gmake/Makefile.tmpl b/stage0/ulib/gmake/Makefile.tmpl new file mode 100644 index 00000000000..86aa9877194 --- /dev/null +++ b/stage0/ulib/gmake/Makefile.tmpl @@ -0,0 +1,86 @@ +.PHONY: verify-all basic_clean test test.karamel test.ocaml +################################################################################ +# Customize these variables for your project +################################################################################ +# The root files of your project, from which to begin scanning dependences +FSTAR_FILES ?= + +# The paths to related files which to include for scanning +# -- No need to add FSTAR_HOME/ulib; it is included by default +INCLUDE_PATHS ?= + +# The executable file you want to produce +PROGRAM ?= + +# A driver in ML to call into your program +TOP_LEVEL_FILE ?= + +# A place to put all the emitted .ml files 
+OUTPUT_DIRECTORY ?= _output + +################################################################################ +MY_FSTAR=$(FSTAR) $(SIL) --cache_checked_modules --odir $(OUTPUT_DIRECTORY) +ML_FILES=$(addprefix $(OUTPUT_DIRECTORY)/,$(addsuffix .ml,$(subst .,_, $(subst .fst,,$(FSTAR_FILES))))) +OCAML_EXE=$(PROGRAM).ocaml.exe +KRML_EXE=$(PROGRAM).exe + +# a.fst.checked is the binary, checked version of a.fst +%.fst.checked: %.fst + $(MY_FSTAR) $*.fst + touch -c $@ + +# a.fsti.checked is the binary, checked version of a.fsti +%.fsti.checked: %.fsti + $(MY_FSTAR) $*.fsti + touch -c $@ + +# The _tags file is a directive to ocamlbuild +# The extracted ML files are precious, because you may want to examine them, +# e.g., to see how type signatures were transformed from F* +.PRECIOUS: _tags $(ML_FILES) $(addsuffix .checked,$(FSTAR_FILES)) $(OUTPUT_DIRECTORY)/out.krml + +_tags: + echo ": traverse" > $@ + echo "<$(OUTPUT_DIRECTORY)>: traverse\n" >> $@ + echo "<$(OUTPUT_DIRECTORY)/c>: -traverse\n" >> $@ + +# To extract an A.ml ML file from an A.fst, we just reload its A.fst.checked file +# and then with the --codegen OCaml option, emit an A.ml +# Note, by default F* will extract all files in the dependency graph +# With the --extract_module, we instruct it to just extract A.ml +$(OUTPUT_DIRECTORY)/%.ml: + $(MY_FSTAR) $(subst .checked,,$(notdir $<)) --codegen OCaml --extract_module $(subst .fst.checked,,$(notdir $<)) + +# FIXME: ocamlbuild is deprecated, use dune instead +$(OCAML_EXE): _tags $(ML_FILES) $(TOP_LEVEL_FILE) + OCAMLPATH="$(FSTAR_HOME)/lib" ocamlbuild -I $(OUTPUT_DIRECTORY) -use-ocamlfind -pkg fstar.lib $(subst .ml,.native,$(TOP_LEVEL_FILE)) + mv _build/$(subst .ml,.native,$(TOP_LEVEL_FILE)) $@ + +test.ocaml: $(OCAML_EXE) + ./$< hello + +$(OUTPUT_DIRECTORY)/c/out.krml: $(addsuffix .checked,$(FSTAR_FILES)) + krml -fsopts --cache_checked_modules -tmpdir $(OUTPUT_DIRECTORY)/c -skip-translation $(FSTAR_FILES) + +$(KRML_EXE): $(OUTPUT_DIRECTORY)/c/out.krml + krml $< -tmpdir $(OUTPUT_DIRECTORY)/c -no-prefix A -o $@ + +test.karamel: $(KRML_EXE) + ./$< hello + +test: test.karamel test.ocaml + +basic_clean: + rm -rf _build $(OUTPUT_DIRECTORY) *~ *.checked $(OCAML_EXE) $(KRML_EXE) .depend .depend.rsp + +.depend: $(FSTAR_FILES) + @true $(shell rm -f .depend.rsp) $(foreach f,--dep full $(addprefix --include , $(INCLUDE_PATHS)) $(FSTAR_FILES),$(shell echo $(f) >> .depend.rsp)) + $(MY_FSTAR) @.depend.rsp --output_deps_to .depend + +depend: .depend + +include .depend + +# The default target is to verify all files, without extracting anything +# It needs to be here, because it reads the variable ALL_CHECKED_FILES in .depend +verify-all: $(ALL_CHECKED_FILES) diff --git a/ulib/gmake/fstar.mk b/stage0/ulib/gmake/fstar.mk similarity index 100% rename from ulib/gmake/fstar.mk rename to stage0/ulib/gmake/fstar.mk diff --git a/ulib/gmake/z3.mk b/stage0/ulib/gmake/z3.mk similarity index 100% rename from ulib/gmake/z3.mk rename to stage0/ulib/gmake/z3.mk diff --git a/ulib/install-ulib.sh b/stage0/ulib/install-ulib.sh similarity index 100% rename from ulib/install-ulib.sh rename to stage0/ulib/install-ulib.sh diff --git a/stage0/ulib/legacy/FStar.Array.fst b/stage0/ulib/legacy/FStar.Array.fst new file mode 100644 index 00000000000..6368159cf24 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Array.fst @@ -0,0 +1,150 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(** +F* standard library mutable arrays module. + +@summary Mutable arrays +*) +module FStar.Array +#set-options "--max_fuel 0 --initial_fuel 0 --initial_ifuel 0 --max_ifuel 0" +open FStar.All +open FStar.Seq +open FStar.Ref + +let array a = ref (seq a) + +let as_ref #_ arr = arr + +let op_At_Bar #a s1 s2 = + let s1' = !s1 in + let s2' = !s2 in + ST.alloc (Seq.append s1' s2') + +let of_seq #a s = ST.alloc s + +let to_seq #a s = !s + +let of_list #a l = of_seq (Seq.seq_of_list l) + +let create #a n init = ST.alloc (Seq.create n init) + +let index #a x n = + let s = to_seq x in + Seq.index s n + +let upd #a x n v = + let s = !x in + let s' = Seq.upd s n v in + x:= s' + +let length #a x = let s = !x in Seq.length s + +let op #a f x = + let s = !x in + let s' = f s in + x := s' + +let swap #a x i j = + let tmpi = index x i in + let tmpj = index x j in + upd x j tmpi; + upd x i tmpj + +val copy_aux: + #a:Type -> s:array a -> cpy:array a -> ctr:nat -> + ST unit + (requires (fun h -> (contains h s /\ contains h cpy /\ addr_of s <> addr_of cpy) + /\ (Seq.length (sel h cpy) = Seq.length (sel h s)) + /\ (ctr <= Seq.length (sel h cpy)) + /\ (forall (i:nat). i < ctr ==> Seq.index (sel h s) i == Seq.index (sel h cpy) i))) + (ensures (fun h0 u h1 -> (contains h1 s /\ contains h1 cpy /\ addr_of s <> addr_of cpy ) + /\ (modifies (only cpy) h0 h1) + /\ (Seq.equal (sel h1 cpy) (sel h1 s)))) +let rec copy_aux #a s cpy ctr = + match length cpy - ctr with + | 0 -> () + | _ -> upd cpy ctr (index s ctr); + copy_aux s cpy (ctr+1) + +let copy #a s = + let cpy = create (length s) (index s 0) in + copy_aux s cpy 0; + cpy + +private val blit_aux: + #a:Type -> s:array a -> s_idx:nat -> t:array a -> t_idx:nat -> len:nat -> ctr:nat -> + ST unit + (requires (fun h -> + (contains h s /\ contains h t /\ addr_of s <> addr_of t) + /\ (Seq.length (sel h s) >= s_idx + len) + /\ (Seq.length (sel h t) >= t_idx + len) + /\ (ctr <= len) + /\ (forall (i:nat). + i < ctr ==> Seq.index (sel h s) (s_idx+i) == Seq.index (sel h t) (t_idx+i)))) + (ensures (fun h0 u h1 -> + (contains h1 s /\ contains h1 t /\ addr_of s <> addr_of t) + /\ (modifies (only t) h0 h1) + /\ (Seq.length (sel h1 s) >= s_idx + len) + /\ (Seq.length (sel h1 t) >= t_idx + len) + /\ (Seq.length (sel h0 s) = Seq.length (sel h1 s)) + /\ (Seq.length (sel h0 t) = Seq.length (sel h1 t)) + /\ (forall (i:nat). + i < len ==> Seq.index (sel h1 s) (s_idx+i) == Seq.index (sel h1 t) (t_idx+i)) + /\ (forall (i:nat). 
+ (i < Seq.length (sel h1 t) /\ (i < t_idx \/ i >= t_idx + len)) ==> + Seq.index (sel h1 t) i == Seq.index (sel h0 t) i) )) + +#set-options "--z3rlimit 60" +let rec blit_aux #a s s_idx t t_idx len ctr = + match len - ctr with + | 0 -> () + | _ -> upd t (t_idx + ctr) (index s (s_idx + ctr)); + blit_aux s s_idx t t_idx len (ctr+1) +#set-options "--z3rlimit 5" + +private val blit: + #a:Type -> s:array a -> s_idx:nat -> t:array a -> t_idx:nat -> len:nat -> + ST unit + (requires (fun h -> + (contains h s) + /\ (contains h t) + /\ (addr_of s <> addr_of t) + /\ (Seq.length (sel h s) >= s_idx + len) + /\ (Seq.length (sel h t) >= t_idx + len))) + (ensures (fun h0 u h1 -> + (contains h1 s /\ contains h1 t /\ addr_of s <> addr_of t) + /\ (Seq.length (sel h1 s) >= s_idx + len) + /\ (Seq.length (sel h1 t) >= t_idx + len) + /\ (Seq.length (sel h0 s) = Seq.length (sel h1 s)) + /\ (Seq.length (sel h0 t) = Seq.length (sel h1 t)) + /\ (modifies (only t) h0 h1) + /\ (forall (i:nat). + i < len ==> Seq.index (sel h1 s) (s_idx+i) == Seq.index (sel h1 t) (t_idx+i)) + /\ (forall (i:nat).{:pattern (Seq.index (sel h1 t) i)} + (i < Seq.length (sel h1 t) /\ (i < t_idx \/ i >= t_idx + len)) ==> + (Seq.index (sel h1 t) i == Seq.index (sel h0 t) i)) )) +let blit #a s s_idx t t_idx len = + blit_aux s s_idx t t_idx len 0 + +#set-options "--z3rlimit 120" +let sub #a s idx len = + let h0 = ST.get () in + let t = create len (index s 0) in + blit s idx t 0 len; + let h1 = ST.get () in + assert (Seq.equal (Seq.slice (sel h0 s) idx (idx + len)) (sel h1 t)); + t diff --git a/stage0/ulib/legacy/FStar.Array.fsti b/stage0/ulib/legacy/FStar.Array.fsti new file mode 100644 index 00000000000..c91ef0cc33d --- /dev/null +++ b/stage0/ulib/legacy/FStar.Array.fsti @@ -0,0 +1,135 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +module FStar.Array + +(** +F* standard library mutable arrays module. 
+ +@summary Mutable arrays +*) + +open FStar.All +open FStar.Seq +open FStar.Ref + +#set-options "--max_fuel 0 --initial_fuel 0 --initial_ifuel 0 --max_ifuel 0" + +val array (a:Type0) : Type0 + +val as_ref (#a:Type0) (arr:array a) : GTot (ref (seq a)) + +let sel (#a:Type0) (h:heap) (s:array a) : GTot (seq a) = Heap.sel h (as_ref s) + +let contains (#a:Type0) (h:heap) (s:array a) : Type0 = Heap.contains h (as_ref s) + +let unused_in (#a:Type0) (arr:array a) (h:heap) : Type0 = Heap.unused_in (as_ref arr) h + +let heap_upd (#a:Type0) (h:heap) (r:array a) (v:seq a) : GTot heap = Heap.upd h (as_ref r) v + +let addr_of (#a:Type0) (arr:array a) : GTot nat = addr_of (as_ref arr) + +let only (#a:Type0) (arr:array a) : GTot (Set.set nat) = Set.singleton (addr_of arr) + +val op_At_Bar (#a:Type0) (s1:array a) (s2:array a) + : ST (array a) + (requires (fun h -> contains h s1 /\ contains h s2)) + (ensures (fun h0 s h1 -> contains h0 s1 /\ contains h0 s2 /\ contains h1 s /\ + sel h1 s == Seq.append (sel h0 s1) (sel h0 s2) /\ + modifies Set.empty h0 h1)) + +unfold let create_post (#a:Type0) (s:seq a) +: heap -> array a -> heap -> Type0 += fun h0 x h1 -> + x `unused_in` h0 /\ + contains h1 x /\ + modifies Set.empty h0 h1 /\ + sel h1 x== s + +val of_seq (#a:Type0) (s:seq a) +: ST (array a) + (requires fun _ -> True) + (ensures create_post s) + +val to_seq (#a:Type0) (s:array a) + : ST (seq a) + (requires (fun h -> contains h s)) + (ensures (fun h0 x h1 -> (sel h0 s == x /\ h0 == h1))) + +// Used by the compiler for array literals +val of_list (#a:Type0) (l:list a) +: ST (array a) + (requires fun _ -> True) + (ensures create_post (seq_of_list l)) + +val create (#a:Type0) (n:nat) (init:a) + : ST (array a) + (requires (fun h -> True)) + (ensures (fun h0 x h1 -> x `unused_in` h0 /\ + contains h1 x /\ + modifies Set.empty h0 h1 /\ + sel h1 x == Seq.create n init)) + +val index (#a:Type0) (x:array a) (n:nat) + : ST a + (requires (fun h -> contains h x /\ n < Seq.length (sel h x))) + (ensures (fun h0 v h1 -> n < Seq.length (sel h0 x) /\ + h0 == h1 /\ + v == Seq.index (sel h0 x) n)) + +val upd (#a:Type0) (x:array a) (n:nat) (v:a) + :ST unit + (requires (fun h -> contains h x /\ n < Seq.length (sel h x))) + (ensures (fun h0 u h1 -> n < Seq.length (sel h0 x) /\ + contains h1 x /\ + modifies (Set.singleton (addr_of x)) h0 h1 /\ + sel h1 x == Seq.upd (sel h0 x) n v)) + +val length (#a:Type0) (x:array a) + : ST nat + (requires (fun h -> contains h x)) + (ensures (fun h0 y h1 -> y = length (sel h0 x) /\ h0 == h1)) + +val op (#a:Type0) (f:seq a -> seq a) (x:array a) + : ST unit + (requires (fun h -> contains h x)) + (ensures (fun h0 u h1 -> modifies (Set.singleton (addr_of x)) h0 h1 /\ sel h1 x == f (sel h0 x))) + +val swap (#a:Type0) (x:array a) (i:nat) (j:nat{i <= j}) + : ST unit + (requires (fun h -> contains h x /\ j < Seq.length (sel h x))) + (ensures (fun h0 _u h1 -> j < Seq.length (sel h0 x) /\ + contains h1 x /\ + modifies (Set.singleton (addr_of x)) h0 h1 /\ + sel h1 x == Seq.swap (sel h0 x) i j)) + +val copy (#a:Type0) (s:array a) + : ST (array a) + (requires (fun h -> contains h s /\ Seq.length (sel h s) > 0)) + (ensures (fun h0 r h1 -> modifies Set.empty h0 h1 /\ + r `unused_in` h0 /\ + contains h1 r /\ + sel h1 r == sel h0 s)) + +val sub (#a:Type0) (s:array a) (idx:nat) (len:nat) + : ST (array a) + (requires (fun h -> contains h s /\ + Seq.length (sel h s) > 0 /\ + idx + len <= Seq.length (sel h s))) + (ensures (fun h0 t h1 -> contains h1 t /\ + t `unused_in` h0 /\ + modifies Set.empty h0 h1 /\ + Seq.slice 
(sel h0 s) idx (idx + len) == sel h1 t)) diff --git a/stage0/ulib/legacy/FStar.Axiomatic.Array.fst b/stage0/ulib/legacy/FStar.Axiomatic.Array.fst new file mode 100644 index 00000000000..353454678dc --- /dev/null +++ b/stage0/ulib/legacy/FStar.Axiomatic.Array.fst @@ -0,0 +1,94 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(* A logical theory of integer-indexed arrays, from [0, n) *) +module FStar.Axiomatic.Array + +open FStar.Heap + +assume new type seq : Type -> Type +assume val index : #a:Type -> seq a -> int -> Tot a +assume val update : #a:Type -> seq a -> int -> a -> Tot (seq a) +assume val emp : a:Type -> Tot (seq a) +assume val create : #a:Type -> int -> a -> Tot (seq a) + +assume val length : #a:Type -> seq a -> Tot nat +assume val slice : #a:Type -> seq a -> int -> int -> Tot (seq a) +assume val append : #a:Type -> seq a -> seq a -> Tot (seq a) +assume val proj_some: #a:Type -> seq (option a) -> Tot (seq a) +assume type equal : #a:Type -> seq a -> seq a -> Type +type array (a:Type) = ref (seq a) + +assume LengthConst: forall (a:Type) (n:int) (v:a).{:pattern (length (create n v))} + length (create n v) == n + +assume IndexConst: forall (a:Type) (n:int) (v:a) (i:int). {:pattern (index (create n v) i)} + (0 <= i /\ i < n) + ==> index (create n v) i == v + +assume LengthUpdate: forall (a:Type) (s:seq a) (i:int) (v:a). {:pattern (length (update s i v))} + (0 <= i /\ i < length s) + ==> length (update s i v) == length s + +assume IndexUpdate: forall (a:Type) (s:seq a) (i:int) (v:a) (n:int). {:pattern (index (update s i v) n)} + (0 <= n /\ n <= length s) + ==> (if i=n + then index (update s i v) n == v + else index (update s i v) n == index s n) + +assume LengthSlice: forall (a:Type) (s:seq a) (i:int) (j:int). {:pattern (length (slice s i j))} + (0 <= i /\ i <= j /\ j <= length s) + ==> j - i == length (slice s i j) + +assume IndexSlice: forall (a:Type) (s:seq a) (i:int) (j:int) (k:int). {:pattern (index (slice s i j) k)} + (0 <= k /\ k <= length (slice s i j)) + ==> index (slice s i j) k == index s (i + k) + +assume LengthAppend: forall (a:Type) (s1:seq a) (s2:seq a). {:pattern (length (append s1 s2))} + length (append s1 s2) == length s1 + length s2 + +assume IndexAppend: forall (a:Type) (s1:seq a) (s2:seq a) (i:int). 
{:pattern (index (append s1 s2) i)} + if (0 <= i && i < length s1) + then index (append s1 s2) i == index s1 i + else index (append s1 s2) i == index s2 (i - length s1) + +assume SeqEquals: forall (a:Type) (s1:seq a) (s2:seq a).{:pattern (equal s1 s2)} + equal s1 s2 + <==> (length s1 == length s2 + /\ (forall (i:int).{:pattern (index s1 i); (index s2 i)} + (0 <= i /\ i < length s1) + ==> index s1 i == index s2 i)) + +assume Extensional: forall (a:Type) (s1:seq a) (s2:seq a).{:pattern (equal s1 s2)} + equal s1 s2 + ==> s1 == s2 + +assume ProjEmp: forall (a:Type).{:pattern (proj_some (emp (option a)))} + length (proj_some (emp (option a)))==0 + +assume ProjLen: forall (a:Type) (s:seq (option a)).{:pattern (proj_some s)} + length (proj_some s)==length s + +(* assume ProjIndex: forall (a:Type) (s:seq (option a)) (i:int).{:pattern (index (proj_some s) i)} *) +(* (0 <= i /\ i <= length (proj_some s)) *) +(* ==> index (proj_some s) i == Some.v (index s i) *) + +assume EmpConst: forall (a:Type) (s:seq a).{:pattern (length s)} + length s == 0 + ==> s==emp a + +type is_Some_All (a:Type) (s:seq (option a)) = (forall (i:int). (0 <= i /\ i < length s) ==> Some? (index s i)) + diff --git a/stage0/ulib/legacy/FStar.Buffer.Quantifiers.fst b/stage0/ulib/legacy/FStar.Buffer.Quantifiers.fst new file mode 100644 index 00000000000..783b9af7ffc --- /dev/null +++ b/stage0/ulib/legacy/FStar.Buffer.Quantifiers.fst @@ -0,0 +1,119 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Buffer.Quantifiers + +open FStar.Seq +open FStar.UInt32 +open FStar.HyperStack +open FStar.Ghost +open FStar.Buffer +open FStar.Classical + +#set-options "--initial_fuel 0 --max_fuel 0" + +val lemma_sub_quantifiers: #a:Type -> h:mem -> b:buffer a -> b':buffer a -> i:FStar.UInt32.t -> len:FStar.UInt32.t{v len <= length b /\ v i + v len <= length b} -> Lemma + (requires (live h b /\ live h b' /\ Seq.slice (as_seq h b) (v i) (v i + v len) == as_seq h b')) + (ensures (live h b /\ live h b' /\ Seq.slice (as_seq h b) (v i) (v i + v len) == as_seq h b' + /\ length b' = v len + /\ (forall (j:nat). {:pattern (get h b' j)} j < length b' ==> get h b' j == get h b (j + v i)) )) + [SMTPat (Seq.slice (as_seq h b) (v i) (v i + v len)); SMTPat (as_seq h b')] +let lemma_sub_quantifiers #a h b b' i len = + assert (Seq.length (Seq.slice (as_seq h b) (v i) (v i + v len)) = v len); + let lemma_post (j:nat) = j < length b' ==> get h b' j == get h b (j + v i) in + let qj : j:nat -> Lemma (lemma_post j) + = fun j -> assert (j < v len ==> Seq.index (as_seq h b') j == Seq.index (as_seq h b) (j + v i)) in + Classical.forall_intro #_ #lemma_post qj + +val lemma_offset_quantifiers: #a:Type -> h:mem -> b:buffer a -> b':buffer a -> i:FStar.UInt32.t{v i <= length b} -> Lemma + (requires (live h b /\ live h b' /\ Seq.slice (as_seq h b) (v i) (length b) == as_seq h b')) + (ensures (live h b /\ live h b' /\ Seq.slice (as_seq h b) (v i) (length b) == as_seq h b' + /\ length b' = length b - v i + /\ (forall (j:nat). 
{:pattern (get h b' j)} j < length b' ==> get h b' j == get h b (j + v i)) )) + [SMTPat (Seq.slice (as_seq h b) (v i) (length b)); SMTPat (as_seq h b')] +let lemma_offset_quantifiers #a h b b' i = + lemma_sub_quantifiers #a h b b' i (uint_to_t (length b - v i)) + +val lemma_create_quantifiers: #a:Type -> h:mem -> b:buffer a -> init:a -> len:FStar.UInt32.t -> Lemma + (requires (live h b /\ as_seq h b == Seq.create (v len) init)) + (ensures (live h b /\ length b = v len + /\ (forall (i:nat). {:pattern (get h b i)} i < length b ==> get h b i == init))) + [SMTPat (as_seq h b); SMTPat (Seq.create (v len) init)] +let lemma_create_quantifiers #a h b init len = + assert (Seq.length (as_seq h b) = v len); + let lemma_post (i:nat) = i < length b ==> get h b i == init in + let qi : i:nat -> Lemma (lemma_post i) = + fun i -> assert (i < length b ==> get h b i == Seq.index (as_seq h b) i) in + Classical.forall_intro #_ #lemma_post qi + +val lemma_index_quantifiers: #a:Type -> h:mem -> b:buffer a -> n:FStar.UInt32.t -> Lemma + (requires (live h b /\ v n < length b)) + (ensures (live h b /\ v n < length b /\ get h b (v n) == Seq.index (as_seq h b) (v n))) + [SMTPat (Seq.index (as_seq h b) (v n))] +let lemma_index_quantifiers #a h b n = () + +val lemma_upd_quantifiers: #a:Type -> h0:mem -> h1:mem -> b:buffer a -> n:FStar.UInt32.t -> z:a -> Lemma + (requires (live h0 b /\ live h1 b /\ v n < length b /\ as_seq h1 b == Seq.upd (as_seq h0 b) (v n) z)) + (ensures (live h0 b /\ live h1 b /\ v n < length b + /\ (forall (i:nat). {:pattern (get h1 b i)} (i < length b /\ i <> v n) + ==> get h1 b i == get h0 b i) + /\ get h1 b (v n) == z)) + [SMTPat (as_seq h1 b); SMTPat (Seq.upd (as_seq h0 b) (v n) z)] +let lemma_upd_quantifiers #a h0 h1 b n z = + assert(forall (i:nat). i < length b ==> get h1 b i == Seq.index (as_seq h1 b) i) + +#reset-options "--initial_fuel 0 --max_fuel 0 --z3rlimit 8" + +val lemma_blit_quantifiers: #a:Type -> h0:mem -> h1:mem -> b:buffer a -> bi:UInt32.t{v bi <= length b} -> + b':buffer a{disjoint b b'} -> bi':UInt32.t{v bi' <= length b'} -> len:UInt32.t{v bi+v len <= length b /\ v bi'+v len <= length b'} -> Lemma + (requires (live h0 b /\ live h0 b' /\ live h1 b' + /\ Seq.slice (as_seq h1 b') (v bi') (v bi'+v len) == Seq.slice (as_seq h0 b) (v bi) (v bi+v len) + /\ Seq.slice (as_seq h1 b') 0 (v bi') == Seq.slice (as_seq h0 b') 0 (v bi') + /\ Seq.slice (as_seq h1 b') (v bi'+v len) (length b') == Seq.slice (as_seq h0 b') (v bi'+v len) (length b') + )) + (ensures (live h0 b /\ live h0 b' /\ live h1 b' + /\ (forall (i:nat). {:pattern (get h1 b' (v bi'+i))} i < v len ==> get h1 b' (v bi'+i) == get h0 b (v bi+i)) + /\ (forall (i:nat). 
{:pattern (get h1 b' i)} ((i >= v bi' + v len /\ i < length b') \/ i < v bi') ==> get h1 b' i == get h0 b' i) +)) +let lemma_blit_quantifiers #a h0 h1 b bi b' bi' len = + let lemma_post_1 (j:nat) = j < v len ==> get h1 b' (v bi'+j) == get h0 b (v bi+j) in + let qj_1 : j:nat -> Lemma (lemma_post_1 j) + = fun j -> assert (j < v len ==> Seq.index (Seq.slice (as_seq h1 b') (v bi') (v bi'+v len)) j + == Seq.index (Seq.slice (as_seq h0 b) (v bi) (v bi+v len)) j) in + let lemma_post_2 (j:nat) = ((j >= v bi' + v len /\ j < length b') \/ j < v bi') + ==> get h1 b' j == get h0 b' j in + let qj_2 : j:nat -> Lemma (lemma_post_2 j) + = fun j -> assert (j < v bi' ==> Seq.index (Seq.slice (as_seq h1 b') 0 (v bi')) j + == Seq.index (Seq.slice (as_seq h0 b') 0 (v bi')) j); + assert ((j >= v bi' + v len /\ j < length b') + ==> Seq.index (Seq.slice (as_seq h1 b') (v bi'+v len) (length b')) (j - (v bi'+v len)) + == Seq.index (Seq.slice (as_seq h0 b') (v bi'+v len) (length b')) (j - (v bi'+v len))) + in + Classical.forall_intro #_ #lemma_post_1 qj_1; + Classical.forall_intro #_ #lemma_post_2 qj_2 + + +(* Equality predicate between buffers with quantifiers *) +val eq_lemma: #a:Type -> h:mem -> b:buffer a{live h b} -> h':mem -> b':buffer a{live h' b'} -> Lemma + (requires (equal h b h' b')) + (ensures (length b = length b' /\ (forall (i:nat). {:pattern (get h b i)} + i < length b ==> get h b i == get h' b' i))) + [SMTPat (equal h b h' b')] +let eq_lemma #a h b h' b' = + assert(Seq.length (as_seq h b) = Seq.length (as_seq h' b')); + let lemma_post (j:nat) = j < length b ==> get h b j == get h' b' j in + let qj : j:nat -> Lemma (lemma_post j) = fun j -> + assert(j < length b ==> Seq.index (as_seq h b) j == Seq.index (as_seq h' b') j) in + Classical.forall_intro #_ #lemma_post qj diff --git a/stage0/ulib/legacy/FStar.Buffer.fst b/stage0/ulib/legacy/FStar.Buffer.fst new file mode 100644 index 00000000000..1911536b625 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Buffer.fst @@ -0,0 +1,1465 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Buffer + +open FStar.Seq +open FStar.UInt32 +module Int32 = FStar.Int32 +open FStar.HyperStack +open FStar.HyperStack.ST +open FStar.Ghost +module L = FStar.List.Tot.Base + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +#set-options "--initial_fuel 0 --max_fuel 0" + +//17-01-04 usage? move to UInt? 
+let lemma_size (x:int) : Lemma (requires (UInt.size x n)) + (ensures (x >= 0)) + [SMTPat (UInt.size x n)] + = () + +let lseq (a: Type) (l: nat) : Type = + (s: seq a { Seq.length s == l } ) + +(* Buffer general type, fully implemented on FStar's arrays *) +noeq private type _buffer (a:Type) = + | MkBuffer: max_length:UInt32.t + -> content:reference (lseq a (v max_length)) + -> idx:UInt32.t + -> length:UInt32.t{v idx + v length <= v max_length} + -> _buffer a + +(* Exposed buffer type *) +type buffer (a:Type) = _buffer a + +(* Ghost getters for specifications *) +// TODO: remove `contains` after replacing all uses with `live` +let contains #a h (b:buffer a) : GTot Type0 = HS.contains h b.content +let unused_in #a (b:buffer a) h : GTot Type0 = HS.unused_in b.content h + +(* In most cases `as_seq` should be used instead of this one. *) +let sel #a h (b:buffer a) : GTot (seq a) = HS.sel h b.content + +let max_length #a (b:buffer a) : GTot nat = v b.max_length +let length #a (b:buffer a) : GTot nat = v b.length +let idx #a (b:buffer a) : GTot nat = v b.idx + +//17-01-04 rename to container or ref? +let content #a (b:buffer a) : + GTot (reference (lseq a (max_length b))) = b.content + +(* Lifting from buffer to reference *) +let as_ref #a (b:buffer a) = as_ref (content b) +let as_addr #a (b:buffer a) = as_addr (content b) +let frameOf #a (b:buffer a) : GTot HS.rid = HS.frameOf (content b) + +(* Liveliness condition, necessary for any computation on the buffer *) +let live #a (h:mem) (b:buffer a) : GTot Type0 = HS.contains h b.content +let unmapped_in #a (b:buffer a) (h:mem) : GTot Type0 = unused_in b h + +val recall: #a:Type + -> b:buffer a{is_eternal_region (frameOf b) /\ not (is_mm b.content)} -> Stack unit + (requires (fun m -> True)) + (ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b)) +let recall #a b = recall b.content + +(* Ghostly access an element of the array, or the full underlying sequence *) +let as_seq #a h (b:buffer a) : GTot (s:seq a{Seq.length s == length b}) = + Seq.slice (sel h b) (idx b) (idx b + length b) + +let get #a h (b:buffer a) (i:nat{i < length b}) : GTot a = + Seq.index (as_seq h b) i + +(* Equality predicate on buffer contents, without quantifiers *) +//17-01-04 revise comment? rename? 
+let equal #a h (b:buffer a) h' (b':buffer a) : GTot Type0 = + as_seq h b == as_seq h' b' + +(* y is included in x / x contains y *) +let includes #a (x:buffer a) (y:buffer a) : GTot Type0 = + x.max_length == y.max_length /\ + x.content === y.content /\ + idx y >= idx x /\ + idx x + length x >= idx y + length y + +let includes_live #a h (x: buffer a) (y: buffer a) +: Lemma + (requires (x `includes` y)) + (ensures (live h x <==> live h y)) += () + +let includes_as_seq #a h1 h2 (x: buffer a) (y: buffer a) +: Lemma + (requires (x `includes` y /\ as_seq h1 x == as_seq h2 x)) + (ensures (as_seq h1 y == as_seq h2 y)) += Seq.slice_slice (sel h1 x) (idx x) (idx x + length x) (idx y - idx x) (idx y - idx x + length y); + Seq.slice_slice (sel h2 x) (idx x) (idx x + length x) (idx y - idx x) (idx y - idx x + length y) + +let includes_trans #a (x y z: buffer a) +: Lemma + (requires (x `includes` y /\ y `includes` z)) + (ensures (x `includes` z)) += () + +(* Disjointness between two buffers *) +let disjoint #a #a' (x:buffer a) (y:buffer a') : GTot Type0 = + frameOf x =!= frameOf y \/ as_addr x =!= as_addr y + \/ (a == a' /\ as_addr x == as_addr y /\ frameOf x == frameOf y /\ x.max_length == y.max_length /\ + (idx x + length x <= idx y \/ idx y + length y <= idx x)) + +(* Disjointness is symmetric *) +let lemma_disjoint_symm #a #a' (x:buffer a) (y:buffer a') : Lemma + (requires True) + (ensures (disjoint x y <==> disjoint y x)) + [SMTPat (disjoint x y)] + = () + +let lemma_disjoint_sub #a #a' (x:buffer a) (subx:buffer a) (y:buffer a') : Lemma + (requires (includes x subx /\ disjoint x y)) + (ensures (disjoint subx y)) + [SMTPat (disjoint subx y); SMTPat (includes x subx)] + = () + +let lemma_disjoint_sub' #a #a' (x:buffer a) (subx:buffer a) (y:buffer a') : Lemma + (requires (includes x subx /\ disjoint x y)) + (ensures (disjoint subx y)) + [SMTPat (disjoint y subx); SMTPat (includes x subx)] + = () + +val lemma_live_disjoint: #a:Type -> #a':Type -> h:mem -> b:buffer a -> b':buffer a' -> Lemma + (requires (live h b /\ b' `unused_in` h)) + (ensures (disjoint b b')) + [SMTPat (disjoint b b'); SMTPat (live h b)] +let lemma_live_disjoint #a #a' h b b' = () + +(* Heterogeneous buffer type *) +noeq type abuffer = | Buff: #t:Type -> b:buffer t -> abuffer + +(* let empty : TSet.set abuffer = TSet.empty #abuffer *) +let only #t (b:buffer t) : Tot (TSet.set abuffer) = FStar.TSet.singleton (Buff #t b) +(* let op_Plus_Plus #a s (b:buffer a) : Tot (TSet.set abuffer) = TSet.union s (only b) *) +(* let op_Plus_Plus_Plus set1 set2 : Tot (TSet.set abuffer) = FStar.TSet.union set1 set2 *) + +let op_Bang_Bang = TSet.singleton +let op_Plus_Plus = TSet.union + +(* Maps a set of buffer to the set of their references *) +assume val arefs: TSet.set abuffer -> Tot (Set.set nat) + +assume Arefs_def: forall (x:nat) (s:TSet.set abuffer). {:pattern (Set.mem x (arefs s))} + Set.mem x (arefs s) <==> (exists (y:abuffer). 
TSet.mem y s /\ as_addr y.b == x) + +val lemma_arefs_1: s:TSet.set abuffer -> Lemma + (requires (s == TSet.empty #abuffer)) + (ensures (arefs s == Set.empty #nat)) + [SMTPat (arefs s)] +let lemma_arefs_1 s = Set.lemma_equal_intro (arefs s) (Set.empty) + +val lemma_arefs_2: s1:TSet.set abuffer -> s2:TSet.set abuffer -> Lemma + (requires True) + (ensures (arefs (s1 ++ s2) == Set.union (arefs s1) (arefs s2))) + [SMTPatOr [ + [SMTPat (arefs (s2 ++ s1))]; + [SMTPat (arefs (s1 ++ s2))] + ]] +let lemma_arefs_2 s1 s2 = + Set.lemma_equal_intro (arefs (s1 ++ s2)) (Set.union (arefs s1) (arefs s2)) + +val lemma_arefs_3: s1:TSet.set abuffer -> s2:TSet.set abuffer -> Lemma + (requires (TSet.subset s1 s2)) + (ensures (Set.subset (arefs s1) (arefs s2))) +let lemma_arefs_3 s1 s2 = () + +(* General disjointness predicate between a buffer and a set of heterogeneous buffers *) +let disjoint_from_bufs #a (b:buffer a) (bufs:TSet.set abuffer) = + forall b'. TSet.mem b' bufs ==> disjoint b b'.b + +(* General disjointness predicate between a buffer and a set of heterogeneous references *) +let disjoint_from_refs #a (b:buffer a) (set:Set.set nat) = + ~(Set.mem (as_addr b) set) + + +(* Similar but specialized disjointness predicates *) +let disjoint_1 a b = disjoint a b +let disjoint_2 a b b' = disjoint a b /\ disjoint a b' +let disjoint_3 a b b' b'' = disjoint a b /\ disjoint a b' /\ disjoint a b'' +let disjoint_4 a b b' b'' b''' = disjoint a b /\ disjoint a b' /\ disjoint a b'' /\ disjoint a b''' +let disjoint_5 a b b' b'' b''' b'''' = disjoint a b /\ disjoint a b' /\ disjoint a b'' /\ disjoint a b''' /\ disjoint a b'''' + +let disjoint_ref_1 (#t:Type) (#u:Type) (a:buffer t) (r:reference u) = + frameOf a =!= HS.frameOf r \/ as_addr a =!= HS.as_addr r +let disjoint_ref_2 a r r' = disjoint_ref_1 a r /\ disjoint_ref_1 a r' +let disjoint_ref_3 a r r' r'' = disjoint_ref_1 a r /\ disjoint_ref_2 a r' r'' +let disjoint_ref_4 a r r' r'' r''' = disjoint_ref_1 a r /\ disjoint_ref_3 a r' r'' r''' +let disjoint_ref_5 a r r' r'' r''' r'''' = disjoint_ref_1 a r /\ disjoint_ref_4 a r' r'' r''' r'''' + +val disjoint_only_lemma: #a:Type -> #a':Type -> b:buffer a -> b':buffer a' -> Lemma + (requires (disjoint b b')) + (ensures (disjoint_from_bufs b (only b'))) +let disjoint_only_lemma #a #a' b b' = () + +(* Fully general modifies clause *) +let modifies_bufs_and_refs (bufs:TSet.set abuffer) (refs:Set.set nat) h h' : GTot Type0 = + (forall rid. Set.mem rid (Map.domain (HS.get_hmap h)) ==> + (HS.modifies_ref rid (Set.union (arefs bufs) refs) h h' + /\ (forall (#a:Type) (b:buffer a). (frameOf b == rid /\ live h b /\ disjoint_from_bufs b bufs + /\ disjoint_from_refs b refs) ==> equal h b h' b /\ live h' b))) + +(* Fully general modifies clause for buffer sets *) +let modifies_buffers (bufs:TSet.set abuffer) h h' : GTot Type0 = + (forall rid. Set.mem rid (Map.domain (HS.get_hmap h)) ==> + (HS.modifies_ref rid (arefs bufs) h h' /\ + (forall (#a:Type) (b:buffer a). {:pattern (frameOf b == rid /\ live h b /\ disjoint_from_bufs b bufs)} + (frameOf b == rid /\ live h b /\ disjoint_from_bufs b bufs ==> equal h b h' b /\ live h' b)))) + +(* General modifies clause for buffers only *) +let modifies_bufs rid buffs h h' = + modifies_ref rid (arefs buffs) h h' + /\ (forall (#a:Type) (b:buffer a). 
(frameOf b == rid /\ live h b /\ disjoint_from_bufs b buffs) ==> equal h b h' b /\ live h' b) + +let modifies_none h h' = + HS.get_tip h' == HS.get_tip h /\ HS.modifies_transitively Set.empty h h' + +(* Specialized clauses for small numbers of buffers *) +let modifies_buf_0 rid h h' = + modifies_ref rid (Set.empty #nat) h h' + /\ (forall (#tt:Type) (bb:buffer tt). (frameOf bb == rid /\ live h bb) ==> equal h bb h' bb /\ live h' bb) + +let modifies_buf_1 (#t:Type) rid (b:buffer t) h h' = //would be good to drop the rid argument on these, since they can be computed from the buffers + modifies_ref rid (Set.singleton (Heap.addr_of (as_ref b))) h h' + /\ (forall (#tt:Type) (bb:buffer tt). (frameOf bb == rid /\ live h bb /\ disjoint b bb) ==> equal h bb h' bb /\ live h' bb) + +let to_set_2 (n1:nat) (n2:nat) :Set.set nat = Set.union (Set.singleton n1) (Set.singleton n2) + +let modifies_buf_2 (#t:Type) (#t':Type) rid (b:buffer t) (b':buffer t') h h' = + modifies_ref rid (to_set_2 (as_addr b) (as_addr b')) h h' + /\ (forall (#tt:Type) (bb:buffer tt). (frameOf bb == rid /\ live h bb /\ disjoint b bb /\ disjoint b' bb) + ==> equal h bb h' bb /\ live h' bb) + +let to_set_3 (n1:nat) (n2:nat) (n3:nat) :Set.set nat = Set.union (Set.union (Set.singleton n1) (Set.singleton n2)) (Set.singleton n3) + +let modifies_buf_3 (#t:Type) (#t':Type) (#t'':Type) rid (b:buffer t) (b':buffer t') (b'':buffer t'') h h' = + modifies_ref rid (to_set_3 (as_addr b) (as_addr b') (as_addr b'')) h h' + /\ (forall (#tt:Type) (bb:buffer tt). (frameOf bb == rid /\ live h bb /\ disjoint b bb /\ disjoint b' bb /\ disjoint b'' bb) + ==> equal h bb h' bb /\ live h' bb) + +let to_set_4 (n1:nat) (n2:nat) (n3:nat) (n4:nat) :Set.set nat = + Set.union (Set.union (Set.union (Set.singleton n1) (Set.singleton n2)) (Set.singleton n3)) (Set.singleton n4) + +let modifies_buf_4 (#t:Type) (#t':Type) (#t'':Type) (#t''':Type) rid (b:buffer t) (b':buffer t') (b'':buffer t'') (b''':buffer t''') h h' = + modifies_ref rid (to_set_4 (as_addr b) (as_addr b') (as_addr b'') (as_addr b''')) h h' + /\ (forall (#tt:Type) (bb:buffer tt). 
(frameOf bb == rid /\ live h bb /\ disjoint b bb /\ disjoint b' bb /\ disjoint b'' bb /\ disjoint b''' bb) + ==> equal h bb h' bb /\ live h' bb) + + +(* General lemmas for the modifies_bufs clause *) +let lemma_modifies_bufs_trans rid bufs h0 h1 h2 : + Lemma (requires (modifies_bufs rid bufs h0 h1 /\ modifies_bufs rid bufs h1 h2)) + (ensures (modifies_bufs rid bufs h0 h2)) + [SMTPat (modifies_bufs rid bufs h0 h1); SMTPat (modifies_bufs rid bufs h1 h2)] + = () + +let lemma_modifies_bufs_sub rid bufs subbufs h0 h1 : + Lemma + (requires (TSet.subset subbufs bufs /\ modifies_bufs rid subbufs h0 h1)) + (ensures (modifies_bufs rid bufs h0 h1)) + [SMTPat (modifies_bufs rid subbufs h0 h1); SMTPat (TSet.subset subbufs bufs)] + = () + +val lemma_modifies_bufs_subset: #a:Type -> #a':Type -> h0:mem -> h1:mem -> bufs:TSet.set abuffer -> b:buffer a -> b':buffer a' -> Lemma + (requires (disjoint_from_bufs b (bufs ++ (only b')) )) + (ensures (disjoint_from_bufs b bufs)) + [SMTPat (modifies_bufs (HS.get_tip h0) (bufs ++ (only b')) h0 h1); SMTPat (live h0 b)] +let lemma_modifies_bufs_subset #a #a' h0 h1 bufs b b' = () + +val lemma_modifies_bufs_superset: #a:Type -> #a':Type -> h0:mem -> h1:mem -> bufs:TSet.set abuffer -> b:buffer a -> b':buffer a' -> Lemma + (requires (b' `unused_in` h0 /\ live h0 b /\ disjoint_from_bufs b bufs)) + (ensures (disjoint_from_bufs b (bufs ++ (only b')))) + [SMTPat (modifies_bufs (HS.get_tip h0) bufs h0 h1); SMTPat (b' `unmapped_in` h0); SMTPat (live h0 b)] +let lemma_modifies_bufs_superset #a #a' h0 h1 bufs b b' = () + +(* Specialized lemmas *) +let modifies_trans_0_0 (rid:rid) (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_0 rid h0 h1 /\ modifies_buf_0 rid h1 h2)) + (ensures (modifies_buf_0 rid h0 h2)) + [SMTPat (modifies_buf_0 rid h0 h1); SMTPat (modifies_buf_0 rid h1 h2)] + = () + +let modifies_trans_1_0 (#t:Type) (rid:rid) (b:buffer t) (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_1 rid b h0 h1 /\ modifies_buf_0 rid h1 h2)) + (ensures (modifies_buf_1 rid b h0 h2)) + [SMTPat (modifies_buf_1 rid b h0 h1); SMTPat (modifies_buf_0 rid h1 h2)] + = () + +let modifies_trans_0_1 (#t:Type) (rid:rid) (b:buffer t) (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_0 rid h0 h1 /\ modifies_buf_1 rid b h1 h2)) + (ensures (modifies_buf_1 rid b h0 h2)) + [SMTPat (modifies_buf_0 rid h0 h1); SMTPat (modifies_buf_1 rid b h1 h2)] + = () + +let modifies_trans_1_1 (#t:Type) (rid:rid) (b:buffer t) (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid b h1 h2)) + (ensures (modifies_buf_1 rid b h0 h2)) + [SMTPat (modifies_buf_1 rid b h0 h1); SMTPat (modifies_buf_1 rid b h1 h2)] + = () + +let modifies_trans_1_1' (#t:Type) (#t':Type) (rid:rid) (b:buffer t) (b':buffer t') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid b' h1 h2)) + (ensures (modifies_buf_2 rid b b' h0 h2)) + [SMTPat (modifies_buf_1 rid b h0 h1); SMTPat (modifies_buf_1 rid b' h1 h2)] + = () + +let modifies_trans_2_0 (#t:Type) (#t':Type) (rid:rid) (b:buffer t) (b':buffer t') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_0 rid h1 h2)) + (ensures (modifies_buf_2 rid b b' h0 h2)) + [SMTPat (modifies_buf_2 rid b b' h0 h1); SMTPat (modifies_buf_0 rid h1 h2)] + = () + +let modifies_trans_2_1 (#t:Type) (#t':Type) (rid:rid) (b:buffer t) (b':buffer t') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_1 rid b h1 h2)) + (ensures (modifies_buf_2 rid b b' h0 h2)) + [SMTPat (modifies_buf_2 rid b b' h0 h1); SMTPat 
(modifies_buf_1 rid b h1 h2)] + = () + +let modifies_trans_2_1' (#t:Type) (#t':Type) (rid:rid) (b:buffer t) (b':buffer t') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_2 rid b' b h0 h1 /\ modifies_buf_1 rid b h1 h2)) + (ensures (modifies_buf_2 rid b b' h0 h2)) + [SMTPat (modifies_buf_2 rid b' b h0 h1); SMTPat (modifies_buf_1 rid b h1 h2)] + = () + +let modifies_trans_0_2 (#t:Type) (#t':Type) (rid:rid) (b:buffer t) (b':buffer t') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_0 rid h0 h1 /\ modifies_buf_2 rid b b' h1 h2)) + (ensures (modifies_buf_2 rid b b' h0 h2)) + [SMTPat (modifies_buf_0 rid h0 h1); SMTPat (modifies_buf_2 rid b b' h1 h2)] + = () + +let modifies_trans_1_2 (#t:Type) (#t':Type) (rid:rid) (b:buffer t) (b':buffer t') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_1 rid b h0 h1 /\ modifies_buf_2 rid b b' h1 h2)) + (ensures (modifies_buf_2 rid b b' h0 h2)) + [SMTPat (modifies_buf_1 rid b h0 h1); SMTPat (modifies_buf_2 rid b b' h1 h2)] + = () + +let modifies_trans_2_2 (#t:Type) (#t':Type) (rid:rid) (b:buffer t) (b':buffer t') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_2 rid b b' h1 h2)) + (ensures (modifies_buf_2 rid b b' h0 h2)) + [SMTPat (modifies_buf_2 rid b b' h0 h1); SMTPat (modifies_buf_2 rid b b' h1 h2)] + = () + +let modifies_trans_3_3 (#t #t' #t'':Type) (rid:rid) (b:buffer t) (b':buffer t') (b'':buffer t'') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_3 rid b b' b'' h0 h1 /\ modifies_buf_3 rid b b' b'' h1 h2)) + (ensures (modifies_buf_3 rid b b' b'' h0 h2)) + [SMTPat (modifies_buf_3 rid b b' b'' h0 h1); SMTPat (modifies_buf_3 rid b b' b'' h1 h2)] + = () + +let modifies_trans_4_4 (#t #t' #t'' #t''':Type) (rid:rid) (b:buffer t) (b':buffer t') (b'':buffer t'') (b''':buffer t''') (h0 h1 h2:mem) : + Lemma (requires (modifies_buf_4 rid b b' b'' b''' h0 h1 /\ modifies_buf_4 rid b b' b'' b''' h1 h2)) + (ensures (modifies_buf_4 rid b b' b'' b''' h0 h2)) + [SMTPat (modifies_buf_4 rid b b' b'' b''' h0 h1); SMTPat (modifies_buf_4 rid b b' b'' b''' h1 h2)] + = () + +(* TODO: complete with specialized versions of every general lemma *) + +(* Modifies clauses that do not change the shape of the HyperStack ((HS.get_tip h1) = (HS.get_tip h0)) *) +(* NB: those clauses are made abstract in order to make verification faster +// Lemmas follow to allow the programmer to make use of the real definition +// of those predicates in a general setting *) +let modifies_0 (h0 h1:mem) :Type0 = + modifies_one (HS.get_tip h0) h0 h1 + /\ modifies_buf_0 (HS.get_tip h0) h0 h1 + /\ HS.get_tip h0 == HS.get_tip h1 + +(* This one is very generic: it says +// * - some references have changed in the frame of b, but +// * - among all buffers in this frame, b is the only one that changed. 
*) +let modifies_1 (#a:Type) (b:buffer a) (h0 h1:mem) :Type0 = + let rid = frameOf b in + modifies_one rid h0 h1 /\ modifies_buf_1 rid b h0 h1 /\ HS.get_tip h0 == HS.get_tip h1 + +let modifies_2_1 (#a:Type) (b:buffer a) (h0 h1:mem) :Type0 = + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in + ((rid == HS.get_tip h0 /\ modifies_buf_1 rid b h0 h1 /\ modifies_one rid h0 h1) + \/ (rid =!= HS.get_tip h0 /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1 ))) + +let modifies_2 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') (h0 h1:mem) :Type0 = + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in + ((rid == rid' /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid =!= rid' /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid')) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 ))) + +let modifies_3 (#a:Type) (#a':Type) (#a'':Type) (b:buffer a) (b':buffer a') (b'':buffer a'') (h0 h1:mem) :Type0 = + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in let rid'' = frameOf b'' in + ((rid == rid' /\ rid' == rid'' /\ modifies_buf_3 rid b b' b'' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid == rid' /\ rid' =!= rid'' /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_1 rid'' b'' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid'')) h0 h1 ) + \/ (rid =!= rid' /\ rid' == rid'' /\ modifies_buf_2 rid' b' b'' h0 h1 /\ modifies_buf_1 rid b h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid'')) h0 h1 ) + \/ (rid == rid'' /\ rid' =!= rid'' /\ modifies_buf_2 rid b b'' h0 h1 /\ modifies_buf_1 rid' b' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid')) h0 h1 ) + \/ (rid =!= rid' /\ rid' =!= rid'' /\ rid =!= rid'' + /\ HS.modifies (Set.union (Set.union (Set.singleton rid) (Set.singleton rid')) (Set.singleton rid'')) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_1 rid'' b'' h0 h1))) + +let modifies_3_2 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') (h0 h1:mem) :Type0 = + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in + ((rid == rid' /\ rid' == HS.get_tip h0 /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid == rid' /\ rid' =!= HS.get_tip h0 /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid == HS.get_tip h0 /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid') (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid' == HS.get_tip h0 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_1 rid b h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid' =!= HS.get_tip h0 /\ rid =!= HS.get_tip h0 + /\ HS.modifies (Set.union (Set.union (Set.singleton rid) (Set.singleton rid')) (Set.singleton (HS.get_tip h0))) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1))) + +let modifies_region (rid:rid) (bufs:TSet.set abuffer) (h0 h1:mem) :Type0 = + modifies_one rid h0 h1 /\ modifies_bufs rid bufs h0 h1 /\ HS.get_tip h0 == HS.get_tip h1 + +(* Lemmas introducing the 'modifies' predicates *) +let lemma_intro_modifies_0 h0 h1 : 
Lemma + (requires (modifies_one (HS.get_tip h0) h0 h1 + /\ modifies_buf_0 (HS.get_tip h0) h0 h1 + /\ HS.get_tip h0 == HS.get_tip h1)) + (ensures (modifies_0 h0 h1)) + = () + +let lemma_intro_modifies_1 (#a:Type) (b:buffer a) h0 h1 : Lemma + (requires (let rid = frameOf b in + modifies_one rid h0 h1 /\ modifies_buf_1 rid b h0 h1 /\ HS.get_tip h0 == HS.get_tip h1)) + (ensures (modifies_1 b h0 h1)) + = () + +let lemma_intro_modifies_2_1 (#a:Type) (b:buffer a) h0 h1 : Lemma + (requires ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in + ((rid == HS.get_tip h0 /\ modifies_buf_1 rid b h0 h1 /\ modifies_one rid h0 h1) + \/ (rid =!= HS.get_tip h0 /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1 ))))) + (ensures (modifies_2_1 b h0 h1)) + = () + +let lemma_intro_modifies_2 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 : Lemma + (requires ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in + ((rid == rid' /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid =!= rid' /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid')) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 ))))) + (ensures (modifies_2 b b' h0 h1)) + = () + +let lemma_intro_modifies_3 (#a:Type) (#a':Type) (#a'':Type) (b:buffer a) (b':buffer a') (b'':buffer a'') h0 h1 : Lemma + (requires ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in let rid'' = frameOf b'' in + ((rid == rid' /\ rid' == rid'' /\ modifies_buf_3 rid b b' b'' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid == rid' /\ rid' =!= rid'' /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_1 rid'' b'' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid'')) h0 h1 ) + \/ (rid =!= rid' /\ rid' == rid'' /\ modifies_buf_2 rid' b' b'' h0 h1 /\ modifies_buf_1 rid b h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid'')) h0 h1 ) + \/ (rid == rid'' /\ rid' =!= rid'' /\ modifies_buf_2 rid b b'' h0 h1 /\ modifies_buf_1 rid' b' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid')) h0 h1 ) + \/ (rid =!= rid' /\ rid' =!= rid'' /\ rid =!= rid'' + /\ HS.modifies (Set.union (Set.union (Set.singleton rid) (Set.singleton rid')) (Set.singleton rid'')) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_1 rid'' b'' h0 h1))))) + (ensures (modifies_3 b b' b'' h0 h1)) + = () + +let lemma_intro_modifies_3_2 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 : Lemma + (requires ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in + ((rid == rid' /\ rid' == HS.get_tip h0 /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid == rid' /\ rid' =!= HS.get_tip h0 /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid == HS.get_tip h0 /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid') (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid' == HS.get_tip h0 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_1 rid b h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid' =!= HS.get_tip h0 /\ rid =!= HS.get_tip h0 + /\ HS.modifies (Set.union (Set.union 
(Set.singleton rid) (Set.singleton rid')) (Set.singleton (HS.get_tip h0))) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1))))) + (ensures (modifies_3_2 b b' h0 h1)) + = () + +let lemma_intro_modifies_region (rid:rid) bufs h0 h1 : Lemma + (requires (modifies_one rid h0 h1 /\ modifies_bufs rid bufs h0 h1 /\ HS.get_tip h0 == HS.get_tip h1)) + (ensures (modifies_region rid bufs h0 h1)) + = () + + +(* Lemmas revealing the content of the specialized modifies clauses in order to +// be able to generalize them if needs be. *) +let lemma_reveal_modifies_0 h0 h1 : Lemma + (requires (modifies_0 h0 h1)) + (ensures (modifies_one (HS.get_tip h0) h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1 /\ HS.get_tip h0 == HS.get_tip h1)) + = () + +let lemma_reveal_modifies_1 (#a:Type) (b:buffer a) h0 h1 : Lemma + (requires (modifies_1 b h0 h1)) + (ensures (let rid = frameOf b in modifies_one rid h0 h1 /\ modifies_buf_1 rid b h0 h1 /\ HS.get_tip h0 == HS.get_tip h1)) + = () + +let lemma_reveal_modifies_2_1 (#a:Type) (b:buffer a) h0 h1 : Lemma + (requires (modifies_2_1 b h0 h1)) + (ensures ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in + ((rid == HS.get_tip h0 /\ modifies_buf_1 rid b h0 h1 /\ modifies_one rid h0 h1) + \/ (rid =!= HS.get_tip h0 /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1 ))))) + = () + +let lemma_reveal_modifies_2 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 : Lemma + (requires (modifies_2 b b' h0 h1)) + (ensures ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in + ((rid == rid' /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid =!= rid' /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid')) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 )) ))) + = () + +let lemma_reveal_modifies_3 (#a:Type) (#a':Type) (#a'':Type) (b:buffer a) (b':buffer a') (b'':buffer a'') h0 h1 : Lemma + (requires (modifies_3 b b' b'' h0 h1)) + (ensures ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in let rid'' = frameOf b'' in + ((rid == rid' /\ rid' == rid'' /\ modifies_buf_3 rid b b' b'' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid == rid' /\ rid' =!= rid'' /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_1 rid'' b'' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid'')) h0 h1 ) + \/ (rid =!= rid' /\ rid' == rid'' /\ modifies_buf_2 rid' b' b'' h0 h1 /\ modifies_buf_1 rid b h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid'')) h0 h1 ) + \/ (rid == rid'' /\ rid' =!= rid'' /\ modifies_buf_2 rid b b'' h0 h1 /\ modifies_buf_1 rid' b' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton rid')) h0 h1 ) + \/ (rid =!= rid' /\ rid' =!= rid'' /\ rid =!= rid'' + /\ HS.modifies (Set.union (Set.union (Set.singleton rid) (Set.singleton rid')) (Set.singleton rid'')) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_1 rid'' b'' h0 h1)) ))) + = () + +let lemma_reveal_modifies_3_2 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 : Lemma + (requires (modifies_3_2 b b' h0 h1)) + (ensures ( + HS.get_tip h0 == HS.get_tip h1 /\ + (let rid = frameOf b in let rid' = frameOf b' in + ((rid == rid' /\ rid' == HS.get_tip h0 /\ modifies_buf_2 rid b b' h0 h1 /\ modifies_one rid h0 h1) + \/ (rid == rid' /\ rid' =!= HS.get_tip h0 /\ 
modifies_buf_2 rid b b' h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid == HS.get_tip h0 /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid') (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid' == HS.get_tip h0 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_1 rid b h0 h1 + /\ HS.modifies (Set.union (Set.singleton rid) (Set.singleton (HS.get_tip h0))) h0 h1 ) + \/ (rid =!= rid' /\ rid' =!= HS.get_tip h0 /\ rid =!= HS.get_tip h0 + /\ HS.modifies (Set.union (Set.union (Set.singleton rid) (Set.singleton rid')) (Set.singleton (HS.get_tip h0))) h0 h1 + /\ modifies_buf_1 rid b h0 h1 /\ modifies_buf_1 rid' b' h0 h1 /\ modifies_buf_0 (HS.get_tip h0) h0 h1)) ))) + = () + +let lemma_reveal_modifies_region (rid:rid) bufs h0 h1 : Lemma + (requires (modifies_region rid bufs h0 h1)) + (ensures (modifies_one rid h0 h1 /\ modifies_bufs rid bufs h0 h1 /\ HS.get_tip h0 == HS.get_tip h1)) + = () + +#reset-options "--z3rlimit 100 --max_fuel 0 --max_ifuel 0 --initial_fuel 0 --initial_ifuel 0" + +(* Stack effect specific lemmas *) +let lemma_stack_1 (#a:Type) (b:buffer a) h0 h1 h2 h3 : Lemma + (requires (live h0 b /\ fresh_frame h0 h1 /\ modifies_1 b h1 h2 /\ popped h2 h3)) + (ensures (modifies_buf_1 (frameOf b) b h0 h3)) + [SMTPat (modifies_1 b h1 h2); SMTPat (fresh_frame h0 h1); SMTPat (popped h2 h3)] + = () + +let lemma_stack_2 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 h3 : Lemma + (requires (live h0 b /\ live h0 b' /\ fresh_frame h0 h1 /\ modifies_2 b b' h1 h2 /\ popped h2 h3)) + (ensures (modifies_2 b b' h0 h3)) + [SMTPat (modifies_2 b b' h1 h2); SMTPat (fresh_frame h0 h1); SMTPat (popped h2 h3)] + = () + +(* Specialized modifies clauses lemmas + associated SMTPatterns. 
Those are critical for +// verification as the specialized modifies clauses are abstract from outside the +// module *) + +(** Commutativity lemmas *) +let lemma_modifies_2_comm (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 : Lemma + (requires True) + (ensures (modifies_2 b b' h0 h1 <==> modifies_2 b' b h0 h1)) + [SMTPat (modifies_2 b b' h0 h1)] + = () + +let lemma_modifies_3_2_comm (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 : Lemma + (requires True) + (ensures (modifies_3_2 b b' h0 h1 <==> modifies_3_2 b' b h0 h1)) + [SMTPat (modifies_3_2 b b' h0 h1)] + = () +(* TODO: add commutativity lemmas for modifies_3 *) + +#reset-options "--z3rlimit 20" + +(** Transitivity lemmas *) +let lemma_modifies_0_trans h0 h1 h2 : Lemma + (requires (modifies_0 h0 h1 /\ modifies_0 h1 h2)) + (ensures (modifies_0 h0 h2)) + [SMTPat (modifies_0 h0 h1); SMTPat (modifies_0 h1 h2)] + = () + +let lemma_modifies_1_trans (#a:Type) (b:buffer a) h0 h1 h2 : Lemma + (requires (modifies_1 b h0 h1 /\ modifies_1 b h1 h2)) + (ensures (modifies_1 b h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_1 b h1 h2)] + = () + +let lemma_modifies_2_1_trans (#a:Type) (b:buffer a) h0 h1 h2 : Lemma + (requires (modifies_2_1 b h0 h1 /\ modifies_2_1 b h1 h2)) + (ensures (modifies_2_1 b h0 h2)) + [SMTPat (modifies_2_1 b h0 h1); SMTPat (modifies_2_1 b h1 h2)] + = () + +let lemma_modifies_2_trans (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (modifies_2 b b' h0 h1 /\ modifies_2 b b' h1 h2)) + (ensures (modifies_2 b b' h0 h2)) + (* TODO: Make the following work and merge with the following lemma *) + (* [SMTPatOr [ *) + (* [SMTPat (modifies_2 b b' h0 h1); *) + (* SMTPat (modifies_2 b' b h0 h1)]]; *) + (* SMTPat (modifies_2 b' b h1 h2)] *) + [SMTPat (modifies_2 b b' h0 h1); SMTPat (modifies_2 b b' h1 h2)] + = () + +let lemma_modifies_2_trans' (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (modifies_2 b b' h0 h1 /\ modifies_2 b b' h1 h2)) + (ensures (modifies_2 b b' h0 h2)) + [SMTPat (modifies_2 b' b h0 h1); SMTPat (modifies_2 b b' h1 h2)] + = () + +#reset-options "--z3rlimit 40" + +let lemma_modifies_3_trans (#a:Type) (#a':Type) (#a'':Type) (b:buffer a) (b':buffer a') (b'':buffer a'') h0 h1 h2 : Lemma + (requires (modifies_3 b b' b'' h0 h1 /\ modifies_3 b b' b'' h1 h2)) + (ensures (modifies_3 b b' b'' h0 h2)) + (* TODO: add the appropriate SMTPatOr patterns so as not to rewrite X times the same lemma *) + [SMTPat (modifies_3 b b' b'' h0 h1); SMTPat (modifies_3 b b' b'' h1 h2)] + = () + +#reset-options "--z3rlimit 200" + +let lemma_modifies_3_2_trans (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (modifies_3_2 b b' h0 h1 /\ modifies_3_2 b b' h1 h2)) + (ensures (modifies_3_2 b b' h0 h2)) + [SMTPat (modifies_3_2 b b' h0 h1); SMTPat (modifies_3_2 b b' h1 h2)] + = () +let lemma_modifies_3_2_trans' (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (modifies_3_2 b' b h0 h1 /\ modifies_3_2 b b' h1 h2)) + (ensures (modifies_3_2 b b' h0 h2)) + [SMTPat (modifies_3_2 b' b h0 h1); SMTPat (modifies_3_2 b b' h1 h2)] + = () + +#reset-options "--z3rlimit 20" + +(* Specific modifies clause lemmas *) +val lemma_modifies_0_0: h0:mem -> h1:mem -> h2:mem -> Lemma + (requires (modifies_0 h0 h1 /\ modifies_0 h1 h2)) + (ensures (modifies_0 h0 h2)) + [SMTPat (modifies_0 h0 h1); SMTPat (modifies_0 h1 h2)] +let lemma_modifies_0_0 h0 h1 h2 = () + +#reset-options "--z3rlimit 20 --initial_fuel 0 --max_fuel 0" + +let 
lemma_modifies_1_0 (#a:Type) (b:buffer a) h0 h1 h2 : Lemma + (requires (live h0 b /\ modifies_1 b h0 h1 /\ modifies_0 h1 h2)) + (ensures (live h2 b /\ modifies_2_1 b h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_0 h1 h2)] + = () + +let lemma_modifies_0_1 (#a:Type) (b:buffer a) h0 h1 h2 : Lemma + (requires (live h0 b /\ modifies_0 h0 h1 /\ modifies_1 b h1 h2)) + (ensures (modifies_2_1 b h0 h2)) + [SMTPat (modifies_0 h0 h1); SMTPat (modifies_1 b h1 h2)] + = () + +let lemma_modifies_0_1' (#a:Type) (b:buffer a) h0 h1 h2 : Lemma + (requires (b `unused_in` h0 /\ modifies_0 h0 h1 /\ live h1 b /\ modifies_1 b h1 h2)) + (ensures (modifies_0 h0 h2)) + [SMTPat (modifies_0 h0 h1); SMTPat (modifies_1 b h1 h2)] + = () + +#reset-options "--z3rlimit 100 --initial_fuel 0 --max_fuel 0" + +let lemma_modifies_1_1 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (live h0 b /\ live h0 b' /\ modifies_1 b h0 h1 /\ modifies_1 b' h1 h2)) + (ensures (modifies_2 b b' h0 h2 /\ modifies_2 b' b h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_1 b' h1 h2)] + = if frameOf b = frameOf b' then modifies_trans_1_1' (frameOf b) b b' h0 h1 h2 + else () + +#reset-options "--z3rlimit 200 --initial_fuel 0 --max_fuel 0" + +let lemma_modifies_0_2 (#t:Type) (#t':Type) (b:buffer t) (b':buffer t') h0 h1 h2 : Lemma + (requires (live h0 b /\ b' `unused_in` h0 /\ modifies_0 h0 h1 /\ live h1 b' + /\ modifies_2 b b' h1 h2)) + (ensures (modifies_2_1 b h0 h2)) + [SMTPat (modifies_2 b b' h1 h2); SMTPat (modifies_0 h0 h1)] + = () + +let lemma_modifies_0_2' (#t:Type) (#t':Type) (b:buffer t) (b':buffer t') h0 h1 h2 : Lemma + (requires (live h0 b /\ b' `unused_in` h0 /\ modifies_0 h0 h1 /\ live h1 b' + /\ modifies_2 b' b h1 h2)) + (ensures (modifies_2_1 b h0 h2)) + [SMTPat (modifies_2 b' b h1 h2); SMTPat (modifies_0 h0 h1)] + = () + +let lemma_modifies_1_2 (#t:Type) (#t':Type) (b:buffer t) (b':buffer t') h0 h1 h2 : Lemma + (requires (live h0 b /\ modifies_1 b h0 h1 /\ b' `unused_in` h0 /\ live h1 b' /\ + modifies_2 b b' h1 h2)) + (ensures (modifies_2_1 b h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_2 b b' h1 h2)] + = () + +let lemma_modifies_1_2' (#t:Type) (#t':Type) (b:buffer t) (b':buffer t') h0 h1 h2 : Lemma + (requires (live h0 b /\ modifies_1 b h0 h1 /\ b' `unused_in` h0 /\ live h1 b' /\ + modifies_2 b' b h1 h2)) + (ensures (modifies_2_1 b h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_2 b' b h1 h2)] + = () + +let lemma_modifies_1_2'' (#t:Type) (#t':Type) (b:buffer t) (b':buffer t') h0 h1 h2 : Lemma + (requires (live h0 b /\ live h0 b' /\ modifies_1 b h0 h1 /\ modifies_2 b b' h1 h2)) + (ensures (modifies_2 b b' h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_2 b b' h1 h2)] + = () + +let lemma_modifies_1_2''' (#t:Type) (#t':Type) (b:buffer t) (b':buffer t') h0 h1 h2 : Lemma + (requires (live h0 b /\ live h0 b' /\ modifies_1 b h0 h1 /\ modifies_2 b' b h1 h2)) + (ensures (modifies_2 b' b h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_2 b' b h1 h2)] + = () + +let lemma_modifies_1_1_prime (#t:Type) (#t':Type) (b:buffer t) (b':buffer t') h0 h1 h2 : Lemma + (requires (live h0 b /\ modifies_1 b h0 h1 /\ b' `unused_in` h0 /\ live h1 b' /\ + modifies_1 b' h1 h2)) + (ensures (modifies_2_1 b h0 h2)) + [SMTPat (modifies_1 b h0 h1); SMTPat (modifies_1 b' h1 h2)] + = () + +let lemma_modifies_2_1 (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (live h0 b /\ live h0 b' /\ modifies_2 b b' h0 h1 /\ modifies_1 b h1 h2)) + (ensures (modifies_2 b 
b' h0 h2)) + [SMTPat (modifies_2 b b' h0 h1); SMTPat (modifies_1 b h1 h2)] + = () + +let lemma_modifies_2_1' (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (live h0 b /\ live h0 b' /\ modifies_2 b' b h0 h1 /\ modifies_1 b h1 h2)) + (ensures (modifies_2 b' b h0 h2)) + [SMTPat (modifies_2 b' b h0 h1); SMTPat (modifies_1 b h1 h2)] + = () + +let lemma_modifies_2_1'' (#a:Type) (#a':Type) (b:buffer a) (b':buffer a') h0 h1 h2 : Lemma + (requires (live h0 b /\ live h0 b' /\ modifies_2_1 b h0 h1 /\ modifies_1 b' h1 h2)) + (ensures (modifies_3_2 b b' h0 h2)) + [SMTPat (modifies_2_1 b h0 h1); SMTPat (modifies_1 b' h1 h2)] + = () + +(* TODO: lemmas for modifies_3 *) + +let lemma_modifies_0_unalloc (#a:Type) (b:buffer a) h0 h1 h2 : Lemma + (requires (b `unused_in` h0 /\ + frameOf b == HS.get_tip h0 /\ + modifies_0 h0 h1 /\ + modifies_1 b h1 h2)) + (ensures (modifies_0 h0 h2)) + = () + +let lemma_modifies_none_1_trans (#a:Type) (b:buffer a) h0 h1 h2 : Lemma + (requires (modifies_none h0 h1 /\ + live h0 b /\ + modifies_1 b h1 h2)) + (ensures (modifies_1 b h0 h2)) + = () + +let lemma_modifies_0_none_trans h0 h1 h2 : Lemma + (requires (modifies_0 h0 h1 /\ + modifies_none h1 h2)) + (ensures (modifies_0 h0 h2)) + = () + +#reset-options "--initial_fuel 0 --max_fuel 0" + +(** Concrete getters and setters *) +val create: #a:Type -> init:a -> len:UInt32.t -> StackInline (buffer a) + (requires (fun h -> True)) + (ensures (fun (h0:mem) b h1 -> b `unused_in` h0 + /\ live h1 b /\ idx b == 0 /\ length b == v len + /\ frameOf b == HS.get_tip h0 + /\ Map.domain (HS.get_hmap h1) == Map.domain (HS.get_hmap h0) + /\ modifies_0 h0 h1 + /\ as_seq h1 b == Seq.create (v len) init)) +let create #a init len = + let content: reference (lseq a (v len)) = + salloc (Seq.create (v len) init) in + let b = MkBuffer len content 0ul len in + let h = HST.get() in + assert (Seq.equal (as_seq h b) (sel h b)); + b + +#reset-options "--initial_fuel 0 --max_fuel 0" + +unfold let p (#a:Type0) (init:list a) : GTot Type0 = + normalize (0 < L.length init) /\ + normalize (L.length init <= UInt.max_int 32) + +unfold let q (#a:Type0) (len:nat) (buf:buffer a) : GTot Type0 = + normalize (length buf == len) + +(** Concrete getters and setters *) +val createL: #a:Type0 -> init:list a -> StackInline (buffer a) + (requires (fun h -> p #a init)) + (ensures (fun (h0:mem) b h1 -> + let len = L.length init in + len > 0 + /\ b `unused_in` h0 + /\ live h1 b /\ idx b == 0 /\ length b == len + /\ frameOf b == (HS.get_tip h0) + /\ Map.domain (HS.get_hmap h1) == Map.domain (HS.get_hmap h0) + /\ modifies_0 h0 h1 + /\ as_seq h1 b == Seq.seq_of_list init + /\ q #a len b)) +#set-options "--initial_fuel 1 --max_fuel 1" //the normalize_term (length init) in the pre-condition will be unfolded + //whereas the L.length init below will not +let createL #a init = + let len = UInt32.uint_to_t (L.length init) in + let s = Seq.seq_of_list init in + let content: reference (lseq a (v len)) = + salloc (Seq.seq_of_list init) in + let b = MkBuffer len content 0ul len in + let h = HST.get() in + assert (Seq.equal (as_seq h b) (sel h b)); + b + + +#reset-options "--initial_fuel 0 --max_fuel 0" +let lemma_upd (#a:Type) (h:mem) (x:reference a{live_region h (HS.frameOf x)}) (v:a) : Lemma + (requires True) + (ensures (Map.domain (HS.get_hmap h) == Map.domain (HS.get_hmap (upd h x v)))) + = let m = HS.get_hmap h in + let m' = Map.upd m (HS.frameOf x) (Heap.upd (Map.sel m (HS.frameOf x)) (HS.as_ref x) v) in + Set.lemma_equal_intro (Map.domain m) (Map.domain m') 
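(* A minimal usage sketch, not part of this patch: it illustrates the standard
   client-side discipline for the stack allocation API defined above, namely
   opening a fresh frame with `push_frame`, allocating with `create`, and
   letting `pop_frame` reclaim the buffer. The name `demo_create` is
   hypothetical and the trivial specification is only there to show the
   calling convention. *)
let demo_create () : Stack unit
  (requires (fun _ -> True))
  (ensures  (fun _ _ _ -> True))
= push_frame ();
  (* a 16-element buffer of zeros, live until the enclosing frame is popped *)
  let b = create 0ul 16ul in
  pop_frame ()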
+ +unfold let rcreate_post_common (#a:Type) (r:rid) (init:a) (len:UInt32.t) (b:buffer a) (h0 h1:mem) :Type0 + = b `unused_in` h0 + /\ live h1 b /\ idx b == 0 /\ length b == v len + /\ Map.domain (HS.get_hmap h1) == Map.domain (HS.get_hmap h0) + /\ HS.get_tip h1 == HS.get_tip h0 + /\ modifies (Set.singleton r) h0 h1 + /\ modifies_ref r Set.empty h0 h1 + /\ as_seq h1 b == Seq.create (v len) init + +private let rcreate_common (#a:Type) (r:rid) (init:a) (len:UInt32.t) (mm:bool) + :ST (buffer a) (requires (fun h0 -> is_eternal_region r)) + (ensures (fun h0 b h1 -> rcreate_post_common r init len b h0 h1 /\ + is_mm b.content == mm)) + = let h0 = HST.get() in + let s = Seq.create (v len) init in + let content: reference (lseq a (v len)) = + if mm then ralloc_mm r s else ralloc r s + in + let b = MkBuffer len content 0ul len in + let h1 = HST.get() in + assert (Seq.equal (as_seq h1 b) (sel h1 b)); + lemma_upd h0 content s; + b + +(** This function allocates a buffer in an "eternal" region, i.e. a region where memory is +// * automatically-managed. One does not need to call rfree on such a buffer. It +// * translates to C as a call to malloc and assumes a conservative garbage +// * collector is running. *) +val rcreate: #a:Type -> r:rid -> init:a -> len:UInt32.t -> ST (buffer a) + (requires (fun h -> is_eternal_region r)) + (ensures (fun (h0:mem) b h1 -> rcreate_post_common r init len b h0 h1 /\ ~(is_mm b.content))) +let rcreate #a r init len = rcreate_common r init len false + +(** This predicate tells whether a buffer can be `rfree`d. The only + way to produce it should be `rcreate_mm`, and the only way to + consume it should be `rfree.` Rationale: a buffer can be `rfree`d + only if it is the result of `rcreate_mm`. Subbuffers should not. *) +let freeable (#a: Type) (b: buffer a) : GTot Type0 = + is_mm b.content /\ is_eternal_region (frameOf b) /\ idx b == 0 + +(** This function allocates a buffer into a manually-managed buffer in a heap + * region, meaning that the client must call rfree in order to avoid memory + * leaks. It translates to C as a straight malloc. *) +let rcreate_mm (#a:Type) (r:rid) (init:a) (len:UInt32.t) + :ST (buffer a) (requires (fun h0 -> is_eternal_region r)) + (ensures (fun h0 b h1 -> rcreate_post_common r init len b h0 h1 /\ is_mm (content b) /\ freeable b)) + = rcreate_common r init len true + +#reset-options + +(** This function frees a buffer allocated with `rcreate_mm`. It translates to C as a regular free. 
*) +let rfree (#a:Type) (b:buffer a) + :ST unit (requires (fun h0 -> live h0 b /\ freeable b)) + (ensures (fun h0 _ h1 -> is_mm (content b) /\ is_eternal_region (frameOf b) /\ h1 == HS.free (content b) h0)) + = rfree b.content + +(* #reset-options "--z3rlimit 100 --initial_fuel 0 --max_fuel 0" *) + +(* val create_null: #a:Type -> init:a -> len:UInt32.t -> Stack (buffer a) *) +(* (requires (fun h -> True)) *) +(* (ensures (fun h0 b h1 -> length b = UInt32.v len /\ h0 == h1)) *) +(* let create_null #a init len = *) +(* push_frame(); *) +(* let r = create init len in *) +(* pop_frame(); *) +(* r *) + + +#reset-options "--initial_fuel 0 --max_fuel 0" + +// ocaml-only, used for conversions to Platform.bytes +val to_seq: #a:Type -> b:buffer a -> l:UInt32.t{v l <= length b} -> STL (seq a) + (requires (fun h -> live h b)) + (ensures (fun h0 r h1 -> h0 == h1 /\ live h1 b /\ Seq.length r == v l + (*/\ r == as_seq #a h1 b *) )) +let to_seq #a b l = + let s = !b.content in + let i = v b.idx in + Seq.slice s i (i + v l) + + +// ocaml-only, used for conversions to Platform.bytes +val to_seq_full: #a:Type -> b:buffer a -> ST (seq a) + (requires (fun h -> live h b)) + (ensures (fun h0 r h1 -> h0 == h1 /\ live h1 b /\ + r == as_seq #a h1 b )) +let to_seq_full #a b = + let s = !b.content in + let i = v b.idx in + Seq.slice s i (i + v b.length) + +val index: #a:Type -> b:buffer a -> n:UInt32.t{v n < length b} -> Stack a + (requires (fun h -> live h b)) + (ensures (fun h0 z h1 -> live h0 b /\ h1 == h0 + /\ z == Seq.index (as_seq h0 b) (v n))) +let index #a b n = + let s = !b.content in + Seq.index s (v b.idx + v n) + +(** REMARK: the proof of this lemma relies crucially on the `a == a'` condition +// in `disjoint`, and on the pattern in `Seq.slice_upd` *) +private let lemma_aux_0 + (#a:Type) (b:buffer a) (n:UInt32.t{v n < length b}) (z:a) (h0:mem) (tt:Type) (bb:buffer tt) + :Lemma (requires (live h0 b /\ live h0 bb /\ disjoint b bb)) + (ensures (live h0 b /\ live h0 bb /\ + (let h1 = HS.upd h0 b.content (Seq.upd (sel h0 b) (idx b + v n) z) in + as_seq h0 bb == as_seq h1 bb))) + = Heap.lemma_distinct_addrs_distinct_preorders (); + Heap.lemma_distinct_addrs_distinct_mm () + +#set-options "--z3rlimit 10" +private let lemma_aux_1 + (#a:Type) (b:buffer a) (n:UInt32.t{v n < length b}) (z:a) (h0:mem) (tt:Type) + :Lemma (requires (live h0 b)) + (ensures (live h0 b /\ + (forall (bb:buffer tt). (live h0 bb /\ disjoint b bb) ==> + (let h1 = HS.upd h0 b.content (Seq.upd (sel h0 b) (idx b + v n) z) in + as_seq h0 bb == as_seq h1 bb)))) + = let open FStar.Classical in + forall_intro (move_requires (lemma_aux_0 b n z h0 tt)) + +#reset-options "--initial_fuel 0 --max_fuel 0" + +private let lemma_aux_2 + (#a:Type) (b:buffer a) (n:UInt32.t{v n < length b}) (z:a) (h0:mem) + :Lemma (requires (live h0 b)) + (ensures (live h0 b /\ + (forall (tt:Type) (bb:buffer tt). 
(live h0 bb /\ disjoint b bb) ==> + (let h1 = HS.upd h0 b.content (Seq.upd (sel h0 b) (idx b + v n) z) in + as_seq h0 bb == as_seq h1 bb)))) + = let open FStar.Classical in + forall_intro (move_requires (lemma_aux_1 b n z h0)) + +private val lemma_aux: #a:Type -> b:buffer a -> n:UInt32.t{v n < length b} -> z:a + -> h0:mem -> Lemma + (requires (live h0 b)) + (ensures (live h0 b + /\ modifies_1 b h0 (HS.upd h0 b.content (Seq.upd (sel h0 b) (idx b + v n) z)) )) + [SMTPat (HS.upd h0 b.content (Seq.upd (sel h0 b) (idx b + v n) z))] +let lemma_aux #a b n z h0 = lemma_aux_2 b n z h0 + +val upd: #a:Type -> b:buffer a -> n:UInt32.t -> z:a -> Stack unit + (requires (fun h -> live h b /\ v n < length b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live h1 b /\ v n < length b + /\ modifies_1 b h0 h1 + /\ as_seq h1 b == Seq.upd (as_seq h0 b) (v n) z )) +let upd #a b n z = + let h0 = HST.get () in + let s0 = !b.content in + let s = Seq.upd s0 (v b.idx + v n) z in + b.content := s; + lemma_aux b n z h0; + let h = HST.get() in + Seq.lemma_eq_intro (as_seq h b) (Seq.slice s (idx b) (idx b + length b)); + Seq.upd_slice s0 (idx b) (idx b + length b) (v n) z + +val sub: #a:Type -> b:buffer a -> i:UInt32.t + -> len:UInt32.t{v i + v len <= length b} + -> Tot (b':buffer a{b `includes` b' /\ length b' == v len}) +let sub #a b i len = + assert (v i + v b.idx < pow2 n); // was formerly a precondition + MkBuffer b.max_length b.content (i +^ b.idx) len + +let sub_sub + (#a: Type) + (b: buffer a) + (i1: UInt32.t) + (len1: UInt32.t{v i1 + v len1 <= length b}) + (i2: UInt32.t) + (len2: UInt32.t {v i2 + v len2 <= v len1}) +: Lemma + (ensures (sub (sub b i1 len1) i2 len2 == sub b (i1 +^ i2) len2)) += () + +let sub_zero_length + (#a: Type) + (b: buffer a) +: Lemma + (ensures (sub b (UInt32.uint_to_t 0) (UInt32.uint_to_t (length b)) == b)) += () + +let lemma_sub_spec (#a:Type) (b:buffer a) + (i:UInt32.t) + (len:UInt32.t{v len <= length b /\ v i + v len <= length b}) + h : Lemma + (requires (live h b)) + (ensures (live h (sub b i len) /\ + as_seq h (sub b i len) == Seq.slice (as_seq h b) (v i) (v i + v len))) + [SMTPat (sub b i len); SMTPat (live h b)] + = Seq.lemma_eq_intro (as_seq h (sub b i len)) (Seq.slice (as_seq h b) (v i) (v i + v len)) + +let lemma_sub_spec' (#a:Type) (b:buffer a) + (i:UInt32.t) + (len:UInt32.t{v len <= length b /\ v i + v len <= length b}) + h : Lemma + (requires (live h b)) + (ensures (live h (sub b i len) /\ + as_seq h (sub b i len) == Seq.slice (as_seq h b) (v i) (v i + v len))) + [SMTPat (live h (sub b i len))] + = lemma_sub_spec b i len h + +val offset: #a:Type -> b:buffer a + -> i:UInt32.t{v i + v b.idx < pow2 n /\ v i <= v b.length} + -> Tot (b':buffer a{b `includes` b'}) +let offset #a b i = + MkBuffer b.max_length b.content (i +^ b.idx) (b.length -^ i) + +let lemma_offset_spec (#a:Type) (b:buffer a) + (i:UInt32.t{v i + v b.idx < pow2 n /\ v i <= v b.length}) + h : Lemma + (requires True) + (ensures (as_seq h (offset b i) == Seq.slice (as_seq h b) (v i) (length b))) + [SMTPatOr [[SMTPat (as_seq h (offset b i))]; + [SMTPat (Seq.slice (as_seq h b) (v i) (length b))]]] + = Seq.lemma_eq_intro (as_seq h (offset b i)) (Seq.slice (as_seq h b) (v i) (length b)) + +private val eq_lemma1: + #a:eqtype + -> b1:buffer a + -> b2:buffer a + -> len:UInt32.t{v len <= length b1 /\ v len <= length b2} + -> h:mem + -> Lemma + (requires (forall (j:nat). 
j < v len ==> get h b1 j == get h b2 j)) + (ensures equal h (sub b1 0ul len) h (sub b2 0ul len)) + [SMTPat (equal h (sub b1 0ul len) h (sub b2 0ul len))] +let eq_lemma1 #a b1 b2 len h = + Seq.lemma_eq_intro (as_seq h (sub b1 0ul len)) (as_seq h (sub b2 0ul len)) + +#reset-options "--z3rlimit 20" + +private val eq_lemma2: + #a:eqtype + -> b1:buffer a + -> b2:buffer a + -> len:UInt32.t{v len <= length b1 /\ v len <= length b2} + -> h:mem + -> Lemma + (requires equal h (sub b1 0ul len) h (sub b2 0ul len)) + (ensures (forall (j:nat). j < v len ==> get h b1 j == get h b2 j)) + [SMTPat (equal h (sub b1 0ul len) h (sub b2 0ul len))] +let eq_lemma2 #a b1 b2 len h = + let s1 = as_seq h (sub b1 0ul len) in + let s2 = as_seq h (sub b2 0ul len) in + cut (forall (j:nat). j < v len ==> get h b1 j == Seq.index s1 j); + cut (forall (j:nat). j < v len ==> get h b2 j == Seq.index s2 j) + +(** Corresponds to memcmp for `eqtype` *) +val eqb: #a:eqtype -> b1:buffer a -> b2:buffer a + -> len:UInt32.t{v len <= length b1 /\ v len <= length b2} + -> ST bool + (requires (fun h -> live h b1 /\ live h b2)) + (ensures (fun h0 z h1 -> h1 == h0 /\ + (z <==> equal h0 (sub b1 0ul len) h0 (sub b2 0ul len)))) +let rec eqb #a b1 b2 len = + if len =^ 0ul then true + else + let len' = len -^ 1ul in + if index b1 len' = index b2 len' then + eqb b1 b2 len' + else + false + +(** +// Defining operators for buffer accesses as specified at +// https://github.com/FStarLang/FStar/wiki/Parsing-and-operator-precedence +// *) +(* JP: if the [val] is not specified, there's an issue with these functions +// * taking an extra unification parameter at extraction-time... *) +val op_Array_Access: #a:Type -> b:buffer a -> n:UInt32.t{v n < length b} -> Stack a + (requires (fun h -> live h b)) + (ensures (fun h0 z h1 -> h1 == h0 + /\ z == Seq.index (as_seq h0 b) (v n))) +let op_Array_Access #a b n = index #a b n + +val op_Array_Assignment: #a:Type -> b:buffer a -> n:UInt32.t -> z:a -> Stack unit + (requires (fun h -> live h b /\ v n < length b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live h1 b /\ v n < length b + /\ modifies_1 b h0 h1 + /\ as_seq h1 b == Seq.upd (as_seq h0 b) (v n) z )) +let op_Array_Assignment #a b n z = upd #a b n z + +let lemma_modifies_one_trans_1 (#a:Type) (b:buffer a) (h0:mem) (h1:mem) (h2:mem): Lemma + (requires (modifies_one (frameOf b) h0 h1 /\ modifies_one (frameOf b) h1 h2)) + (ensures (modifies_one (frameOf b) h0 h2)) + [SMTPat (modifies_one (frameOf b) h0 h1); SMTPat (modifies_one (frameOf b) h1 h2)] + = () + +#reset-options "--z3rlimit 100 --max_fuel 0 --max_ifuel 0 --initial_fuel 0 --initial_ifuel 0" + +(** Corresponds to memcpy *) +val blit: #t:Type + -> a:buffer t + -> idx_a:UInt32.t{v idx_a <= length a} + -> b:buffer t{disjoint a b} + -> idx_b:UInt32.t{v idx_b <= length b} + -> len:UInt32.t{v idx_a + v len <= length a /\ v idx_b + v len <= length b} + -> Stack unit + (requires (fun h -> live h a /\ live h b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live h0 a /\ live h1 b /\ live h1 a /\ modifies_1 b h0 h1 + /\ Seq.slice (as_seq h1 b) (v idx_b) (v idx_b + v len) == + Seq.slice (as_seq h0 a) (v idx_a) (v idx_a + v len) + /\ Seq.slice (as_seq h1 b) 0 (v idx_b) == + Seq.slice (as_seq h0 b) 0 (v idx_b) + /\ Seq.slice (as_seq h1 b) (v idx_b+v len) (length b) == + Seq.slice (as_seq h0 b) (v idx_b+v len) (length b) )) +#restart-solver +let rec blit #t a idx_a b idx_b len = + let h0 = HST.get () in + if len =^ 0ul then () + else + begin + let len' = len -^ 1ul in + blit #t a idx_a b idx_b len'; + let z = a.(idx_a +^ len') in + 
b.(idx_b +^ len') <- z; + let h1 = HST.get() in + Seq.snoc_slice_index (as_seq h1 b) (v idx_b) (v idx_b + v len'); + Seq.cons_head_tail (Seq.slice (as_seq h0 b) (v idx_b + v len') (length b)); + Seq.cons_head_tail (Seq.slice (as_seq h1 b) (v idx_b + v len') (length b)) + end + +(** Corresponds to memset *) +val fill: #t:Type + -> b:buffer t + -> z:t + -> len:UInt32.t{v len <= length b} + -> Stack unit + (requires (fun h -> live h b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live h1 b /\ modifies_1 b h0 h1 + /\ Seq.slice (as_seq h1 b) 0 (v len) == Seq.create (v len) z + /\ Seq.slice (as_seq h1 b) (v len) (length b) == + Seq.slice (as_seq h0 b) (v len) (length b) )) +let rec fill #t b z len = + let h0 = HST.get () in + if len =^ 0ul then () + else + begin + let len' = len -^ 1ul in + fill #t b z len'; + b.(len') <- z; + let h = HST.get() in + Seq.snoc_slice_index (as_seq h b) 0 (v len'); + Seq.lemma_tail_slice (as_seq h b) (v len') (length b) + end; + let h1 = HST.get() in + Seq.lemma_eq_intro (Seq.slice (as_seq h1 b) 0 (v len)) (Seq.create (v len) z) + + +let split #t (b:buffer t) (i:UInt32.t{v i <= length b}) : Tot (buffer t & buffer t) + = sub b 0ul i, offset b i + +let join #t (b:buffer t) (b':buffer t{b.max_length == b'.max_length /\ b.content === b'.content /\ idx b + length b == idx b'}) : Tot (buffer t) + = MkBuffer (b.max_length) (b.content) (b.idx) (FStar.UInt32.(b.length +^ b'.length)) + + +val no_upd_lemma_0: #t:Type -> h0:mem -> h1:mem -> b:buffer t -> Lemma + (requires (live h0 b /\ modifies_0 h0 h1)) + (ensures (live h0 b /\ live h1 b /\ equal h0 b h1 b)) + [SMTPat (modifies_0 h0 h1); SMTPat (live h0 b)] +let no_upd_lemma_0 #t h0 h1 b = () + +val no_upd_lemma_1: #t:Type -> #t':Type -> h0:mem -> h1:mem -> a:buffer t -> b:buffer t' -> Lemma + (requires (live h0 b /\ disjoint a b /\ modifies_1 a h0 h1)) + (ensures (live h0 b /\ live h1 b /\ equal h0 b h1 b)) + [SMTPat (modifies_1 a h0 h1); SMTPat (live h0 b)] +let no_upd_lemma_1 #t #t' h0 h1 a b = () + +#reset-options "--z3rlimit 30 --initial_fuel 0 --max_fuel 0" + +val no_upd_lemma_2: #t:Type -> #t':Type -> #t'':Type -> h0:mem -> h1:mem -> a:buffer t -> a':buffer t' -> b:buffer t'' -> Lemma + (requires (live h0 b /\ disjoint a b /\ disjoint a' b /\ modifies_2 a a' h0 h1)) + (ensures (live h0 b /\ live h1 b /\ equal h0 b h1 b)) + [SMTPat (live h0 b); SMTPat (modifies_2 a a' h0 h1)] +let no_upd_lemma_2 #t #t' #t'' h0 h1 a a' b = () + +val no_upd_lemma_2_1: #t:Type -> #t':Type -> h0:mem -> h1:mem -> a:buffer t -> b:buffer t' -> Lemma + (requires (live h0 b /\ disjoint a b /\ modifies_2_1 a h0 h1)) + (ensures (live h0 b /\ live h1 b /\ equal h0 b h1 b)) + [SMTPat (live h0 b); SMTPat (modifies_2_1 a h0 h1)] +let no_upd_lemma_2_1 #t #t' h0 h1 a b = () + +val no_upd_fresh: #t:Type -> h0:mem -> h1:mem -> a:buffer t -> Lemma + (requires (live h0 a /\ fresh_frame h0 h1)) + (ensures (live h0 a /\ live h1 a /\ equal h0 a h1 a)) + [SMTPat (live h0 a); SMTPat (fresh_frame h0 h1)] +let no_upd_fresh #t h0 h1 a = () + +val no_upd_popped: #t:Type -> h0:mem -> h1:mem -> b:buffer t -> Lemma + (requires (live h0 b /\ frameOf b =!= HS.get_tip h0 /\ popped h0 h1)) + (ensures (live h0 b /\ live h1 b /\ equal h0 b h1 b)) + [SMTPat (live h0 b); SMTPat (popped h0 h1)] +let no_upd_popped #t h0 h1 b = () + +(* Modifies of subset lemmas *) +let lemma_modifies_sub_0 h0 h1 : Lemma + (requires (h1 == h0)) + (ensures (modifies_0 h0 h1)) + [SMTPat (modifies_0 h0 h1)] + = () + +let lemma_modifies_sub_1 #t h0 h1 (b:buffer t) : Lemma + (requires (h1 == h0)) + 
(ensures (modifies_1 b h0 h1)) + [SMTPat (live h0 b); SMTPat (modifies_1 b h0 h1)] + = () + +let lemma_modifies_sub_2 #t #t' h0 h1 (b:buffer t) (b':buffer t') : Lemma + (requires (h1 == h0)) + (ensures (modifies_2 b b' h0 h1)) + [SMTPat (live h0 b); SMTPat (live h0 b'); SMTPat (modifies_2 b b' h0 h1)] + = () + +let lemma_modifies_sub_2_1 #t h0 h1 (b:buffer t) : Lemma + (requires (modifies_0 h0 h1 /\ live h0 b)) + (ensures (modifies_2_1 b h0 h1)) + [SMTPat (live h0 b); SMTPat (modifies_2_1 b h0 h1)] + = () + +#reset-options "--z3rlimit 100 --initial_fuel 0 --max_fuel 0" + +let modifies_subbuffer_1 (#t:Type) h0 h1 (sub:buffer t) (a:buffer t) : Lemma + (requires (live h0 a /\ modifies_1 sub h0 h1 /\ includes a sub)) + (ensures (modifies_1 a h0 h1 /\ live h1 a)) + [SMTPat (modifies_1 sub h0 h1); SMTPat (includes a sub)] + = () + +let modifies_subbuffer_2 (#t:Type) (#t':Type) h0 h1 (sub:buffer t) (a':buffer t') (a:buffer t) : Lemma + (requires (live h0 a /\ live h0 a' /\ includes a sub /\ modifies_2 sub a' h0 h1 )) + (ensures (modifies_2 a a' h0 h1 /\ modifies_2 a' a h0 h1 /\ live h1 a)) + [SMTPat (modifies_2 sub a' h0 h1); SMTPat (includes a sub)] + = () + +let modifies_subbuffer_2' (#t:Type) (#t':Type) h0 h1 (sub:buffer t) (a':buffer t') (a:buffer t) : Lemma + (requires (live h0 a /\ live h0 a' /\ includes a sub /\ modifies_2 a' sub h0 h1 )) + (ensures (modifies_2 a a' h0 h1 /\ live h1 a)) + [SMTPat (modifies_2 a' sub h0 h1); SMTPat (includes a sub)] + = () + +let modifies_subbuffer_2_1 (#t:Type) h0 h1 (sub:buffer t) (a:buffer t) : Lemma + (requires (live h0 a /\ includes a sub /\ modifies_2_1 sub h0 h1)) + (ensures (modifies_2_1 a h0 h1 /\ live h1 a)) + [SMTPat (modifies_2_1 sub h0 h1); SMTPat (includes a sub)] + = () + +let modifies_subbuffer_2_prime (#t:Type) h0 h1 (sub1:buffer t) (sub2:buffer t) (a:buffer t) : Lemma + (requires (live h0 a /\ includes a sub1 /\ includes a sub2 /\ modifies_2 sub1 sub2 h0 h1)) + (ensures (modifies_1 a h0 h1 /\ live h1 a)) + [SMTPat (modifies_2 sub1 sub2 h0 h1); SMTPat (includes a sub1); SMTPat (includes a sub2)] + = () + +let modifies_popped_3_2 (#t:Type) #t' (a:buffer t) (b:buffer t') h0 h1 h2 h3 : Lemma + (requires (live h0 a /\ live h0 b /\ fresh_frame h0 h1 /\ popped h2 h3 /\ modifies_3_2 a b h1 h2)) + (ensures (modifies_2 a b h0 h3)) + [SMTPat (fresh_frame h0 h1); SMTPat (popped h2 h3); SMTPat (modifies_3_2 a b h1 h2)] + = () + +let modifies_popped_2 (#t:Type) #t' (a:buffer t) (b:buffer t') h0 h1 h2 h3 : Lemma + (requires (live h0 a /\ live h0 b /\ fresh_frame h0 h1 /\ popped h2 h3 /\ modifies_2 a b h1 h2)) + (ensures (modifies_2 a b h0 h3)) + [SMTPat (fresh_frame h0 h1); SMTPat (popped h2 h3); SMTPat (modifies_2 a b h1 h2)] + = () + +let modifies_popped_1 (#t:Type) (a:buffer t) h0 h1 h2 h3 : Lemma + (requires (live h0 a /\ fresh_frame h0 h1 /\ popped h2 h3 /\ modifies_2_1 a h1 h2)) + (ensures (modifies_1 a h0 h3)) + [SMTPat (fresh_frame h0 h1); SMTPat (popped h2 h3); SMTPat (modifies_2_1 a h1 h2)] + = () + +let modifies_popped_1' (#t:Type) (a:buffer t) h0 h1 h2 h3 : Lemma + (requires (live h0 a /\ fresh_frame h0 h1 /\ popped h2 h3 /\ modifies_1 a h1 h2)) + (ensures (modifies_1 a h0 h3)) + [SMTPat (fresh_frame h0 h1); SMTPat (popped h2 h3); SMTPat (modifies_1 a h1 h2)] + = () + +let modifies_popped_0 h0 h1 h2 h3 : Lemma + (requires (fresh_frame h0 h1 /\ popped h2 h3 /\ modifies_0 h1 h2)) + (ensures (modifies_0 h0 h3)) + [SMTPat (fresh_frame h0 h1); SMTPat (popped h2 h3); SMTPat (modifies_0 h1 h2)] + = () + +let live_popped (#t:Type) (b:buffer t) h0 h1 : 
Lemma + (requires (popped h0 h1 /\ live h0 b /\ frameOf b =!= HS.get_tip h0)) + (ensures (live h1 b)) + [SMTPat (popped h0 h1); SMTPat (live h0 b)] + = () + +let live_fresh (#t:Type) (b:buffer t) h0 h1 : Lemma + (requires (fresh_frame h0 h1 /\ live h0 b)) + (ensures (live h1 b)) + [SMTPat (fresh_frame h0 h1); SMTPat (live h0 b)] + = () + +let modifies_0_to_2_1_lemma (#t:Type) h0 h1 (b:buffer t) : Lemma + (requires (modifies_0 h0 h1 /\ live h0 b)) + (ensures (modifies_2_1 b h0 h1)) + [SMTPat (modifies_2_1 b h0 h1); SMTPat (live h0 b) ] + = () + +let lemma_modifies_none_push_pop h0 h1 h2 : Lemma + (requires (fresh_frame h0 h1 /\ popped h1 h2)) + (ensures (modifies_none h0 h2)) + = () + +let lemma_modifies_0_push_pop h0 h1 h2 h3 : Lemma + (requires (fresh_frame h0 h1 /\ modifies_0 h1 h2 /\ popped h2 h3)) + (ensures (modifies_none h0 h3)) + = () + +let modifies_1_to_2_1_lemma (#t:Type) h0 h1 (b:buffer t) : Lemma + (requires (modifies_1 b h0 h1 /\ live h0 b)) + (ensures (modifies_2_1 b h0 h1)) + [SMTPat (modifies_2_1 b h0 h1); SMTPat (live h0 b) ] + = () + +(* let modifies_1_to_2_lemma (#t:Type) #t' h0 h1 (b:buffer t) (b':buffer t'): Lemma *) +(* (requires (modifies_1 b h0 h1 /\ live h0 b)) *) +(* (ensures (modifies_2 b b' h0 h1)) *) +(* [SMTPat (modifies_2 b b' h0 h1); SMTPat (live h0 b) ] *) +(* = () *) + +let modifies_poppable_0 (h0 h1:mem) : Lemma + (requires (modifies_0 h0 h1 /\ HS.poppable h0)) + (ensures (HS.poppable h1)) + [SMTPat (modifies_0 h0 h1)] + = () + +let modifies_poppable_1 #t (h0 h1:mem) (b:buffer t) : Lemma + (requires (modifies_1 b h0 h1 /\ HS.poppable h0)) + (ensures (HS.poppable h1)) + [SMTPat (modifies_1 b h0 h1)] + = () + +let modifies_poppable_2_1 #t (h0 h1:mem) (b:buffer t) : Lemma + (requires (modifies_2_1 b h0 h1 /\ HS.poppable h0)) + (ensures (HS.poppable h1)) + [SMTPat (modifies_2_1 b h0 h1)] + = () + +let modifies_poppable_2 #t #t' (h0 h1:mem) (b:buffer t) (b':buffer t') : Lemma + (requires (modifies_2 b b' h0 h1 /\ HS.poppable h0)) + (ensures (HS.poppable h1)) + [SMTPat (modifies_2 b' b h0 h1)] + = () + +let modifies_poppable_3_2 #t #t' (h0 h1:mem) (b:buffer t) (b':buffer t') : Lemma + (requires (modifies_3_2 b b' h0 h1 /\ HS.poppable h0)) + (ensures (HS.poppable h1)) + [SMTPat (modifies_3_2 b' b h0 h1)] + = () + +let lemma_fresh_poppable (h0 h1:mem) : Lemma + (requires (fresh_frame h0 h1)) + (ensures (poppable h1)) + [SMTPat (fresh_frame h0 h1)] + = () + +let lemma_equal_domains_popped (h0 h1 h2 h3:mem) : Lemma + (requires (equal_domains h0 h1 /\ popped h0 h2 /\ popped h1 h3)) + (ensures (equal_domains h2 h3)) + = () + +let lemma_equal_domains (h0 h1 h2 h3:mem) : Lemma + (requires (fresh_frame h0 h1 /\ equal_domains h1 h2 /\ popped h2 h3)) + (ensures (equal_domains h0 h3)) + [SMTPat (fresh_frame h0 h1); SMTPat (equal_domains h1 h2); SMTPat (popped h2 h3)] + = () + +let lemma_equal_domains_2 (h0 h1 h2 h3 h4:mem) : Lemma + (requires (fresh_frame h0 h1 + /\ modifies_0 h1 h2 /\ Map.domain (HS.get_hmap h1) == Map.domain (HS.get_hmap h2) + /\ equal_domains h2 h3 /\ popped h3 h4)) + (ensures (equal_domains h0 h4)) + [SMTPat (fresh_frame h0 h1); SMTPat (modifies_0 h1 h2); SMTPat (popped h3 h4)] + = () + +#reset-options "--z3rlimit 50" + +let rec assignL #a (l: list a) (b: buffer a): Stack unit + (requires (fun h0 -> + live h0 b /\ + length b = L.length l)) + (ensures (fun h0 _ h1 -> + live h1 b /\ + modifies_1 b h0 h1 /\ + as_seq h1 b == Seq.seq_of_list l)) += lemma_seq_of_list_induction l; + match l with + | [] -> () + | hd :: tl -> + let b_hd = sub b 0ul 1ul in + let 
b_tl = offset b 1ul in + b_hd.(0ul) <- hd; + assignL tl b_tl; + let h = HST.get () in + assert (get h b_hd 0 == hd); + assert (as_seq h b_tl == Seq.seq_of_list tl); + assert (Seq.equal (as_seq h b) (Seq.append (as_seq h b_hd) (as_seq h b_tl))); + assert (Seq.equal (as_seq h b) (Seq.seq_of_list l)) diff --git a/stage0/ulib/legacy/FStar.BufferNG.fst b/stage0/ulib/legacy/FStar.BufferNG.fst new file mode 100644 index 00000000000..b530571236c --- /dev/null +++ b/stage0/ulib/legacy/FStar.BufferNG.fst @@ -0,0 +1,492 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.BufferNG +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module P = FStar.Pointer + +(* This module will help for the transition of some buffer-based code + It tries to sidestep the following two issues: + - the type of elements must be embeddable into P.typ + - all elements must always be readable (no uninitialized data) +*) + +let rec supported + (t : P.typ) +: Tot bool += match t with + | P.TBase _ -> true + | P.TStruct l -> struct_typ_supported l.P.fields + | _ -> false + +and struct_typ_supported + (l: list (string & P.typ)) +: Tot bool += match l with + | [] -> true + | (_, t) :: l' -> supported t && struct_typ_supported l' + +let typ = (t: P.typ { supported t } ) + +unfold +let buffer + (t: typ) +: Tot Type0 += P.buffer t + +unfold +let live (#a: typ) (h: HS.mem) (b: buffer a) : GTot Type0 = + P.buffer_readable h b + +unfold +let unused_in (#a: typ) (b: buffer a) (h: HS.mem) : GTot Type0 = + P.buffer_unused_in b h + +unfold +let length (#a: typ) (b: buffer a) : GTot nat = + UInt32.v (P.buffer_length b) + +unfold +let as_addr (#a: typ) (b: buffer a) : GTot nat = + P.buffer_as_addr b + +unfold +let frameOf (#a: typ) (b: buffer a) : GTot HH.rid = + P.frameOf_buffer b + +unfold +let as_seq (#a: typ) (h: HS.mem) (b: buffer a) : GTot (s: Seq.seq (P.type_of_typ a) { Seq.length s == length b } ) = + P.buffer_as_seq h b + +unfold +let equal (#a: typ) (h: HS.mem) (b: buffer a) (h' : HS.mem) (b' : buffer a) : GTot Type0 = + as_seq h b == as_seq h' b' + +unfold +let includes + (#a: typ) + (x y: buffer a) +: GTot Type0 += P.buffer_includes x y + +let includes_live + (#a: typ) + (h: HS.mem) + (x y : buffer a) +: Lemma + (requires (x `includes` y /\ live h x)) + (ensures (live h y)) += P.buffer_includes_elim x y + +let includes_as_seq #a h1 h2 (x: buffer a) (y: buffer a) +: Lemma + (requires (x `includes` y /\ as_seq h1 x == as_seq h2 x)) + (ensures (as_seq h1 y == as_seq h2 y)) += P.buffer_includes_elim x y + +let includes_trans #a (x y z: buffer a) +: Lemma + (requires (x `includes` y /\ y `includes` z)) + (ensures (x `includes` z)) += P.buffer_includes_trans x y z + +unfold +let disjoint (#a #a' : typ) (x: buffer a) (y: buffer a') : GTot Type0 = + P.loc_disjoint (P.loc_buffer x) (P.loc_buffer y) + +(* Disjointness is symmetric *) +let lemma_disjoint_symm #a #a' (x:buffer a) (y:buffer a') : Lemma + (requires True) + (ensures (disjoint x y <==> 
disjoint y x)) + [SMTPat (disjoint x y)] + = () + +let lemma_disjoint_sub #a #a' (x:buffer a) (subx:buffer a) (y:buffer a') : Lemma + (requires (includes x subx /\ disjoint x y)) + (ensures (disjoint subx y)) + [SMTPat (disjoint subx y); SMTPat (includes x subx)] + = P.buffer_includes_loc_includes x subx; + P.loc_disjoint_includes (P.loc_buffer x) (P.loc_buffer y) (P.loc_buffer subx) (P.loc_buffer y) + +let lemma_disjoint_sub' #a #a' (x:buffer a) (subx:buffer a) (y:buffer a') : Lemma + (requires (includes x subx /\ disjoint x y)) + (ensures (disjoint subx y)) + [SMTPat (disjoint y subx); SMTPat (includes x subx)] + = () + +let lemma_live_disjoint #a #a' h (b:buffer a) (b':buffer a') : Lemma + (requires (live h b /\ b' `unused_in` h)) + (ensures (disjoint b b')) + [SMTPat (disjoint b b'); SMTPat (live h b)] += () + +(** Concrete getters and setters *) +val create + (#a:typ) + (init: P.type_of_typ a) + (len:UInt32.t) +: HST.StackInline (buffer a) + (requires (fun h -> + UInt32.v len > 0 + )) + (ensures (fun (h0: HS.mem) b h1 -> + UInt32.v len > 0 /\ + b `unused_in` h0 /\ + live h1 b /\ + length b == UInt32.v len /\ + frameOf b == (HS.get_tip h0) /\ + P.modifies_0 h0 h1 /\ + as_seq h1 b == Seq.create (UInt32.v len) init + )) +let create #a init len = + let len : P.array_length_t = len in + let content = P.screate (P.TArray len a) (Some (Seq.create (UInt32.v len) init)) in + P.buffer_of_array_pointer content + +unfold let p (#a:typ) (init:list (P.type_of_typ a)) : GTot Type0 = + normalize (0 < FStar.List.Tot.length init) /\ + normalize (FStar.List.Tot.length init < UInt.max_int 32) + +unfold let q (#a:typ) (len:nat) (buf:buffer a) : GTot Type0 = + normalize (length buf == len) + +val createL + (#a: typ) + (init:list (P.type_of_typ a)) +: HST.StackInline (buffer a) + (requires (fun h -> p #a init)) + (ensures (fun (h0: HS.mem) b h1 -> + let len = FStar.List.Tot.length init in + len > 0 /\ + b `unused_in` h0 /\ + live h1 b /\ + length b == len /\ + frameOf b == (HS.get_tip h0) /\ + P.modifies_0 h0 h1 /\ + as_seq h1 b == Seq.seq_of_list init /\ + q #a len b + )) + +#set-options "--initial_fuel 1 --max_fuel 1" //the normalize_term (length init) in the pre-condition will be unfolded + //whereas the L.length init below will not + +let createL #a init = + let len : P.array_length_t = UInt32.uint_to_t (List.Tot.length init) in + let s = Seq.seq_of_list init in + let content = P.screate (P.TArray len a) (Some s) in + P.buffer_of_array_pointer content + +#reset-options "--initial_fuel 0 --max_fuel 0" + +val rcreate + (#a: typ) + (r:HH.rid) + (init: P.type_of_typ a) + (len:UInt32.t) +: HST.ST (buffer a) + (requires (fun h -> + HST.is_eternal_region r /\ + HST.witnessed (HST.region_contains_pred r) /\ + UInt32.v len > 0 + )) + (ensures (fun (h0: HS.mem) b h1 -> + b `unused_in` h0 /\ + live h1 b /\ + length b == UInt32.v len /\ + (HS.get_tip h1) == (HS.get_tip h0) /\ + P.modifies (P.loc_addresses r Set.empty) h0 h1 /\ + as_seq h1 b == Seq.create (UInt32.v len) init + )) + +let rcreate #a r init len = + let len : P.array_length_t = len in + let content = P.ecreate (P.TArray len a) r (Some (Seq.create (UInt32.v len) init)) in + P.buffer_of_array_pointer content + +val index + (#a: typ) + (b: buffer a) + (n: UInt32.t) +: HST.Stack (P.type_of_typ a) + (requires (fun h -> + UInt32.v n < length b /\ + live h b + )) + (ensures (fun h0 z h1 -> + UInt32.v n < length b /\ + h1 == h0 /\ + z == Seq.index (as_seq h0 b) (UInt32.v n) + )) + +let index #a b n = + P.read_buffer b n + +val upd + (#a: typ) + (b: buffer a) + 
(n: UInt32.t) + (z: P.type_of_typ a) +: HST.Stack unit + (requires (fun h -> + live h b /\ + UInt32.v n < length b + )) + (ensures (fun h0 _ h1 -> + live h1 b /\ + UInt32.v n < length b /\ + P.modifies (P.loc_pointer (P.gpointer_of_buffer_cell b n)) h0 h1 /\ + as_seq h1 b == Seq.upd (as_seq h0 b) (UInt32.v n) z + )) + +let upd #a b n z = + let h0 = HST.get () in + P.write_buffer b n z; + let h1 = HST.get () in + assert (Seq.equal (as_seq h1 b) (Seq.upd (as_seq h0 b) (UInt32.v n) z)) + +(* NOTE: Here I cannot fully respect the Buffer interface, + because pure sub no longer exists, since it has been split + into ghost gsub and stateful sub + *) + +unfold +let gsub + (#a: typ) + (b: buffer a) + (i: UInt32.t) + (len: UInt32.t) +: Ghost (buffer a) + (requires (UInt32.v i + UInt32.v len <= length b)) + (ensures (fun _ -> True)) += P.gsub_buffer b i len + +let sub + (#a: typ) + (b: buffer a) + (i: UInt32.t) + (len: UInt32.t) +: HST.Stack (buffer a) + (requires (fun h -> + live h b /\ + UInt32.v i + UInt32.v len <= length b + )) + (ensures (fun h0 b' h1 -> + live h0 b /\ + UInt32.v i + UInt32.v len <= length b /\ + h1 == h0 /\ + b' == gsub b i len /\ + b `includes` b' + )) += P.sub_buffer b i len + +let sub_sub + (#a: typ) + (b: buffer a) + (i1: UInt32.t) + (len1: UInt32.t) + (i2: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 + UInt32.v len1 <= length b /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v len1 + )) + (ensures ( + UInt32.v i1 + UInt32.v len1 <= length b /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v len1 /\ + gsub (gsub b i1 len1) i2 len2 == gsub b (UInt32.add i1 i2) len2 + )) += () + +let sub_zero_length + (#a: typ) + (b: buffer a) +: Lemma + (ensures (gsub b (UInt32.uint_to_t 0) (UInt32.uint_to_t (length b)) == b)) += () + +let lemma_sub_spec (#a:typ) (b:buffer a) + (i:UInt32.t) + (len:UInt32.t) + (h: HS.mem) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= length b /\ + live h b + )) + (ensures ( + UInt32.v i + UInt32.v len <= length b /\ + live h (gsub b i len) /\ + as_seq h (gsub b i len) == Seq.slice (as_seq h b) (UInt32.v i) (UInt32.v i + UInt32.v len) + )) + [SMTPatOr [ + [SMTPat (gsub b i len); SMTPat (live h b)]; + [SMTPat (live h (gsub b i len))] + ]] + = Seq.lemma_eq_intro (as_seq h (gsub b i len)) (Seq.slice (as_seq h b) (UInt32.v i) (UInt32.v i + UInt32.v len)) + +(* Same here *) + +let goffset + (#a: typ) + (b: buffer a) + (i: UInt32.t) +: Ghost (buffer a) + (requires (UInt32.v i <= length b)) + (ensures (fun b' -> + UInt32.v i <= length b /\ + b' == gsub b i (UInt32.sub (P.buffer_length b) i) + )) += P.gsub_buffer b i (UInt32.sub (P.buffer_length b) i) + +let offset + (#a:typ) + (b:buffer a) + (i:UInt32.t) +: HST.Stack (buffer a) + (requires (fun h0 -> + live h0 b /\ + UInt32.v i <= length b + )) + (ensures (fun h0 b' h1 -> + h1 == h0 /\ + UInt32.v i <= length b /\ + b' == goffset b i + )) += P.offset_buffer b i + +let lemma_offset_spec + (#a: typ) + (b: buffer a) + (i: UInt32.t) + (h: HS.mem) +: Lemma + (requires ( + UInt32.v i <= length b /\ + live h b + )) + (ensures ( + UInt32.v i <= length b /\ + as_seq h (goffset b i) == Seq.slice (as_seq h b) (UInt32.v i) (length b) + )) += () + +val eqb: #a:typ -> b1:buffer a -> b2:buffer a + -> len:UInt32.t + -> HST.ST bool + (requires (fun h -> + hasEq (P.type_of_typ a) /\ + UInt32.v len <= length b1 /\ + UInt32.v len <= length b2 /\ + live h b1 /\ + live h b2 + )) + (ensures (fun h0 z h1 -> + h1 == h0 /\ + UInt32.v len <= length b1 /\ + UInt32.v len <= length b2 /\ + (z <==> equal h0 (gsub b1 0ul len) h0 
(gsub b2 0ul len)) + )) + +let eqb #a b1 b2 len = + P.buffer_contents_equal b1 b2 len + +(* JP: if the [val] is not specified, there's an issue with these functions + * taking an extra unification parameter at extraction-time... *) +val op_Array_Access: #a:typ -> b:buffer a -> n:UInt32.t -> HST.Stack (P.type_of_typ a) + (requires (fun h -> UInt32.v n < length b /\ live h b)) + (ensures (fun h0 z h1 -> h1 == h0 /\ + UInt32.v n < length b /\ + z == Seq.index (as_seq h0 b) (UInt32.v n))) +let op_Array_Access #a b n = index #a b n + +val op_Array_Assignment: #a:typ -> b:buffer a -> n:UInt32.t -> z:P.type_of_typ a -> HST.Stack unit + (requires (fun h -> live h b /\ UInt32.v n < length b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live h1 b /\ UInt32.v n < length b + /\ P.modifies (P.loc_pointer (P.gpointer_of_buffer_cell b n)) h0 h1 + /\ as_seq h1 b == Seq.upd (as_seq h0 b) (UInt32.v n) z )) +let op_Array_Assignment #a b n z = upd #a b n z + +val live_slice_middle + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (h: HS.mem) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= length b /\ + live h (gsub b 0ul i) /\ + live h (gsub b i len) /\ ( + let off = UInt32.add i len in + live h (gsub b off (UInt32.sub (P.buffer_length b) off)) + ))) + (ensures (live h b)) + [SMTPat (live h (gsub b i len))] + +let live_slice_middle #t b i len h = + P.buffer_readable_gsub_merge b i len h + +(** Corresponds to memcpy *) +val blit: #t:typ + -> a:buffer t + -> idx_a:UInt32.t + -> b:buffer t{disjoint a b} + -> idx_b:UInt32.t + -> len:UInt32.t{UInt32.v idx_a + UInt32.v len <= length a /\ UInt32.v idx_b + UInt32.v len <= length b} + -> HST.Stack unit + (requires (fun h -> live h a /\ live h b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live h0 a /\ live h1 b /\ live h1 a /\ P.modifies (P.loc_buffer (gsub b idx_b len)) h0 h1 + /\ Seq.slice (as_seq h1 b) (UInt32.v idx_b) (UInt32.v idx_b + UInt32.v len) == + Seq.slice (as_seq h0 a) (UInt32.v idx_a) (UInt32.v idx_a + UInt32.v len) + /\ Seq.slice (as_seq h1 b) 0 (UInt32.v idx_b) == + Seq.slice (as_seq h0 b) 0 (UInt32.v idx_b) + /\ Seq.slice (as_seq h1 b) (UInt32.v idx_b+UInt32.v len) (length b) == + Seq.slice (as_seq h0 b) (UInt32.v idx_b+UInt32.v len) (length b) )) + +let blit #t a idx_a b idx_b len = + if len = 0ul + then () + else begin + let h0 = HST.get () in + P.copy_buffer_contents a idx_a b idx_b len; + let h1 = HST.get () in + P.buffer_readable_modifies_gsub b idx_b len h0 h1 (P.loc_buffer (P.gsub_buffer b idx_b len)); + assert (let g = (gsub b (UInt32.add idx_b len) (UInt32.sub (P.buffer_length b) (UInt32.add idx_b len))) in as_seq h1 g == as_seq h0 g); + assert (as_seq h1 (gsub b idx_b len) == as_seq h0 (gsub a idx_a len)); + assert (let g = gsub b 0ul idx_b in as_seq h1 g == as_seq h0 g) + end + +(** Corresponds to memset *) +val fill: #t:typ + -> b:buffer t + -> z: P.type_of_typ t + -> len:UInt32.t{UInt32.v len <= length b} + -> HST.Stack unit + (requires (fun h -> live h b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live h1 b /\ + P.modifies (P.loc_buffer (gsub b 0ul len)) h0 h1 + /\ Seq.slice (as_seq h1 b) 0 (UInt32.v len) == Seq.create (UInt32.v len) z + /\ Seq.slice (as_seq h1 b) (UInt32.v len) (length b) == + Seq.slice (as_seq h0 b) (UInt32.v len) (length b) )) + +let fill #t b z len = + let h0 = HST.get () in + P.fill_buffer b 0ul len z; + let h1 = HST.get () in + assert (as_seq h1 (gsub b 0ul len) == Seq.slice (as_seq h1 b) 0 (UInt32.v len)); + assert (let g = gsub b len (UInt32.sub (P.buffer_length b) len) in as_seq h1 g == as_seq h0 g) diff --git a/stage0/ulib/legacy/FStar.Constructive.fst b/stage0/ulib/legacy/FStar.Constructive.fst new file mode 100644 index 00000000000..249b52e6ca8 --- /dev/null +++ 
b/stage0/ulib/legacy/FStar.Constructive.fst @@ -0,0 +1,72 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Constructive +type cand p1 p2 = + | Conj : h1:p1 -> h2:p2 -> cand p1 p2 + +type cor p1 p2 = + | IntroL : h:p1 -> cor p1 p2 + | IntroR : h:p2 -> cor p1 p2 + +type cimp a b = a -> Tot b + +type ciff a b = cand (cimp a b) (cimp b a) + +noeq type cexists (#a:Type) (p:a -> Type) = + | ExIntro : x:a -> h:p x -> cexists p + +// val ex_intro_x : #a:Type -> #p:(a -> Type) -> projectee:cexists p -> Tot a +// let ex_intro_x #a #p = function +// | ExIntro x _ -> x + +type ceq (#a:Type) x : a -> Type = + | Refl : ceq #a x x + +type ceq_type (a:Type) : Type -> Type = + | ReflType : ceq_type a a + +val eq_ind : #a:Type -> x:a -> p:(a -> Type) -> f:p x -> y:a -> e:ceq x y -> Tot (p y) +let eq_ind #a x p f y _ = f + +val ceq_eq : #a:Type{hasEq a} -> #x:a -> #y:a -> h:(ceq x y) -> Lemma (x = y) +let ceq_eq #a #x #y h = () + +val ceq_congruence : #a:Type -> #b:Type -> #x:a -> #y:a -> ceq x y -> + f:(a -> GTot b) -> GTot (ceq (f x) (f y)) +let ceq_congruence #a #b #x #y h f = Refl #_ #(f x) //refuse to infer terms with non-Tot effect + +val ceq_symm : #a:Type -> #x:a -> #y:a -> ceq x y -> Tot (ceq y x) +let ceq_symm #a #x #y h = Refl + +val ceq_trans : #a:Type -> #x:a -> #y:a -> #z:a -> ceq x y -> ceq y z -> Tot (ceq x z) +let ceq_trans #a #x #y #z hxy hyz = Refl + +type ctrue = + | I : ctrue + +(* hopefully this is an empty type *) +type cfalse : Type = + +val cfalse_elim : #a:Type -> cfalse -> Tot a +let cfalse_elim #a f = match f with + +val false_elim2 : #a:Type -> cfalse -> Tot a +let false_elim2 #a x = false_elim () + +val false_elim : #a:Type -> u:unit{false} -> Tot a +let false_elim #a u = false_elim () + +type cnot (p:Type) = cimp p cfalse diff --git a/stage0/ulib/legacy/FStar.Crypto.fst b/stage0/ulib/legacy/FStar.Crypto.fst new file mode 100644 index 00000000000..5d091b225fe --- /dev/null +++ b/stage0/ulib/legacy/FStar.Crypto.fst @@ -0,0 +1,55 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Crypto + +open FStar.Bytes + +type nbytes (n:nat) = b:bytes{length b == n} (* fixed-length bytes *) +type tag = nbytes 20 + +assume val sha1: bytes -> Tot tag + +type hmac_key = nbytes 16 +assume val hmac_sha1_keygen: unit -> Tot hmac_key +assume val hmac_sha1: hmac_key -> bytes -> Tot tag +assume val hmac_sha1_verify: hmac_key -> bytes -> tag -> Tot bool + +type block = nbytes 32 +type cipher = nbytes (op_Multiply 2 32) +type aes_key = nbytes 16 +type aes_iv = nbytes 16 + +assume val aes_128_keygen: unit -> Tot aes_key +assume val aes_128_decrypt: aes_key -> cipher -> Tot block +assume val aes_128_encrypt: k:aes_key -> p:block -> c:cipher {aes_128_decrypt k c = p} + +assume val aes_128_ivgen: unit -> Tot aes_iv +assume val aes_128_cbc_decrypt: aes_key -> aes_iv -> bytes -> Tot bytes +assume val aes_128_cbc_encrypt: k:aes_key -> iv:aes_iv -> p:bytes -> c:bytes {aes_128_cbc_decrypt k iv c = p} + +type rsa_pkey = {modulus: bytes; exponent: bytes} +type rsa_skey = rsa_pkey & bytes + +assume val rsa_keygen: unit -> Tot rsa_skey +assume val rsa_pk: rsa_skey -> Tot rsa_pkey +assume val rsa_pkcs1_encrypt: rsa_pkey -> bytes -> bytes +assume val rsa_pkcs1_decrypt: rsa_skey -> bytes -> bytes + +type sigv = bytes +assume val rsa_sha1: rsa_skey -> bytes -> sigv +assume val rsa_sha1_verify: rsa_pkey -> bytes -> sigv -> bool + + diff --git a/stage0/ulib/legacy/FStar.ErasedLogic.fst b/stage0/ulib/legacy/FStar.ErasedLogic.fst new file mode 100644 index 00000000000..a0419e9de35 --- /dev/null +++ b/stage0/ulib/legacy/FStar.ErasedLogic.fst @@ -0,0 +1,44 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.ErasedLogic + +(*F* 's exists should be thought of Coq's sigT. It is totally unerased *) +(*Coq has 2 more versions, with increasing degree of erasure*) +(* https://coq.inria.fr/library/Coq.Init.Specif.html#sigT *) +open FStar.Ghost + +(*you can get the witness x, that the proof part is erased*) +(*https://coq.inria.fr/library/Coq.Init.Specif.html#sig*) +type sig_ (a:Type) (p: a->Type) = exists (x:a). (erased (p x)) + + +(*you get nothing. Of course, in ghost contexts, or to build other erased date, you get everything*) +(* https://coq.inria.fr/library/Coq.Init.Logic.html#ex *) +type ex (a:Type) (p:a->Type) = erased (exists (x:a). (p x)) + + +(*how to use the above:*) +(*assuming that existentials in F* are constructive. 
If so, the following 2 assumes must be definable*) +assume val exists_proj1 : #a:Type -> #p:(a->Type) -> (exists x.p x) -> GTot a +assume val mkexists : #a:Type -> #p:(a->Type) -> x:a -> (p x) -> Tot (exists x.p x) + + +val ex_proj1 : #a:Type -> #p:(a->Type) -> ex a p -> Tot (erased a) +let ex_proj1 #a #p e = (elift1 (exists_proj1 #a #p)) e + + +val gex_proj1 : #a:Type -> #p:(a->Type) -> (ex a p) -> GTot a +let gex_proj1 #a #p e = (reveal (ex_proj1 e)) diff --git a/stage0/ulib/legacy/FStar.Error.fst b/stage0/ulib/legacy/FStar.Error.fst new file mode 100644 index 00000000000..69289a270be --- /dev/null +++ b/stage0/ulib/legacy/FStar.Error.fst @@ -0,0 +1,75 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +/// A library for optional results, +/// where the error case carries some payload +module FStar.Error + +type optResult 'a 'b = + | Error of 'a + | Correct of 'b + +/// allowing inverting optResult without having +/// to globally increase the fuel just for this +let invertOptResult (a:Type) (b:Type) + : Lemma + (requires True) + (ensures (forall (x:optResult a b). Error? x \/ Correct? x)) + [SMTPat (optResult a b)] + = allow_inversion (optResult a b) + +irreducible +let perror + (file:string) + (line:int) + (text:string) + : Tot string + = text + +let correct + (#a:Type) + (#r:Type) + (x:r) + : Tot (optResult a r) + = Correct x + +(* Both unexpected and unreachable are aliases for failwith; + they indicate code that should never be executed at runtime. + This is verified by typing only for the unreachable function; + this matters e.g. when dynamic errors are security-critical *) +let rec unexpected + (#a:Type) + (s:string) + : Div a + (requires True) + (ensures (fun _ -> True)) + = let _ = FStar.IO.debug_print_string ("Platform.Error.unexpected: " ^ s) in + unexpected s + +let rec unreachable + (#a:Type) + (s:string) + : Div a + (requires False) + (ensures (fun _ -> False)) + = let _ = FStar.IO.debug_print_string ("Platform.Error.unreachable: " ^ s) in + unreachable s + +irreducible +let if_ideal + (f:unit -> Tot 'a) + (x:'a) + : Tot 'a + = x diff --git a/stage0/ulib/legacy/FStar.HyperStack.IO.fst b/stage0/ulib/legacy/FStar.HyperStack.IO.fst new file mode 100644 index 00000000000..cf77906b7f1 --- /dev/null +++ b/stage0/ulib/legacy/FStar.HyperStack.IO.fst @@ -0,0 +1,20 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.HyperStack.IO + +open FStar.HyperStack.All + +assume val print_string (s:Prims.string) : Dv unit diff --git a/stage0/ulib/legacy/FStar.Matrix2.fsti b/stage0/ulib/legacy/FStar.Matrix2.fsti new file mode 100644 index 00000000000..3ab1866e8b6 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Matrix2.fsti @@ -0,0 +1,106 @@ +(* + Copyright 2008-2015 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Matrix2 + +#set-options "--initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0" + +assume new type matrix2 : nat -> nat -> Type -> Type +open FStar.Seq + +(* Destructors *) +val index: #a:Type -> #m:nat -> #n:nat -> matrix2 m n a -> i:nat{i < m} -> j:nat{j < n} -> Tot a +val row: #a:Type -> #m:nat -> #n:nat -> matrix2 m n a -> i:nat{i < m} -> Tot (seq a) +val col: #a:Type -> #m:nat -> #n:nat -> matrix2 m n a -> j:nat{j < n} -> Tot (seq a) + +(* Constructors *) +val create: #a:Type -> m:nat -> n:nat -> a -> Tot (matrix2 m n a) +val emp: #a:Type -> Tot (matrix2 0 0 a) +val upd: #a:Type -> #m:nat -> #n:nat -> x:matrix2 m n a -> i:nat{i < m} -> j:nat{j < n} -> a -> Tot (matrix2 m n a) +val upd_row:#a:Type -> #m:nat -> #n:nat -> x:matrix2 m n a -> i:nat{i < m} -> r:seq a{Seq.length r = n} -> Tot (matrix2 m n a) +val upd_col:#a:Type -> #m:nat -> #n:nat -> x:matrix2 m n a -> j:nat{j < n} -> c:seq a{Seq.length c = m} -> Tot (matrix2 m n a) + +(* Lemmas about length *) +val lemma_row_len: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> i:nat{i < m} -> Lemma + (requires True) + (ensures (Seq.length (row x i) = n)) + [SMTPat (Seq.length (row x i))] + +val lemma_col_len: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> j:nat{j < n} -> Lemma + (requires True) + (ensures (Seq.length (col x j) = m)) + [SMTPat (Seq.length (col x j))] + +(* Lemmas about index *) +val lemma_index_create: #a:Type -> m:nat -> n:nat -> v:a -> i:nat{i < m} -> j:nat{j < n} -> Lemma + (requires True) + (ensures (index (create m n v) i j == v)) + [SMTPat (index (create m n v) i j)] + +val lemma_index_upd1: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> i:nat{i < m} -> j:nat{j < n} -> v:a -> Lemma + (requires True) + (ensures (index (upd x i j v) i j == v)) + [SMTPat (index (upd x i j v) i j)] + +val lemma_index_upd2: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> i:nat{i < m} -> j:nat{j < n} -> i':nat{i' < m} -> j':nat{j' < n} -> v:a -> Lemma + (requires (i<>i' \/ j<>j')) + (ensures (index (upd x i j v) i' j' == index x i' j')) + [SMTPat (index (upd x i j v) i' j')] + +val lemma_index_row: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> i:nat{i < m} -> j:nat{j < n} -> Lemma + (requires (True)) + (ensures (Seq.index (row x i) j == index x i j)) + [SMTPat (Seq.index (row x i) j)] + +val lemma_index_col: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> i:nat{i < m} -> j:nat{j < n} -> Lemma + (requires (True)) + (ensures (Seq.index (col x j) i == index x i j)) + [SMTPat (Seq.index (col x j) i)] + +val lemma_index_upd_row1: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> s:seq a{Seq.length s = n} -> i:nat{i < m} -> j:nat{j < n} -> Lemma + (requires (True)) + (ensures (index (upd_row x i s) i j == Seq.index s j)) + [SMTPat (index (upd_row x i s) i 
j)] + +val lemma_index_upd_row2: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> s:seq a{Seq.length s = n} -> i:nat{i < m} -> i':nat{i' < m /\ i <> i'} -> j:nat{j < n} -> Lemma + (requires (True)) + (ensures (index (upd_row x i s) i' j == index x i' j)) + [SMTPat (index (upd_row x i s) i' j)] + +val lemma_index_upd_col1: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> s:seq a{Seq.length s = m} -> j:nat{j < n} -> i:nat{i < m} -> Lemma + (requires (True)) + (ensures (index (upd_col x j s) i j == Seq.index s i)) + [SMTPat (index (upd_col x j s) i j)] + +val lemma_index_upd_col2: #a:Type -> m:nat -> n:nat -> x:matrix2 m n a -> s:seq a{Seq.length s = m} -> j:nat{j < n} -> i:nat{i < m} -> j':nat{j' < n /\ j' <> j} -> Lemma + (requires (True)) + (ensures (index (upd_col x j s) i j' == index x i j')) + [SMTPat (index (upd_col x j s) i j')] + +(* Extensionality *) +assume new type eq: #a:Type -> #m:nat -> #n:nat -> matrix2 m n a -> matrix2 m n a -> Type +val lemma_eq_intro: #a:Type -> #m:nat -> #n:nat -> x1:matrix2 m n a -> x2:matrix2 m n a -> Lemma + (requires ((forall (i:nat{i < m}) (j:nat{j < n}). index x1 i j == index x2 i j))) + (ensures (eq x1 x2)) + [SMTPat (eq x1 x2)] + +val lemma_eq_elim: #a:Type -> #m:nat -> #n:nat -> x1:matrix2 m n a -> x2:matrix2 m n a -> Lemma + (requires (eq x1 x2)) + (ensures (x1==x2)) + [SMTPat (eq x1 x2)] + + diff --git a/stage0/ulib/legacy/FStar.Pointer.Base.fst b/stage0/ulib/legacy/FStar.Pointer.Base.fst new file mode 100644 index 00000000000..adb2e570a31 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Base.fst @@ -0,0 +1,4260 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Pointer.Base + +module DM = FStar.DependentMap +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(*** Definitions *) + +(** Pointers to data of type t. + + This defines two main types: + - `npointer (t: typ)`, a pointer that may be "NULL"; + - `pointer (t: typ)`, a pointer that cannot be "NULL" + (defined as a refinement of `npointer`). + + `nullptr #t` (of type `npointer t`) represents the "NULL" value. +*) + +#set-options "--initial_fuel 1 --initial_ifuel 1 --max_fuel 1 --max_ifuel 1" + +type step: (from: typ) -> (to: typ) -> Tot Type0 = + | StepField: + (l: struct_typ) -> + (fd: struct_field l) -> + step (TStruct l) (typ_of_struct_field l fd) + | StepUField: + (l: union_typ) -> + (fd: struct_field l) -> + step (TUnion l) (typ_of_struct_field l fd) + | StepCell: + (length: UInt32.t) -> + (value: typ) -> + (index: UInt32.t { UInt32.v index < UInt32.v length } ) -> + step (TArray length value) value + +type path (from: typ) : (to: typ) -> Tot Type0 = + | PathBase: + path from from + | PathStep: + (through: typ) -> + (to: typ) -> + (p: path from through) -> + (s: step through to) -> + path from to + +let step_typ_depth + (#from #to: typ) + (s: step from to) +: Lemma + (typ_depth from > typ_depth to) += match s with + | StepUField l fd + | StepField l fd -> + typ_depth_typ_of_struct_field l.fields fd + | _ -> () + +let rec path_typ_depth + (#from #to: typ) + (p: path from to) +: Lemma + (ensures ( + typ_depth from >= typ_depth to /\ ( + (~ (PathBase? 
p)) ==> typ_depth from <> typ_depth to + ))) + (decreases p) += match p with + | PathBase -> () + | PathStep _ _ p' s -> + path_typ_depth p'; + step_typ_depth s + +(* +private +let not_cell + (#from #to: typ) + (p: path from to) +: GTot bool += match p with + | PathStep _ _ _ (StepCell _ _ _) -> false + | _ -> true + +private type array_path (from: typ) (to_elem: typ) : (length: UInt32.t) -> Tot Type0 = +| PSingleton: + (p: path from to_elem { not_cell p } ) -> + array_path from to_elem 1ul +| PArray: + length: UInt32.t -> + path from (TArray length to_elem) -> + array_path from to_elem length + +private let path' (from: typ) (to: typ) : Tot Type0 = + if TArray? to + then + let length = TArray?.length to in + (array_path from (TArray?.t to) length * (offset: UInt32.t & (length': UInt32.t {UInt32.v offset + UInt32.v length' <= UInt32.v length}))) + else path from to +*) + +noeq type _npointer (to : typ): Type0 = + | Pointer: + (from: typ) -> + (contents: HS.aref) -> + (p: path from to) -> + _npointer to + | NullPtr + +let npointer (t: typ): Tot Type0 = + _npointer t + +(** The null pointer *) + +let nullptr (#t: typ): Tot (npointer t) = NullPtr + +let g_is_null (#t: typ) (p: npointer t) : GTot bool = + match p with + | NullPtr -> true + | _ -> false + +let g_is_null_intro + (t: typ) +: Lemma + (g_is_null (nullptr #t) == true) += () + +(** Buffers *) + +let not_an_array_cell (#t: typ) (p: pointer t) : GTot bool = + match Pointer?.p p with + | PathStep _ _ _ (StepCell _ _ _) -> false + | _ -> true + +noeq type buffer_root (t: typ) = +| BufferRootSingleton: + (p: pointer t { not_an_array_cell p } ) -> + buffer_root t +| BufferRootArray: + (#max_length: array_length_t) -> + (p: pointer (TArray max_length t)) -> + buffer_root t + +let buffer_root_length (#t: typ) (b: buffer_root t): Tot UInt32.t = match b with +| BufferRootSingleton _ -> 1ul +| BufferRootArray #_ #len _ -> len + +noeq type _buffer (t: typ) = +| Buffer: + (broot: buffer_root t) -> + (bidx: UInt32.t) -> + (blength: UInt32.t { UInt32.v bidx + UInt32.v blength <= UInt32.v (buffer_root_length broot) } ) -> + _buffer t +let buffer (t: typ): Tot Type0 = _buffer t + +(** Helper for the interpretation of unions. + + A C union is interpreted as a dependent pair of a key and a value (which + depends on the key). The intent is for the key to be ghost, as it will not + exist at runtime (C unions are untagged). + + Therefore, + - `gtdata_get_key` (defined below) is in `GTot`, and + - `gtdata_get_value` asks for the key `k` to read, and a proof that `k` + matches the ghost key. 
+*) + +let gtdata (* ghostly-tagged data *) + (key: eqtype) + (value: (key -> Tot Type0)) +: Tot Type0 += ( k: key & value k ) + +let _gtdata_get_key + (#key: eqtype) + (#value: (key -> Tot Type0)) + (u: gtdata key value) +: Tot key += dfst u + +let gtdata_get_key + (#key: eqtype) + (#value: (key -> Tot Type0)) + (u: gtdata key value) +: GTot key // important: must be Ghost, the tag is not actually stored in memory += _gtdata_get_key u + +let gtdata_get_value + (#key: eqtype) + (#value: (key -> Tot Type0)) + (u: gtdata key value) + (k: key) +: Pure (value k) + (requires (gtdata_get_key u == k)) + (ensures (fun _ -> True)) += let (| _, v |) = u in v + +let gtdata_create + (#key: eqtype) + (#value: (key -> Tot Type0)) + (k: key) + (v: value k) +: Pure (gtdata key value) + (requires True) + (ensures (fun x -> gtdata_get_key x == k /\ gtdata_get_value x k == v)) += (| k, v |) + +let gtdata_extensionality + (#key: eqtype) + (#value: (key -> Tot Type0)) + (u1 u2: gtdata key value) +: Lemma + (requires ( + let k = gtdata_get_key u1 in ( + k == gtdata_get_key u2 /\ + gtdata_get_value u1 k == gtdata_get_value u2 k + ))) + (ensures (u1 == u2)) += () + +(* Interprets a type code (`typ`) as a FStar type (`Type0`). *) +let rec type_of_typ' + (t: typ) +: Tot Type0 += match t with + | TBase b -> type_of_base_typ b + | TStruct l -> + struct l + | TUnion l -> + union l + | TArray length t -> + array length (type_of_typ' t) + | TPointer t -> + pointer t + | TNPointer t -> + npointer t + | TBuffer t -> + buffer t +and struct (l: struct_typ) : Tot Type0 = + DM.t (struct_field l) (type_of_struct_field' l (fun x -> type_of_typ' x)) +and union (l: union_typ) : Tot Type0 = + gtdata (struct_field l) (type_of_struct_field' l (fun x -> type_of_typ' x)) + +let rec type_of_typ'_eq (t: typ) : Lemma (type_of_typ' t == type_of_typ t) + [SMTPat (type_of_typ t)] += + match t with + | TArray _ t' -> type_of_typ'_eq t' + | TPointer t' -> type_of_typ'_eq t' + | TNPointer t' -> type_of_typ'_eq t' + | TBuffer t' -> type_of_typ'_eq t' + | _ -> () + +(** Interpretation of unions, as ghostly-tagged data + (see `gtdata` for more information). +*) +let _union_get_key (#l: union_typ) (v: union l) : Tot (struct_field l) = _gtdata_get_key v + +let struct_sel (#l: struct_typ) (s: struct l) (f: struct_field l) : Tot (type_of_struct_field l f) = + DM.sel s f + +let struct_upd (#l: struct_typ) (s: struct l) (f: struct_field l) (v: type_of_struct_field l f) : Tot (struct l) = + DM.upd s f v + +let struct_create_fun (l: struct_typ) (f: ((fd: struct_field l) -> Tot (type_of_struct_field l fd))) : Tot (struct l) = + DM.create #(struct_field l) #(type_of_struct_field' l (fun x -> type_of_typ' x)) f + +let struct_sel_struct_create_fun l f fd = () + +let union_get_key (#l: union_typ) (v: union l) : GTot (struct_field l) = gtdata_get_key v + +let union_get_value #l v fd = gtdata_get_value v fd + +let union_create l fd v = gtdata_create fd v + +(** For any `t: typ`, `dummy_val t` provides a default value of this type. + + This is useful to represent uninitialized data. 
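+    For example, the dummy value for `TBase TUInt32` is `0ul`, and the dummy value for a struct is built field-by-field from the dummy values of its field types.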
+*) +let rec dummy_val + (t: typ) +: Tot (type_of_typ t) += match t with + | TBase b -> + begin match b with + | TUInt -> 0 + | TUInt8 -> UInt8.uint_to_t 0 + | TUInt16 -> UInt16.uint_to_t 0 + | TUInt32 -> UInt32.uint_to_t 0 + | TUInt64 -> UInt64.uint_to_t 0 + | TInt -> 0 + | TInt8 -> Int8.int_to_t 0 + | TInt16 -> Int16.int_to_t 0 + | TInt32 -> Int32.int_to_t 0 + | TInt64 -> Int64.int_to_t 0 + | TChar -> 'c' + | TBool -> false + | TUnit -> () + end + | TStruct l -> + struct_create_fun l (fun f -> ( + dummy_val (typ_of_struct_field l f) + )) + | TUnion l -> + let dummy_field : string = List.Tot.hd (List.Tot.map fst l.fields) in + union_create l dummy_field (dummy_val (typ_of_struct_field l dummy_field)) + | TArray length t -> Seq.create (UInt32.v length) (dummy_val t) + | TPointer t -> Pointer t HS.dummy_aref PathBase + | TNPointer t -> NullPtr #t + | TBuffer t -> Buffer (BufferRootSingleton (Pointer t HS.dummy_aref PathBase)) 0ul 1ul + +(** The interpretation of type codes (`typ`) defined previously (`type_of_typ`) + maps codes to fully defined FStar types. In other words, a struct is + interpreted as a dependent map where all fields have a well defined value. + + However, in practice, C structures (or any other type) can be uninitialized + or partially-initialized. + + To account for that: + + - First, we define an alternative interpretation of type codes, + `otype_of_typ`, which makes uninitialized data explicit (essentially + wrapping all interpretations with `option`). + + This concrete interpretation is what is stored in the model of the heap, + and what is manipulated internally. As it is quite verbose, it is not + exposed to the user. + + - Then, interpretations with explicit uninitialized data (`otype_of_type t`) + can be mapped to fully-initialized data (`type_of_type t`) by inserting + dummy values. This is done by the `value_of_ovalue` function. + + - Finally, reading from a fully-initialized data is guarded by a `readable` + predicate, which ensures that the dummy values cannot be accessed, and + therefore that reading uninitialized data is actually forbidden. +*) + +let rec otype_of_typ + (t: typ) +: Tot Type0 += match t with + | TBase b -> option (type_of_base_typ b) + | TStruct l -> + option (DM.t (struct_field l) (type_of_struct_field' l otype_of_typ)) + | TUnion l -> + option (gtdata (struct_field l) (type_of_struct_field' l otype_of_typ)) + | TArray length t -> + option (array length (otype_of_typ t)) + | TPointer t -> + option (pointer t) + | TNPointer t -> + option (npointer t) + | TBuffer t -> + option (buffer t) + +let otype_of_struct_field + (l: struct_typ) +: Tot (struct_field l -> Tot Type0) += type_of_struct_field' l otype_of_typ + +let otype_of_typ_otype_of_struct_field + (l: struct_typ) + (f: struct_field l) +: Lemma + (otype_of_typ (typ_of_struct_field l f) == otype_of_struct_field l f) + [SMTPat (type_of_typ (typ_of_struct_field l f))] += () + +let otype_of_typ_base + (b: base_typ) +: Lemma + (otype_of_typ (TBase b) == option (type_of_base_typ b)) + [SMTPat (otype_of_typ (TBase b))] += () + +let otype_of_typ_array + (len: array_length_t ) + (t: typ) +: Lemma + (otype_of_typ (TArray len t) == option (array len (otype_of_typ t))) + [SMTPat (otype_of_typ (TArray len t))] += () + +let ostruct (l: struct_typ) = option (DM.t (struct_field l) (otype_of_struct_field l)) + +let ostruct_sel (#l: struct_typ) (s: ostruct l { Some? s }) (f: struct_field l) : Tot (otype_of_struct_field l f) = + DM.sel (Some?.v s) f + +let ostruct_upd (#l: struct_typ) (s: ostruct l { Some? 
s }) (f: struct_field l) (v: otype_of_struct_field l f) : Tot (s': ostruct l { Some? s' } ) = + Some (DM.upd (Some?.v s) f v) + +let ostruct_create (l: struct_typ) (f: ((fd: struct_field l) -> Tot (otype_of_struct_field l fd))) : Tot (s': ostruct l { Some? s' } ) = + Some (DM.create #(struct_field l) #(otype_of_struct_field l) f) + +let otype_of_typ_struct + (l: struct_typ) +: Lemma + (otype_of_typ (TStruct l) == ostruct l) + [SMTPat (otype_of_typ (TStruct l))] += assert_norm(otype_of_typ (TStruct l) == ostruct l) + +let ounion (l: struct_typ) = option (gtdata (struct_field l) (otype_of_struct_field l)) + +let ounion_get_key (#l: union_typ) (v: ounion l { Some? v } ) : Tot (struct_field l) = _gtdata_get_key (Some?.v v) + +let ounion_get_value + (#l: union_typ) + (v: ounion l { Some? v } ) + (fd: struct_field l) +: Pure (otype_of_struct_field l fd) + (requires (ounion_get_key v == fd)) + (ensures (fun _ -> True)) += gtdata_get_value (Some?.v v) fd + +let ounion_create + (l: union_typ) + (fd: struct_field l) + (v: otype_of_struct_field l fd) +: Tot (ounion l) += Some (gtdata_create fd v) + +let otype_of_typ_union + (l: union_typ) +: Lemma + (otype_of_typ (TUnion l) == ounion l) + [SMTPat (otype_of_typ (TUnion l))] += assert_norm (otype_of_typ (TUnion l) == ounion l) + +let struct_field_is_readable + (l: struct_typ) + (ovalue_is_readable: ( + (t: typ) -> + (v: otype_of_typ t) -> + Pure bool + (requires (t << l)) + (ensures (fun _ -> True)) + )) + (v: ostruct l { Some? v } ) + (s: string) +: Tot bool += if List.Tot.mem s (List.Tot.map fst l.fields) + then ovalue_is_readable (typ_of_struct_field l s) (ostruct_sel v s) + else true + +let rec ovalue_is_readable + (t: typ) + (v: otype_of_typ t) +: Tot bool + (decreases t) += match t with + | TStruct l -> + let (v: ostruct l) = v in + Some? v && ( + let keys = List.Tot.map fst l.fields in + let pred + (t': typ) + (v: otype_of_typ t') + : Pure bool + (requires (t' << l)) + (ensures (fun _ -> True)) + = ovalue_is_readable t' v + in + List.Tot.for_all (struct_field_is_readable l pred v) keys + ) + | TUnion l -> + let v : ounion l = v in + Some? v && ( + let k = ounion_get_key v in + ovalue_is_readable (typ_of_struct_field l k) (ounion_get_value v k) + ) + | TArray len t -> + let (v: option (array len (otype_of_typ t))) = v in + Some? v && + Seq.for_all (ovalue_is_readable t) (Some?.v v) + | TBase t -> + let (v: option (type_of_base_typ t)) = v in + Some? v + | TPointer t -> + let (v: option (pointer t)) = v in + Some? v + | TNPointer t -> + let (v: option (npointer t)) = v in + Some? v + | TBuffer t -> + let (v: option (buffer t)) = v in + Some? v + +let ovalue_is_readable_struct_intro' + (l: struct_typ) + (v: otype_of_typ (TStruct l)) +: Lemma + (requires ( + let (v: ostruct l) = v in ( + Some? v /\ + List.Tot.for_all (struct_field_is_readable l (fun x y -> ovalue_is_readable x y) v) (List.Tot.map fst l.fields) + ))) + (ensures (ovalue_is_readable (TStruct l) v)) += assert_norm (ovalue_is_readable (TStruct l) v == true) + +let ovalue_is_readable_struct_intro + (l: struct_typ) + (v: otype_of_typ (TStruct l)) +: Lemma + (requires ( + let (v: ostruct l) = v in ( + Some? v /\ ( + forall (f: struct_field l) . 
+ ovalue_is_readable (typ_of_struct_field l f) (ostruct_sel v f) + )))) + (ensures (ovalue_is_readable (TStruct l) v)) += List.Tot.for_all_mem (struct_field_is_readable l (fun x y -> ovalue_is_readable x y) v) (List.Tot.map fst l.fields); + ovalue_is_readable_struct_intro' l v + +let ovalue_is_readable_struct_elim + (l: struct_typ) + (v: otype_of_typ (TStruct l)) + (fd: struct_field l) +: Lemma + (requires (ovalue_is_readable (TStruct l) v)) + (ensures ( + let (v: ostruct l) = v in ( + Some? v /\ + ovalue_is_readable (typ_of_struct_field l fd) (ostruct_sel v fd) + ))) + [SMTPat (ovalue_is_readable (typ_of_struct_field l fd) (ostruct_sel v fd))] += let (v: ostruct l) = v in + assert_norm (ovalue_is_readable (TStruct l) v == List.Tot.for_all (struct_field_is_readable l (fun x y -> ovalue_is_readable x y) v) (List.Tot.map fst l.fields)); + assert (List.Tot.for_all (struct_field_is_readable l (fun x y -> ovalue_is_readable x y) v) (List.Tot.map fst l.fields)); + List.Tot.for_all_mem (struct_field_is_readable l (fun x y -> ovalue_is_readable x y) v) (List.Tot.map fst l.fields); + assert (ovalue_is_readable (typ_of_struct_field l fd) (ostruct_sel v fd)) + +let ovalue_is_readable_array_elim + (#len: array_length_t ) + (#t: typ) + (v: otype_of_typ (TArray len t)) + (i: UInt32.t { UInt32.v i < UInt32.v len } ) +: Lemma + (requires (ovalue_is_readable (TArray len t) v)) + (ensures ( + let (v: option (array len (otype_of_typ t))) = v in ( + Some? v /\ + ovalue_is_readable t (Seq.index (Some?.v v) (UInt32.v i)) + ))) += () + +let ovalue_is_readable_array_intro + (#len: array_length_t ) + (#t: typ) + (v: otype_of_typ (TArray len t)) +: Lemma + (requires ( + let (v: option (array len (otype_of_typ t))) = v in ( + Some? v /\ ( + forall (i: UInt32.t { UInt32.v i < UInt32.v len } ) . 
+ ovalue_is_readable t (Seq.index (Some?.v v) (UInt32.v i)) + )))) + (ensures (ovalue_is_readable (TArray len t) v)) += let (v: option (array len (otype_of_typ t))) = v in + let (v: array len (otype_of_typ t)) = Some?.v v in + let f + (i: nat { i < UInt32.v len } ) + : Lemma + (ovalue_is_readable t (Seq.index v i)) + = let (j : UInt32.t { UInt32.v j < UInt32.v len } ) = UInt32.uint_to_t i in + assert (ovalue_is_readable t (Seq.index v (UInt32.v j))) + in + Classical.forall_intro f + +let ostruct_field_of_struct_field + (l: struct_typ) + (ovalue_of_value: ( + (t: typ) -> + (v: type_of_typ t) -> + Pure (otype_of_typ t) + (requires (t << l)) + (ensures (fun _ -> True)) + )) + (v: struct l) + (f: struct_field l) +: Tot (otype_of_struct_field l f) += ovalue_of_value (typ_of_struct_field l f) (struct_sel #l v f) + +(* TODO: move to Seq.Base *) + +let seq_init_index + (#a:Type) (len:nat) (contents:(i:nat { i < len } -> Tot a)) (i: nat) +: Lemma + (requires (i < len)) + (ensures (i < len /\ Seq.index (Seq.init len contents) i == contents i)) + [SMTPat (Seq.index (Seq.init len contents) i)] += Seq.init_index len contents + +let rec ovalue_of_value + (t: typ) + (v: type_of_typ t) +: Tot (otype_of_typ t) + (decreases t) += match t with + | TStruct l -> + let oval + (t' : typ) + (v' : type_of_typ t') + : Pure (otype_of_typ t') + (requires (t' << l)) + (ensures (fun _ -> True)) + = ovalue_of_value t' v' + in + ostruct_create l (ostruct_field_of_struct_field l oval v) + | TArray len t -> + let (v: array len (type_of_typ t)) = v in + assert (UInt32.v len == Seq.length v); + let f + (i: nat {i < UInt32.v len}) + : Tot (otype_of_typ t) + = ovalue_of_value t (Seq.index v i) + in + let (v': array len (otype_of_typ t)) = Seq.init (UInt32.v len) f in + Some v' + | TUnion l -> + let (v: union l) = v in + let k = _union_get_key v in + ounion_create l k (ovalue_of_value (typ_of_struct_field l k) (union_get_value v k)) + | _ -> Some v + +let ovalue_is_readable_ostruct_field_of_struct_field + (l: struct_typ) + (ih: ( + (t: typ) -> + (v: type_of_typ t) -> + Lemma + (requires (t << l)) + (ensures (ovalue_is_readable t (ovalue_of_value t v))) + )) + (v: struct l) + (f: struct_field l) +: Lemma + (ovalue_is_readable (typ_of_struct_field l f) (ostruct_field_of_struct_field l ovalue_of_value v f)) += ih (typ_of_struct_field l f) (struct_sel #l v f) + +let rec ovalue_is_readable_ovalue_of_value + (t: typ) + (v: type_of_typ t) +: Lemma + (requires True) + (ensures (ovalue_is_readable t (ovalue_of_value t v))) + (decreases t) + [SMTPat (ovalue_is_readable t (ovalue_of_value t v))] += match t with + | TStruct l -> + let (v: struct l) = v in + let (v': ostruct l) = ovalue_of_value (TStruct l) v in + let phi + (t: typ) + (v: type_of_typ t) + : Lemma + (requires (t << l)) + (ensures (ovalue_is_readable t (ovalue_of_value t v))) + = ovalue_is_readable_ovalue_of_value t v + in + Classical.forall_intro (ovalue_is_readable_ostruct_field_of_struct_field l phi v); + ovalue_is_readable_struct_intro l v' + | TArray len t -> + let (v: array len (type_of_typ t)) = v in + let (v': otype_of_typ (TArray len t)) = ovalue_of_value (TArray len t) v in + let (v': array len (otype_of_typ t)) = Some?.v v' in + let phi + (i: nat { i < Seq.length v' } ) + : Lemma + (ovalue_is_readable t (Seq.index v' i)) + = ovalue_is_readable_ovalue_of_value t (Seq.index v i) + in + Classical.forall_intro phi + | TUnion l -> + let (v: union l) = v in + let k = _union_get_key v in + ovalue_is_readable_ovalue_of_value (typ_of_struct_field l k) (union_get_value v k) 
+ | _ -> () + +let rec value_of_ovalue + (t: typ) + (v: otype_of_typ t) +: Tot (type_of_typ t) + (decreases t) += match t with + | TStruct l -> + let (v: ostruct l) = v in + if Some? v + then + let phi + (f: struct_field l) + : Tot (type_of_struct_field l f) + = value_of_ovalue (typ_of_struct_field l f) (ostruct_sel v f) + in + struct_create_fun l phi + else dummy_val t + | TArray len t' -> + let (v: option (array len (otype_of_typ t'))) = v in + begin match v with + | None -> dummy_val t + | Some v -> + let phi + (i: nat { i < UInt32.v len } ) + : Tot (type_of_typ t') + = value_of_ovalue t' (Seq.index v i) + in + Seq.init (UInt32.v len) phi + end + | TUnion l -> + let (v: ounion l) = v in + begin match v with + | None -> dummy_val t + | _ -> + let k = ounion_get_key v in + union_create l k (value_of_ovalue (typ_of_struct_field l k) (ounion_get_value v k)) + end + | TBase b -> + let (v: option (type_of_base_typ b)) = v in + begin match v with + | None -> dummy_val t + | Some v -> v + end + | TPointer t' -> + let (v: option (pointer t')) = v in + begin match v with + | None -> dummy_val t + | Some v -> v + end + | TNPointer t' -> + let (v: option (npointer t')) = v in + begin match v with + | None -> dummy_val t + | Some v -> v + end + | TBuffer t' -> + let (v: option (buffer t')) = v in + begin match v with + | None -> dummy_val t + | Some v -> v + end + +let ovalue_of_value_array_index + (#len: array_length_t) + (t' : typ) + (v: array len (type_of_typ t')) + (sv: array len (otype_of_typ t')) +: Lemma + (requires (ovalue_of_value (TArray len t') v == Some sv)) + (ensures (forall (i: nat) . i < UInt32.v len ==> Seq.index sv i == ovalue_of_value t' (Seq.index v i))) += () + + +let value_of_ovalue_array_index + (#len: array_length_t) + (t': typ) + (sv: array len (otype_of_typ t')) +: Lemma + (ensures (forall (i: nat) . i < UInt32.v len ==> Seq.index (value_of_ovalue (TArray len t') (Some sv)) i == value_of_ovalue t' (Seq.index sv i))) += () + +#set-options "--z3rlimit 16" + +let rec value_of_ovalue_of_value + (t: typ) + (v: type_of_typ t) +: Lemma + (value_of_ovalue t (ovalue_of_value t v) == v) + [SMTPat (value_of_ovalue t (ovalue_of_value t v))] += match t with + | TStruct l -> + let v : struct l = v in + let v' : struct l = value_of_ovalue t (ovalue_of_value t v) in + let phi + (f: struct_field l) + : Lemma + (struct_sel #l v' f == struct_sel #l v f) + = value_of_ovalue_of_value (typ_of_struct_field l f) (struct_sel #l v f) + in + Classical.forall_intro phi; + DM.equal_intro v' v; + DM.equal_elim #(struct_field l) #(type_of_struct_field' l (fun x -> type_of_typ' x)) v' v + | TArray len t' -> + let (v: array len (type_of_typ t')) = v in + let ov : option (array len (otype_of_typ t')) = ovalue_of_value (TArray len t') v in + assert (Some? ov); + let sv : array len (otype_of_typ t') = Some?.v ov in + assert (Seq.length sv == UInt32.v len); +// assert (forall (i : nat { i < UInt32.v len } ) . Seq.index sv i == ovalue_of_value t' (Seq.index v i)); + ovalue_of_value_array_index t' v sv; + let v' : array len (type_of_typ t') = value_of_ovalue t ov in + assert (Seq.length v' == UInt32.v len); +// assert (forall (i: nat { i < UInt32.v len } ) . 
Seq.index v' i == value_of_ovalue t' (Seq.index sv i)); + value_of_ovalue_array_index t' sv; + let phi + (i: nat { i < UInt32.v len } ) + : Lemma + (value_of_ovalue t' (ovalue_of_value t' (Seq.index v i)) == Seq.index v i) + = value_of_ovalue_of_value t' (Seq.index v i) + in + Classical.forall_intro phi; + Seq.lemma_eq_intro v' v; + Seq.lemma_eq_elim v' v + | TUnion l -> + let v : union l = v in + let k = _union_get_key v in + value_of_ovalue_of_value (typ_of_struct_field l k) (union_get_value v k) + | _ -> () + +let none_ovalue + (t: typ) +: Tot (otype_of_typ t) += match t with + | TStruct l -> (None <: ostruct l) + | TArray len t' -> (None <: option (array len (otype_of_typ t'))) + | TUnion l -> (None <: ounion l) + | TBase b -> (None <: option (type_of_base_typ b)) + | TPointer t' -> (None <: option (pointer t')) + | TNPointer t' -> (None <: option (npointer t')) + | TBuffer t' -> (None <: option (buffer t')) + +let not_ovalue_is_readable_none_ovalue + (t: typ) +: Lemma + (ovalue_is_readable t (none_ovalue t) == false) += () + +(*** Semantics of pointers *) + +(** Pointer paths *) + +let step_sel + (#from: typ) + (#to: typ) + (m': otype_of_typ from) + (s: step from to) += match s with + | StepField l fd -> + let (m': ostruct l) = m' in + begin match m' with + | None -> none_ovalue to + | _ -> ostruct_sel m' fd + end + | StepUField l fd -> + let (m' : ounion l) = m' in + begin match m' with + | None -> none_ovalue to + | _ -> + if fd = ounion_get_key m' + then ounion_get_value m' fd + else none_ovalue to + end + | StepCell length value i -> + let (m': option (array length (otype_of_typ to))) = m' in + begin match m' with + | None -> none_ovalue to + | Some m' -> Seq.index m' (UInt32.v i) + end + +(* TODO: we used to have this: +<<< +let ovalue_is_readable_step_sel + (#from: typ) + (#to: typ) + (m': otype_of_typ from) + (s: step from to) +: Lemma + (requires (ovalue_is_readable from m')) + (ensures (ovalue_is_readable to (step_sel m' s))) + [SMTPat (ovalue_is_readable to (step_sel m' s))] += match s with + | StepField l fd -> ovalue_is_readable_struct_elim l m' fd + | _ -> () +>>> +Which is, of course, wrong with unions. 
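Indeed, by the `StepUField` case of `step_sel` just above, selecting a field `fd` from a union whose currently active key differs from `fd` yields `none_ovalue`, and `none_ovalue` is never readable (`not_ovalue_is_readable_none_ovalue`): readability of the whole union does not give readability of an arbitrary `step_sel` out of it.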
So we have to specialize this rule for each step: +*) + +let ovalue_is_readable_step_sel_cell + (#length: array_length_t) + (#value: typ) + (m': otype_of_typ (TArray length value)) + (index: UInt32.t { UInt32.v index < UInt32.v length } ) +: Lemma + (requires (ovalue_is_readable (TArray length value) m')) + (ensures (ovalue_is_readable value (step_sel m' (StepCell length value index)))) + [SMTPat (ovalue_is_readable value (step_sel m' (StepCell length value index)))] += () + +let ovalue_is_readable_step_sel_field + (#l: struct_typ) + (m: ostruct l) + (fd: struct_field l) +: Lemma + (requires (ovalue_is_readable (TStruct l) m)) + (ensures (ovalue_is_readable (typ_of_struct_field l fd) (step_sel m (StepField l fd)))) + [SMTPat (ovalue_is_readable (typ_of_struct_field l fd) (step_sel m (StepField l fd)))] += () + +let ovalue_is_readable_step_sel_union_same + (#l: union_typ) + (m: ounion l) + (fd: struct_field l) +: Lemma + (requires ( + ovalue_is_readable (TUnion l) m /\ + ounion_get_key m == fd + )) + (ensures (ovalue_is_readable (typ_of_struct_field l fd) (step_sel m (StepUField l fd)))) += () + +let step_sel_none_ovalue + (#from: typ) + (#to: typ) + (s: step from to) +: Lemma + (step_sel (none_ovalue from) s == none_ovalue to) += () + +let rec path_sel + (#from: typ) + (#to: typ) + (m: otype_of_typ from) + (p: path from to) +: Tot (otype_of_typ to) + (decreases p) += match p with + | PathBase -> m + | PathStep through' to' p' s -> + let (m': otype_of_typ through') = path_sel m p' in + step_sel m' s + +let rec path_sel_none_ovalue + (#from: typ) + (#to: typ) + (p: path from to) +: Lemma + (requires True) + (ensures (path_sel (none_ovalue from) p == none_ovalue to)) + (decreases p) += match p with + | PathBase -> () + | PathStep through' to' p' s -> + path_sel_none_ovalue p' + +let step_upd + (#from: typ) + (#to: typ) + (m: otype_of_typ from) + (s: step from to) + (v: otype_of_typ to) +: Tot (otype_of_typ from) + (decreases s) += match s with + | StepField l fd -> + let (m: ostruct l) = m in + begin match m with + | None -> + (* whole structure does not exist yet, + so create one with only one field initialized, + and all others uninitialized *) + let phi + (fd' : struct_field l) + : Tot (otype_of_struct_field l fd') + = if fd' = fd + then v + else none_ovalue (typ_of_struct_field l fd') + in + ostruct_create l phi + | Some _ -> ostruct_upd m fd v + end + | StepCell len _ i -> + let (m: option (array len (otype_of_typ to))) = m in + begin match m with + | None -> + (* whole array does not exist yet, + so create one with only one cell initialized, + and all others uninitialized *) + let phi + (j: nat { j < UInt32.v len } ) + : Tot (otype_of_typ to) + = if j = UInt32.v i + then v + else none_ovalue to + in + let (m' : option (array len (otype_of_typ to))) = + Some (Seq.init (UInt32.v len) phi) + in + m' + | Some m -> + let (m' : option (array len (otype_of_typ to))) = + Some (Seq.upd m (UInt32.v i) v) + in + m' + end + | StepUField l fd -> + (* overwrite the whole union with the new field *) + ounion_create l fd v + +let step_sel_upd_same + (#from: typ) + (#to: typ) + (m: otype_of_typ from) + (s: step from to) + (v: otype_of_typ to) +: Lemma + (step_sel (step_upd m s v) s == v) += () + +let rec path_upd + (#from: typ) + (#to: typ) + (m: otype_of_typ from) + (p: path from to) + (v: otype_of_typ to) +: Tot (otype_of_typ from) + (decreases p) += match p with + | PathBase -> v + | PathStep through' to' p' st -> + let s = path_sel m p' in + path_upd m p' (step_upd s st v) + +let rec 
path_sel_upd_same + (#from: typ) + (#to: typ) + (m: otype_of_typ from) + (p: path from to) + (v: otype_of_typ to) +: Lemma + (requires True) + (ensures (path_sel (path_upd m p v) p == v)) + (decreases p) + [SMTPat (path_sel (path_upd m p v) p)] += match p with + | PathBase -> () + | PathStep through' to' p' st -> + let s = path_sel m p' in + step_sel_upd_same s st v; + let s' = step_upd s st v in + path_sel_upd_same m p' s' + +let rec path_concat + (#from: typ) + (#through: typ) + (#to: typ) + (p: path from through) + (q: path through to) +: Pure (path from to) + (requires True) + (ensures (fun _ -> True)) + (decreases q) += match q with + | PathBase -> p + | PathStep through' to' q' st -> PathStep through' to' (path_concat p q') st + +let path_concat_base_r + (#from: typ) + (#to: typ) + (p: path from to) +: Lemma + (ensures (path_concat p PathBase == p)) += () + +let rec path_concat_base_l + (#from: typ) + (#to: typ) + (p: path from to) +: Lemma + (requires True) + (ensures (path_concat PathBase p == p)) + (decreases p) + [SMTPat (path_concat PathBase p)] += match p with + | PathBase -> () + | PathStep _ _ p' _ -> path_concat_base_l p' + +let rec path_concat_assoc + (#t0 #t1 #t2 #t3: typ) + (p01: path t0 t1) + (p12: path t1 t2) + (p23: path t2 t3) +: Lemma + (requires True) + (ensures (path_concat (path_concat p01 p12) p23 == path_concat p01 (path_concat p12 p23))) + (decreases p23) += match p23 with + | PathBase -> () + | PathStep _ _ p23' _ -> path_concat_assoc p01 p12 p23' + +let rec path_sel_concat + (#from: typ) + (#through: typ) + (#to: typ) + (m: otype_of_typ from) + (p: path from through) + (q: path through to) +: Lemma + (requires True) + (ensures (path_sel m (path_concat p q) == path_sel (path_sel m p) q)) + (decreases q) + [SMTPat (path_sel m (path_concat p q))] += match q with + | PathBase -> () + | PathStep _ _ q' _ -> path_sel_concat m p q' + +let rec path_upd_concat + (#from: typ) + (#through: typ) + (#to: typ) + (m: otype_of_typ from) + (p: path from through) + (q: path through to) + (v: otype_of_typ to) +: Lemma + (requires True) + (ensures (path_upd m (path_concat p q) v == path_upd m p (path_upd (path_sel m p) q v))) + (decreases q) + [SMTPat (path_upd m (path_concat p q) v)] += match q with + | PathBase -> () + | PathStep through' to' q' st -> + let (s: otype_of_typ through') = path_sel m (path_concat p q') in + let (s': otype_of_typ through') = step_upd s st v in + path_upd_concat m p q' s' + +// TODO: rename as: prefix_of; use infix notation (p1 `prefix_of` p2) +let rec path_includes + (#from: typ) + (#to1 #to2: typ) + (p1: path from to1) + (p2: path from to2) +: Ghost bool + (requires True) + (ensures (fun _ -> True)) + (decreases p2) += (to1 = to2 && p1 = p2) || (match p2 with + | PathBase -> false + | PathStep _ _ p2' _ -> + path_includes p1 p2' + ) + +let rec path_includes_base + (#from: typ) + (#to: typ) + (p: path from to) +: Lemma + (requires True) + (ensures (path_includes (PathBase #from) p)) + (decreases p) + [SMTPat (path_includes PathBase p)] += match p with + | PathBase -> () + | PathStep _ _ p2' _ -> path_includes_base p2' + +let path_includes_refl + (#from #to: typ) + (p: path from to) +: Lemma + (requires True) + (ensures (path_includes p p)) + [SMTPat (path_includes p p)] += () + +let path_includes_step_r + (#from #through #to: typ) + (p: path from through) + (s: step through to) +: Lemma + (requires True) + (ensures (path_includes p (PathStep through to p s))) + [SMTPat (path_includes p (PathStep through to p s))] += () + +let rec 
path_includes_trans + (#from #to1 #to2 #to3: typ) + (p1: path from to1) + (p2: path from to2) + (p3: path from to3 {path_includes p1 p2 /\ path_includes p2 p3}) +: Lemma + (requires True) + (ensures (path_includes p1 p3)) + (decreases p3) += FStar.Classical.or_elim + #(to2 == to3 /\ p2 == p3) + #(match p3 with + | PathBase -> False + | PathStep _ _ p3' _ -> + path_includes p2 p3') + #(fun _ -> path_includes p1 p3) + (fun _ -> ()) + (fun _ -> match p3 with + | PathBase -> assert False + | PathStep _ _ p3' _ -> + path_includes_trans p1 p2 p3' + ) + +let rec path_includes_ind + (#from: typ) + (x:((#to1: typ) -> + (#to2: typ) -> + (p1: path from to1) -> + (p2: path from to2 {path_includes p1 p2} ) -> + GTot Type0)) + (h_step: + ((#through: typ) -> + (#to: typ) -> + (p: path from through) -> + (s: step through to { path_includes p (PathStep through to p s) } ) -> + Lemma (x p (PathStep through to p s)))) + (h_refl: + ((#to: typ) -> + (p: path from to {path_includes p p}) -> + Lemma (x p p))) + (h_trans: + ((#to1: typ) -> + (#to2: typ) -> + (#to3: typ) -> + (p1: path from to1) -> + (p2: path from to2) -> + (p3: path from to3 {path_includes p1 p2 /\ path_includes p2 p3 /\ path_includes p1 p3 /\ x p1 p2 /\ x p2 p3}) -> + Lemma (x p1 p3))) + (#to1: typ) + (#to2: typ) + (p1: path from to1) + (p2: path from to2 {path_includes p1 p2}) +: Lemma + (requires True) + (ensures (x p1 p2)) + (decreases p2) += FStar.Classical.or_elim + #(to1 == to2 /\ p1 == p2) + #(match p2 with + | PathBase -> False + | PathStep _ _ p' _ -> path_includes p1 p') + #(fun _ -> x p1 p2) + (fun _ -> h_refl p1) + (fun _ -> match p2 with + | PathBase -> assert False + | PathStep _ _ p2' st -> + let _ = path_includes_ind x h_step h_refl h_trans p1 p2' in + let _ = path_includes_step_r p2' st in + let _ = h_step p2' st in + h_trans p1 p2' p2 + ) + +let rec path_length + (#from #to: typ) + (p: path from to) +: Tot nat + (decreases p) += match p with + | PathBase -> 0 + | PathStep _ _ p' _ -> 1 + path_length p' + +let path_includes_length + (#from: typ) + (#to1 #to2: typ) + (p1: path from to1) + (p2: path from to2 {path_includes p1 p2}) +: Lemma + (ensures (path_length p1 <= path_length p2)) += path_includes_ind + (fun #to1_ #to2_ p1_ p2_ -> path_length p1_ <= path_length p2_) + (fun #through #to p st -> ()) + (fun #to p -> ()) + (fun #to1_ #to2_ #to3_ p1_ p2_ p3_ -> ()) + p1 p2 + +let path_includes_step_l + (#from: typ) + (#through: typ) + (#to: typ) + (p: path from through) + (s: step through to) +: Lemma + (requires True) + (ensures (~ (path_includes (PathStep through to p s) p))) + [SMTPat (path_includes (PathStep through to p s) p)] += assert (path_length (PathStep through to p s) > path_length p); + FStar.Classical.forall_intro (path_includes_length #from #to #through (PathStep through to p s)) + +let rec path_includes_concat + (#from: typ) + (#through: typ) + (#to: typ) + (p: path from through) + (q: path through to) +: Lemma + (requires True) + (ensures (path_includes p (path_concat p q))) + (decreases q) + [SMTPat (path_includes p (path_concat p q))] += match q with + | PathBase -> () + | PathStep _ _ q' _ -> path_includes_concat p q' + +let path_includes_exists_concat + (#from #through: typ) + (p: path from through) + (#to: typ) + (q: path from to { path_includes p q } ) +: Lemma + (ensures (exists (r: path through to) . q == path_concat p r)) += path_includes_ind + (fun #to1_ #to2_ p1_ p2_ -> exists r . 
p2_ == path_concat p1_ r) + (fun #through #to_ p s -> + let r = PathStep through to_ PathBase s in + assert_norm (PathStep through to_ p s == path_concat p r) + ) + (fun #to p -> FStar.Classical.exists_intro (fun r -> p == path_concat p r) PathBase) + (fun #to1_ #to2_ #to3_ p1_ p2_ p3_ -> + FStar.Classical.exists_elim (exists r . p3_ == path_concat p1_ r) #_ #(fun r12 -> p2_ == path_concat p1_ r12) () (fun r12 -> + FStar.Classical.exists_elim (exists r . p3_ == path_concat p1_ r) #_ #(fun r23 -> p3_ == path_concat p2_ r23) () (fun r23 -> + path_concat_assoc p1_ r12 r23; + FStar.Classical.exists_intro (fun r -> p3_ == path_concat p1_ r) (path_concat r12 r23) + ) + ) + ) + p q + +let path_concat_includes + (#from #through: typ) + (p: path from through) + (phi: ( + (#to: typ) -> + (p': path from to) -> + Ghost Type0 + (requires (path_includes p p')) + (ensures (fun _ -> True)) + )) + (f: ( + (to: typ) -> + (p': path through to) -> + Lemma + (ensures (phi (path_concat p p'))) + )) + (#to: typ) + (q: path from to) +: Lemma + (requires (path_includes p q)) + (ensures (path_includes p q /\ phi q)) += Classical.forall_intro_2 f; + path_includes_exists_concat p q + +let step_disjoint + (#from: typ) + (#to1 #to2: typ) + (s1: step from to1) + (s2: step from to2) +: GTot bool += match s1 with + | StepField _ fd1 -> + let (StepField _ fd2) = s2 in + fd1 <> fd2 + | StepCell _ _ i1 -> + let (StepCell _ _ i2) = s2 in + UInt32.v i1 <> UInt32.v i2 + | StepUField _ _ -> + (* two fields of the same union are never disjoint *) + false + +let step_eq + (#from: typ) + (#to1 #to2: typ) + (s1: step from to1) + (s2: step from to2) +: Tot (b: bool { b = true <==> to1 == to2 /\ s1 == s2 } ) += match s1 with + | StepField l1 fd1 -> + let (StepField _ fd2) = s2 in + fd1 = fd2 + | StepCell _ _ i1 -> + let (StepCell _ _ i2) = s2 in + i1 = i2 + | StepUField l1 fd1 -> + let (StepUField _ fd2) = s2 in + fd1 = fd2 + +let step_disjoint_not_eq + (#from: typ) + (#to1 #to2: typ) + (s1: step from to1) + (s2: step from to2) +: Lemma + (requires (step_disjoint s1 s2 == true)) + (ensures (step_eq s1 s2 == false)) += () (* Note: the converse is now wrong, due to unions *) + +let step_disjoint_sym + (#from: typ) + (#to1 #to2: typ) + (s1: step from to1) + (s2: step from to2) +: Lemma + (requires (step_disjoint s1 s2)) + (ensures (step_disjoint s2 s1)) += () + +noeq type path_disjoint_t (#from: typ): + (#to1: typ) -> + (#to2: typ) -> + (p1: path from to1) -> + (p2: path from to2) -> + Type0 += | PathDisjointStep: + (#through: typ) -> + (#to1: typ) -> + (#to2: typ) -> + (p: path from through) -> + (s1: step through to1) -> + (s2: step through to2 { step_disjoint s1 s2 } ) -> + path_disjoint_t (PathStep through to1 p s1) (PathStep through to2 p s2) + | PathDisjointIncludes: + (#to1: typ) -> + (#to2: typ) -> + (p1: path from to1) -> + (p2: path from to2) -> + (#to1': typ) -> + (#to2': typ) -> + (p1': path from to1' {path_includes p1 p1'}) -> + (p2': path from to2' {path_includes p2 p2'}) -> + path_disjoint_t p1 p2 -> + path_disjoint_t p1' p2' + +let rec path_disjoint_t_rect + (#from: typ) + (x: + ((#value1: typ) -> + (#value2: typ) -> + (p1: path from value1) -> + (p2: path from value2) -> + (h: path_disjoint_t p1 p2) -> + GTot Type)) + (h_step: + ((#through: typ) -> + (#to1: typ) -> + (#to2: typ) -> + (p: path from through) -> + (s1: step through to1) -> + (s2: step through to2 { step_disjoint s1 s2 } ) -> + (h: path_disjoint_t (PathStep through to1 p s1) (PathStep through to2 p s2)) -> + GTot (x (PathStep through to1 p s1) (PathStep 
through to2 p s2) h))) + (h_includes: + ((#value1: typ) -> + (#value2: typ) -> + (p1: path from value1) -> + (p2: path from value2) -> + (#value1': typ) -> + (#value2': typ) -> + (p1': path from value1' {path_includes p1 p1'}) -> + (p2': path from value2' {path_includes p2 p2'}) -> + (h: path_disjoint_t p1 p2) -> + (h': path_disjoint_t p1' p2') -> + (ihx: x p1 p2 h) -> + GTot (x p1' p2' h'))) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) + (h: path_disjoint_t p1 p2) +: Ghost (x p1 p2 h) + (requires True) + (ensures (fun _ -> True)) + (decreases h) += match h with + | PathDisjointStep p s1 s2 -> h_step p s1 s2 h + | PathDisjointIncludes p1_ p2_ p1' p2' h_ -> h_includes p1_ p2_ p1' p2' h_ h (path_disjoint_t_rect x h_step h_includes p1_ p2_ h_) + +let path_disjoint + (#from: typ) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) +: GTot Type0 += squash (path_disjoint_t p1 p2) + +#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true" +let path_disjoint_ind + (#from: typ) + (x: + ((#value1: typ) -> + (#value2: typ) -> + (p1: path from value1) -> + (p2: path from value2 {path_disjoint p1 p2} ) -> + GTot Type)) + (h_step: + ((#through: typ) -> + (#to1: typ) -> + (#to2: typ) -> + (p: path from through) -> + (s1: step through to1) -> + (s2: step through to2 { step_disjoint s1 s2 /\ path_disjoint (PathStep through to1 p s1) (PathStep through to2 p s2) } ) -> + Lemma (x (PathStep through to1 p s1) (PathStep through to2 p s2) ))) + (h_includes: + ((#value1: typ) -> + (#value2: typ) -> + (p1: path from value1) -> + (p2: path from value2) -> + (#value1': typ) -> + (#value2': typ) -> + (p1': path from value1' {path_includes p1 p1'}) -> + (p2': path from value2' {path_includes p2 p2' /\ path_disjoint p1 p2 /\ path_disjoint p1' p2' /\ x p1 p2}) -> + Lemma (x p1' p2'))) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2 { path_disjoint p1 p2 } ) +: Lemma (x p1 p2) += let h : squash (path_disjoint_t p1 p2) = FStar.Squash.join_squash () in + FStar.Squash.bind_squash h (fun (h: path_disjoint_t p1 p2) -> + path_disjoint_t_rect + (fun #v1 #v2 p1 p2 h -> let _ = FStar.Squash.return_squash h in squash (x p1 p2)) + (fun #through #to1 #to2 p s1 s2 h -> let _ = FStar.Squash.return_squash h in h_step p s1 s2) + (fun #v1 #v2 p1 p2 #v1' #v2' p1' p2' h h' hx -> + let _ = FStar.Squash.return_squash h in + let _ = FStar.Squash.return_squash h' in + let _ = FStar.Squash.return_squash hx in + h_includes p1 p2 p1' p2') + p1 p2 h) +#pop-options + +let path_disjoint_step + (#from: typ) + (#through: typ) + (#to1: typ) + (#to2: typ) + (p: path from through) + (s1: step through to1) + (s2: step through to2 { step_disjoint s1 s2 } ) +: Lemma + (requires True) + (ensures (path_disjoint (PathStep through to1 p s1) (PathStep through to2 p s2))) + [SMTPat (path_disjoint (PathStep through to1 p s1) (PathStep through to2 p s2))] += FStar.Classical.give_witness (FStar.Squash.return_squash (PathDisjointStep p s1 s2)) + +#push-options "--smtencoding.valid_intro true --smtencoding.valid_elim true" +let path_disjoint_includes + (#from: typ) + (#to1: typ) + (#to2: typ) + (p1: path from to1) + (p2: path from to2) + (#to1': typ) + (#to2': typ) + (p1': path from to1') + (p2': path from to2') +: Lemma + (requires (path_disjoint p1 p2 /\ path_includes p1 p1' /\ path_includes p2 p2')) + (ensures (path_disjoint p1' p2')) += let h : squash (path_disjoint_t p1 p2) = FStar.Squash.join_squash () in + FStar.Squash.bind_squash h 
(fun h -> FStar.Squash.return_squash (PathDisjointIncludes p1 p2 p1' p2' h)) +#pop-options + +let path_disjoint_includes_l + (#from: typ) + (#to1: typ) + (#to2: typ) + (p1: path from to1) + (p2: path from to2) + (#to1': typ) + (p1': path from to1') +: Lemma + (requires (path_disjoint p1 p2 /\ path_includes p1 p1')) + (ensures (path_disjoint p1' p2)) + [SMTPatOr [ + [SMTPat (path_disjoint p1 p2); SMTPat (path_includes p1 p1')]; + [SMTPat (path_disjoint p1' p2); SMTPat (path_includes p1 p1')]; + ]] += path_disjoint_includes p1 p2 p1' p2 + +let path_disjoint_sym + (#from: typ) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) +: Lemma + (requires (path_disjoint p1 p2)) + (ensures (path_disjoint p2 p1)) + [SMTPatOr [[SMTPat (path_disjoint p1 p2)]; [SMTPat (path_disjoint p2 p1)]]] += path_disjoint_ind + (fun #v1 #v2 p1 p2 -> path_disjoint p2 p1) + (fun #through #to1 #to2 p s1 s2 -> path_disjoint_step p s2 s1) + (fun #v1 #v2 p1 p2 #v1' #v2' p1' p2' -> path_disjoint_includes p2 p1 p2' p1') + p1 p2 + +let rec path_equal + (#from: typ) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) +: Tot (b: bool { b == true <==> (value1 == value2 /\ p1 == p2) } ) + (decreases p1) += match p1 with + | PathBase -> PathBase? p2 + | PathStep _ _ p1' s1 -> + PathStep? p2 && ( + let (PathStep _ _ p2' s2) = p2 in ( + path_equal p1' p2' && + step_eq s1 s2 + )) + +let rec path_length_concat + (#t0 #t1 #t2: typ) + (p01: path t0 t1) + (p12: path t1 t2) +: Lemma + (requires True) + (ensures (path_length (path_concat p01 p12) == path_length p01 + path_length p12)) + (decreases p12) += match p12 with + | PathBase -> () + | PathStep _ _ p' s' -> path_length_concat p01 p' + +let rec path_concat_inj_l + (#from #through1: typ) + (p1_: path from through1) + (#v1: typ) + (p1: path through1 v1) + (#through2 #v2: typ) + (p2_: path from through2) + (p2: path through2 v2) +: Lemma + (requires (path_equal (path_concat p1_ p1) (path_concat p2_ p2) == true /\ path_length p1_ == path_length p2_)) + (ensures (path_equal p1_ p2_ == true /\ path_equal p1 p2 == true)) + (decreases p1) += path_length_concat p1_ p1; + path_length_concat p2_ p2; + match p1 with + | PathBase -> () + | PathStep _ _ p1' s1 -> + let (PathStep _ _ p2' s2) = p2 in + path_concat_inj_l p1_ p1' p2_ p2' + +type path_disjoint_decomp_t + (#from: typ) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) +: Type += | PathDisjointDecomp: + (d_through: typ) -> + (d_p: path from d_through) -> + (d_v1: typ) -> + (d_s1: step d_through d_v1) -> + (d_p1': path d_v1 value1) -> + (d_v2: typ) -> + (d_s2: step d_through d_v2) -> + (d_p2': path d_v2 value2) -> + squash ( + step_disjoint d_s1 d_s2 == true /\ + p1 == path_concat (PathStep _ _ d_p d_s1) d_p1' /\ + p2 == path_concat (PathStep _ _ d_p d_s2) d_p2' + ) -> + path_disjoint_decomp_t p1 p2 + +let path_disjoint_decomp_includes + (#from: typ) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) + (#value1': typ) + (#value2': typ) + (p1': path from value1') + (p2': path from value2') +: Lemma + (requires ( + path_includes p1 p1' /\ + path_includes p2 p2' /\ ( + exists (d : path_disjoint_decomp_t p1 p2) . True + ))) + (ensures (exists (d: path_disjoint_decomp_t p1' p2') . 
True)) += let f + (q1: path value1 value1' ) + (q2: path value2 value2' ) + (d: path_disjoint_decomp_t p1 p2) + : Lemma + (requires ( + p1' == path_concat p1 q1 /\ + p2' == path_concat p2 q2 + )) + (ensures (exists (d: path_disjoint_decomp_t p1' p2') . True)) + = let (PathDisjointDecomp _ p _ s1 p1_ _ s2 p2_ _) = d in + path_concat_assoc (PathStep _ _ p s1) p1_ q1; + path_concat_assoc (PathStep _ _ p s2) p2_ q2; + let d' : path_disjoint_decomp_t p1' p2' = + PathDisjointDecomp _ p _ s1 (path_concat p1_ q1) _ s2 (path_concat p2_ q2) () + in + Classical.exists_intro (fun _ -> True) d' + in + let g + (q1: path value1 value1' ) + (q2: path value2 value2' ) + (d: path_disjoint_decomp_t p1 p2) + : Lemma + (( + p1' == path_concat p1 q1 /\ + p2' == path_concat p2 q2 + ) ==> ( + exists (d: path_disjoint_decomp_t p1' p2') . True + )) + = Classical.move_requires (f q1 q2) d // FIXME: annoying to repeat those type annotations above. WHY WHY WHY can't I just use (fun q1 q2 d -> Classical.move_requires (f q1 q2) d) as an argument of Classical.forall_intro_3 below instead of this g??? + in + path_includes_exists_concat p1 p1' ; + path_includes_exists_concat p2 p2' ; + let _ : squash (exists (d: path_disjoint_decomp_t p1' p2') . True) = + Classical.forall_intro_3 g + in + () + +let path_disjoint_decomp + (#from: typ) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) +: Lemma + (requires (path_disjoint p1 p2)) + (ensures (exists (d: path_disjoint_decomp_t p1 p2) . True)) += path_disjoint_ind + (fun #v1 #v2 p1 p2 -> exists (d: path_disjoint_decomp_t #from #v1 #v2 p1 p2) . True) + (fun #through #to1 #to2 p s1 s2 -> + let d : path_disjoint_decomp_t (PathStep _ _ p s1) (PathStep _ _ p s2) = + PathDisjointDecomp _ p _ s1 PathBase _ s2 PathBase () + in + Classical.exists_intro (fun _ -> True) d + ) + (fun #v1 #v2 p1 p2 #v1' #v2' p1' p2' -> path_disjoint_decomp_includes p1 p2 p1' p2') + p1 p2 + +let path_disjoint_not_path_equal + (#from: typ) + (#value1: typ) + (#value2: typ) + (p1: path from value1) + (p2: path from value2) +: Lemma + (requires (path_disjoint p1 p2)) + (ensures (path_equal p1 p2 == false)) += let f + (d: path_disjoint_decomp_t p1 p2) + : Lemma (path_equal p1 p2 == false) + = if path_equal p1 p2 + then + let (PathDisjointDecomp _ p _ s1 p1_ _ s2 p2_ _) = d in + path_concat_inj_l (PathStep _ _ p s1) p1_ (PathStep _ _ p s2) p2_ + else () + in + path_disjoint_decomp p1 p2; + Classical.forall_intro f + +let rec path_destruct_l + (#t0 #t2: typ) + (p: path t0 t2) +: Tot ( + x: option (t1: typ & (s: step t0 t1 & (p' : path t1 t2 { p == path_concat (PathStep _ _ PathBase s) p' /\ path_length p' < path_length p } ) ) ) + { None? x <==> PathBase? p } + ) + (decreases p) += match p with + | PathBase -> None + | PathStep _ _ p' s -> + begin match path_destruct_l p' with + | None -> Some (| _, (| s, PathBase |) |) + | Some (| t_, (| s_, p_ |) |) -> + Some (| t_, (| s_, PathStep _ _ p_ s |) |) + end + +let rec path_equal' + (#from #to1 #to2: typ) + (p1: path from to1) + (p2: path from to2) +: Tot (b: bool { b == true <==> to1 == to2 /\ p1 == p2 } ) + (decreases (path_length p1)) += match path_destruct_l p1 with + | None -> PathBase? 
p2 + | Some (| t1, (| s1, p1' |) |) -> + begin match path_destruct_l p2 with + | None -> false + | (Some (| t2, (| s2, p2' |) |) ) -> + step_eq s1 s2 && + path_equal' p1' p2' + end + +let path_includes_concat_l + (#from #through #to1 #to2: typ) + (p0: path from through) + (p1: path through to1) + (p2: path through to2) +: Lemma + (requires (path_includes p1 p2)) + (ensures (path_includes (path_concat p0 p1) (path_concat p0 p2))) += path_includes_ind + (fun #to1_ #to2_ p1_ p2_ -> path_includes (path_concat p0 p1_) (path_concat p0 p2_)) + (fun #through #to p st -> ()) + (fun #to p -> path_includes_refl (path_concat p0 p)) + (fun #to1_ #to2_ #to3_ p1_ p2_ p3_ -> path_includes_trans (path_concat p0 p1_) (path_concat p0 p2_) (path_concat p0 p3_)) + p1 p2 + +let path_disjoint_concat + (#from #through #to1 #to2: typ) + (p0: path from through) + (p1: path through to1) + (p2: path through to2) +: Lemma + (requires (path_disjoint p1 p2)) + (ensures (path_disjoint (path_concat p0 p1) (path_concat p0 p2))) += path_disjoint_ind + (fun #v1 #v2 p1 p2 -> path_disjoint (path_concat p0 p1) (path_concat p0 p2)) + (fun #through #to1 #to2 p s1 s2 -> path_disjoint_step (path_concat p0 p) s1 s2) + (fun #v1 #v2 p1 p2 #v1' #v2' p1' p2' -> + path_includes_concat_l p0 p1 p1'; + path_includes_concat_l p0 p2 p2'; + path_disjoint_includes (path_concat p0 p1) (path_concat p0 p2) (path_concat p0 p1') (path_concat p0 p2')) + p1 p2 + +(* TODO: the following is now wrong due to unions, but should still hold if we restrict ourselves to readable paths +let rec not_path_equal_path_disjoint_same_type + (#from: typ) + (#value: typ) + (p1: path from value) + (p2: path from value) +: Lemma + (requires (path_equal p1 p2 == false)) + (ensures (path_disjoint p1 p2)) + (decreases (path_length p1)) += assert (path_equal p1 p2 == path_equal' p1 p2); + match path_destruct_l p1 with + | None -> path_typ_depth p2 + | Some (| t1, (| s1, p1' |) |) -> + begin match path_destruct_l p2 with + | None -> path_typ_depth p1 + | Some (| t2, (| s2, p2' |) |) -> + if step_eq s1 s2 + then begin + not_path_equal_path_disjoint_same_type p1' p2' ; + path_disjoint_concat (PathStep _ _ PathBase s1) p1' p2' + end else begin + path_disjoint_step PathBase s1 s2; + path_includes_concat (PathStep _ _ PathBase s1) p1'; + path_includes_concat (PathStep _ _ PathBase s2) p2'; + path_disjoint_includes (PathStep _ _ PathBase s1) (PathStep _ _ PathBase s2) p1 p2 + end + end +*) + +let step_sel_upd_other + (#from: typ) + (#to1 #to2: typ) + (s1: step from to1) + (s2: step from to2 {step_disjoint s1 s2}) + (m: otype_of_typ from) + (v: otype_of_typ to1) +: Lemma + (step_sel (step_upd m s1 v) s2 == step_sel m s2) += match s1 with + | StepField l1 fd1 -> + let (m: ostruct l1) = m in + let (StepField _ fd2) = s2 in + begin match m with + | None -> () + | Some m -> DM.sel_upd_other m fd1 v fd2 + end + | StepCell length1 _ i1 -> + let (m: option (array length1 (otype_of_typ to1))) = m in + let (StepCell _ _ i2) = s2 in + begin match m with + | None -> () + | Some m -> + Seq.lemma_index_upd2 m (UInt32.v i1) v (UInt32.v i2) + end + +let path_sel_upd_other + (#from: typ) + (#to1 #to2: typ) + (p1: path from to1) + (p2: path from to2 {path_disjoint p1 p2}) +: Lemma + (ensures (forall (m: otype_of_typ from) (v: otype_of_typ to1) . path_sel (path_upd m p1 v) p2 == path_sel m p2)) += path_disjoint_ind + (fun #v1 #v2 p1_ p2_ -> forall (m: otype_of_typ from) (v: otype_of_typ v1) . 
path_sel (path_upd m p1_ v) p2_ == path_sel m p2_) + (fun #through #to1_ #to2_ p s1 s2 -> + FStar.Classical.forall_intro_sub #_ #(fun m -> forall (v: otype_of_typ to1_) . path_sel (path_upd m (PathStep through to1_ p s1) v) (PathStep through to2_ p s2) == path_sel m (PathStep through to2_ p s2)) (fun m -> + FStar.Classical.forall_intro_sub #_ #(fun v -> path_sel (path_upd m (PathStep through to1_ p s1) v) (PathStep through to2_ p s2) == path_sel m (PathStep through to2_ p s2)) (fun v -> + let m0 = path_sel m p in + let m1 = step_sel m0 s1 in + let m2 = step_sel m0 s2 in + let m0' = step_upd m0 s1 v in + path_sel_upd_same m p m0'; + step_sel_upd_other s1 s2 m0 v + ))) + (fun #v1 #v2 p1 p2 #v1' #v2' p1' p2' -> + let h1: squash (exists r1 . p1' == path_concat p1 r1) = path_includes_exists_concat p1 p1' in + let h2: squash (exists r2 . p2' == path_concat p2 r2) = path_includes_exists_concat p2 p2' in + FStar.Classical.forall_intro_sub #_ #(fun (m: otype_of_typ from) -> forall v . path_sel (path_upd m p1' v) p2' == path_sel m p2') (fun (m: otype_of_typ from) -> + FStar.Classical.forall_intro_sub #_ #(fun (v: otype_of_typ v1') -> path_sel (path_upd m p1' v) p2' == path_sel m p2') (fun (v: otype_of_typ v1') -> + FStar.Classical.exists_elim (path_sel (path_upd m p1' v) p2' == path_sel m p2') h1 (fun r1 -> + FStar.Classical.exists_elim (path_sel (path_upd m p1' v) p2' == path_sel m p2') h2 (fun r2 -> + path_upd_concat m p1 r1 v; + path_sel_concat m p2 r2 + ))))) + p1 p2 + +let path_sel_upd_other' + (#from: typ) + (#to1: typ) + (p1: path from to1) + (m: otype_of_typ from) + (v: otype_of_typ to1) + (#to2: typ) + (p2: path from to2) +: Lemma + (requires (path_disjoint p1 p2)) + (ensures (path_sel (path_upd m p1 v) p2 == path_sel m p2)) += path_sel_upd_other p1 p2 + +(** Operations on pointers *) + +let equal + (#t1 #t2: typ) + (p1: pointer t1) + (p2: pointer t2) +: Ghost bool + (requires True) + (ensures (fun b -> b == true <==> t1 == t2 /\ p1 == p2 )) += Pointer?.from p1 = Pointer?.from p2 && + HS.aref_equal (Pointer?.contents p1) (Pointer?.contents p2) && + path_equal (Pointer?.p p1) (Pointer?.p p2) + +let as_addr (#t: typ) (p: pointer t) = + HS.aref_as_addr (Pointer?.contents p) + +let _field + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Tot (pointer (typ_of_struct_field l fd)) += let (Pointer from contents p') = p in + let p' : path from (TStruct l) = p' in + let p'' : path from (typ_of_struct_field l fd) = PathStep _ _ p' (StepField _ fd) in + Pointer from contents p'' + +let _cell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i: UInt32.t {UInt32.v i < UInt32.v length}) +: Tot (pointer value) += let (Pointer from contents p') = p in + let p' : path from (TArray length value) = p' in + let p'' : path from value = PathStep _ _ p' (StepCell _ _ i) in + Pointer from contents p'' + +let _ufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Tot (pointer (typ_of_struct_field l fd)) += let (Pointer from contents p') = p in + let p' : path from (TUnion l) = p' in + let p'' : path from (typ_of_struct_field l fd) = PathStep _ _ p' (StepUField _ fd) in + Pointer from contents p'' + +let unused_in + (#value: typ) + (p: pointer value) + (h: HS.mem) +: GTot Type0 += let (Pointer from contents p') = p in + HS.aref_unused_in contents h + +let pointer_ref_contents : Type0 = (t: typ & otype_of_typ t) + +let live + (#value: typ) + (h: HS.mem) + (p: pointer value) +: GTot Type0 += let rel = Heap.trivial_preorder 
pointer_ref_contents in + let (Pointer from contents _) = p in ( + HS.aref_live_at h contents pointer_ref_contents rel /\ ( + let untyped_contents = HS.greference_of contents pointer_ref_contents rel in ( + dfst (HS.sel h untyped_contents) == from + ))) + +let nlive + (#value: typ) + (h: HS.mem) + (p: npointer value) +: GTot Type0 += if g_is_null p + then True + else live h p + +let live_nlive + (#value: typ) + (h: HS.mem) + (p: pointer value) += () + +let g_is_null_nlive + (#t: typ) + (h: HS.mem) + (p: npointer t) += () + +let greference_of + (#value: typ) + (p: pointer value) +: Ghost (HS.reference pointer_ref_contents) + (requires (exists h . live h p)) + (ensures (fun x -> (exists h . live h p) /\ x == HS.greference_of (Pointer?.contents p) pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) /\ HS.aref_of x == Pointer?.contents p)) += HS.greference_of (Pointer?.contents p) pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) + +let unused_in_greference_of + (#value: typ) + (p: pointer value) + (h: HS.mem) +: Lemma + (requires (exists h . live h p)) + (ensures ((exists h . live h p) /\ (HS.unused_in (greference_of p) h <==> unused_in p h))) + [SMTPatOr [ + [SMTPat (HS.unused_in (greference_of p) h)]; + [SMTPat (unused_in p h)]; + ]] += () + +let live_not_unused_in + (#value: typ) + (h: HS.mem) + (p: pointer value) += let f () : Lemma + (requires (live h p /\ p `unused_in` h)) + (ensures False) + = let r = greference_of p in + HS.contains_aref_unused_in h r (Pointer?.contents p) + in + Classical.move_requires f () + +let gread + (#value: typ) + (h: HS.mem) + (p: pointer value) +: GTot (type_of_typ value) += if StrongExcludedMiddle.strong_excluded_middle (live h p) + then + let content = greference_of p in + let (| _, c |) = HS.sel h content in + value_of_ovalue value (path_sel c (Pointer?.p p)) + else + dummy_val value + +let frameOf + (#value: typ) + (p: pointer value) +: GTot HS.rid += HS.frameOf_aref (Pointer?.contents p) + +let live_region_frameOf #value h p = + let content = greference_of p in + assert (HS.contains h content) + +let disjoint_roots_intro_pointer_vs_pointer + (#value1 value2: typ) + (h: HS.mem) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (live h p1 /\ unused_in p2 h)) + (ensures (frameOf p1 <> frameOf p2 \/ as_addr p1 =!= as_addr p2)) += () + +let disjoint_roots_intro_pointer_vs_reference + (#value1: typ) + (#value2: Type) + (h: HS.mem) + (p1: pointer value1) + (p2: HS.reference value2) +: Lemma + (requires (live h p1 /\ p2 `HS.unused_in` h)) + (ensures (frameOf p1 <> HS.frameOf p2 \/ as_addr p1 =!= HS.as_addr p2)) += let r = greference_of p1 in + assert (HS.contains h r) + +let disjoint_roots_intro_reference_vs_pointer + (#value1: Type) + (#value2: typ) + (h: HS.mem) + (p1: HS.reference value1) + (p2: pointer value2) +: Lemma + (requires (HS.contains h p1 /\ p2 `unused_in` h)) + (ensures (HS.frameOf p1 <> frameOf p2 \/ HS.as_addr p1 =!= as_addr p2)) += () + +let is_mm + (#value: typ) + (p: pointer value) +: GTot bool += HS.aref_is_mm (Pointer?.contents p) + +(* // TODO: recover with addresses? 
+let recall + (#value: Type) + (p: pointer value {is_eternal_region (frameOf p) && not (is_mm p)}) +: HST.Stack unit + (requires (fun m -> True)) + (ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 p)) += HST.recall (Pointer?.content p) +*) + +let gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += _field p fd + +let as_addr_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += () + +let unused_in_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) + (h: HS.mem) += () + +let live_gfield + (h: HS.mem) + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += () + +let gread_gfield + (h: HS.mem) + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += () + +let frameOf_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += () + +let is_mm_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += () + +let gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += _ufield p fd + +let as_addr_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let unused_in_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) + (h: HS.mem) += () + +let live_gufield + (h: HS.mem) + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let gread_gufield + (h: HS.mem) + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let frameOf_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let is_mm_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + i += _cell p i + +let as_addr_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + i += () + +let unused_in_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + i += () + +let live_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + i += () + +let gread_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + i += () + +let frameOf_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + i += () + +let is_mm_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + i += () + +let includes + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: GTot bool += Pointer?.from p1 = Pointer?.from p2 && + HS.aref_equal (Pointer?.contents p1) (Pointer?.contents p2) && + path_includes (Pointer?.p p1) (Pointer?.p p2) + +let includes_refl + (#value: typ) + (p: pointer value) += () + +let includes_trans + (#value1 #value2 #value3: typ) + (p1: pointer value1) + (p2: pointer value2) + (p3: pointer value3) += path_includes_trans (Pointer?.p p1) (Pointer?.p p2) (Pointer?.p p3) + +let includes_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += () + +let includes_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let includes_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + i += () + +let includes_ind + (x:((#value1: typ) -> + (#value2: typ) -> + (p1: pointer value1) -> + (p2: pointer value2 {includes p1 p2} ) -> + GTot Type0)) + (h_field: + ((l: struct_typ) -> + (p: pointer (TStruct l)) -> + (fd: struct_field l 
{includes p (gfield p fd)}) -> + Lemma (x p (gfield p fd)))) + (h_ufield: + ((l: union_typ) -> + (p: pointer (TUnion l)) -> + (fd: struct_field l {includes p (gufield p fd)}) -> + Lemma (x p (gufield p fd)))) + (h_cell: + ((#length: array_length_t) -> + (#value: typ) -> + (p: pointer (TArray length value)) -> + (i: UInt32.t {UInt32.v i < UInt32.v length /\ includes p (gcell p i)}) -> + Lemma (x p (gcell p i)))) + (h_refl: + ((#value: typ) -> + (p: pointer value {includes p p}) -> + Lemma (x p p))) + (h_trans: + ((#value1: typ) -> + (#value2: typ) -> + (#value3: typ) -> + (p1: pointer value1) -> + (p2: pointer value2) -> + (p3: pointer value3 {includes p1 p2 /\ includes p2 p3 /\ includes p1 p3 /\ x p1 p2 /\ x p2 p3}) -> + Lemma (x p1 p3))) + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2 {includes p1 p2}) +: Lemma (x p1 p2) += let (Pointer from contents _) = p1 in + path_includes_ind + (fun #to1 #to2 p1_ p2_ -> x (Pointer from contents p1_) (Pointer from contents p2_)) + (fun #through #to p s -> + match s with + | StepField l fd -> let (pt: pointer (TStruct l)) = (Pointer from contents p) in h_field l pt fd + | StepUField l fd -> let (pt: pointer (TUnion l)) = (Pointer from contents p) in h_ufield l pt fd + | StepCell length value i -> let (pt: pointer (TArray length value)) = (Pointer from contents p) in h_cell pt i + ) + (fun #to p -> h_refl (Pointer from contents p)) + (fun #to1 #to2 #to3 p1_ p2_ p3_ -> h_trans (Pointer from contents p1_) (Pointer from contents p2_) (Pointer from contents p3_)) + (Pointer?.p p1) + (Pointer?.p p2) + +(* +let unused_in_includes + (#value1: typ) + (#value2: typ) + (h: HS.mem) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (includes p1 p2)) + (unused_in p1 h <==> unused_in p2 h) + [SMTPat (unused_in p2 h); SMTPat (includes p1 p2)] += includes_ind + (fun #v1 #v2 p1 p2 -> unused_in p1 h <==> unused_in p2 h) + (fun l p fd -> unused_in_gfield p fd h) + (fun l p fd -> unused_in_gufield p fd h) + (fun #length #value p i -> unused_in_gcell h p i) + (fun #v p -> ()) + (fun #v1 #v2 #v3 p1 p2 p3 -> ()) + p1 p2 + +let live_includes + (#value1: typ) + (#value2: typ) + (h: HS.mem) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (includes p1 p2)) + (ensures (live h p1 <==> live h p2)) + [SMTPat (live h p2); SMTPat (includes p1 p2)] += includes_ind + (fun #v1 #v2 p1 p2 -> live h p1 <==> live h p2) + (fun l p fd -> live_gfield h p fd) + (fun l p fd -> live_gufield h p fd) + (fun #length #value p i -> live_gcell h p i) + (fun #v p -> ()) + (fun #v1 #v2 #v3 p1 p2 p3 -> ()) + p1 p2 +*) + +(** The readable permission. + We choose to implement it only abstractly, instead of explicitly + tracking the permission in the heap. +*) + +let readable + (#a: typ) + (h: HS.mem) + (b: pointer a) +: GTot Type0 += let () = () in // necessary to somehow remove the `logic` qualifier + live h b /\ ( + let content = greference_of b in + let (| _, c |) = HS.sel h content in + ovalue_is_readable a (path_sel c (Pointer?.p b)) + ) + +let readable_live + (#a: typ) + (h: HS.mem) + (b: pointer a) += () + +let readable_gfield + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) + (fd: struct_field l) += () + +let readable_struct + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) +: Lemma + (requires ( + forall (f: struct_field l) . 
+ readable h (gfield p f) + )) + (ensures (readable h p)) +// [SMTPat (readable #(TStruct l) h p)] // TODO: dubious pattern, will probably trigger unreplayable hints += let dummy_field : struct_field l = fst (List.Tot.hd l.fields) in // struct is nonempty + let dummy_field_ptr = gfield p dummy_field in + assert (readable h dummy_field_ptr); + let content = greference_of p in + let (| _, c |) = HS.sel h content in + let (v: otype_of_typ (TStruct l)) = path_sel c (Pointer?.p p) in + let (v: ostruct l {Some? v}) = v in + ovalue_is_readable_struct_intro l v + +let readable_struct_forall_mem + (#l: struct_typ) + (p: pointer (TStruct l)) +: Lemma (forall + (h: HS.mem) + . ( + forall (f: struct_field l) . + readable h (gfield p f) + ) ==> + readable h p + ) += let f + (h: HS.mem) + : Lemma // FIXME: WHY WHY WHY do we need this explicit annotation? + (requires ( + forall (f: struct_field l) . + readable h (gfield p f) + )) + (ensures (readable h p)) + = readable_struct h p + in + Classical.forall_intro (Classical.move_requires f) + +let rec readable_struct_fields' + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) + (s: list string) +: GTot Type0 + (decreases s) += match s with + | [] -> True + | f :: s' -> + readable_struct_fields' h p s' /\ ( + if List.Tot.mem f (List.Tot.map fst l.fields) + then + let f : struct_field l = f in + readable h (gfield p f) + else + True + ) + +let readable_struct_fields #l h p s = readable_struct_fields' h p s + +let readable_struct_fields_nil #l h p = () + +let readable_struct_fields_cons #l h p f q = () + +let rec readable_struct_fields_elim + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) + (s: list string) +: Lemma + (requires (readable_struct_fields h p s)) + (ensures (forall f . (List.Tot.mem f s /\ List.Tot.mem f (List.Tot.map fst l.fields)) ==> (let f : struct_field l = f in readable h (gfield p f)))) + (decreases s) += match s with + | [] -> () + | _ :: q -> readable_struct_fields_elim h p q + +let readable_struct_fields_readable_struct #l h p = + readable_struct_fields_elim h p (List.Tot.map fst l.fields); + readable_struct h p + +let readable_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + i += () + +let readable_array + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) += assert (readable h (gcell p 0ul)); // for Some? + let content = greference_of p in + let (| _, c |) = HS.sel h content in + let (v0: otype_of_typ (TArray length value)) = path_sel c (Pointer?.p p) in + ovalue_is_readable_array_intro v0 + +(* TODO: improve on the following interface *) +let readable_gufield + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +(** The active field of a union *) + +let is_active_union_field + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) +: GTot Type0 += let () = () in // necessary to somehow remove the `logic` qualifier + live h p /\ ( + let content = greference_of p in + let (| _, c |) = HS.sel h content in + let vu : otype_of_typ (TUnion l) = path_sel c (Pointer?.p p) in + let vu : option (gtdata (struct_field l) (type_of_struct_field' l otype_of_typ)) = vu in + Some? 
vu /\ gtdata_get_key (Some?.v vu) == fd + ) + +let is_active_union_live + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let is_active_union_field_live + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let is_active_union_field_eq + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd1 fd2: struct_field l) += () + +let is_active_union_field_get_key + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let is_active_union_field_readable + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) += () + +let is_active_union_field_includes_readable + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) + (#t': typ) + (p' : pointer t') += let content = greference_of p in + let (| _ , c |) = HS.sel h content in + let t = typ_of_struct_field l fd in + let (Pointer from cts p0) = p in + let pf = PathStep _ _ p0 (StepUField l fd) in + let (v0 : otype_of_typ t) = path_sel c pf in + let phi + (#t': typ) + (pt': path from t') + : Ghost Type0 + (requires (path_includes pf pt')) + (ensures (fun _ -> True)) + = (~ (path_sel c pt' == none_ovalue t')) ==> is_active_union_field h p fd + in + let f + (t' : typ) + (pt' : path t t') + : Lemma + (ensures (phi (path_concat pf pt'))) + = path_sel_concat c pf pt'; + path_sel_none_ovalue pf; + path_sel_none_ovalue pt' + in + path_concat_includes pf phi f (Pointer?.p p') + +(*** Semantics of buffers *) + +(** Operations on buffers *) + +#push-options "--ifuel 2" +let _singleton_buffer_of_pointer + (#t: typ) + (p: pointer t) +: Tot (buffer t) += let Pointer from contents pth = p in + match pth with + | PathStep _ _ pth' (StepCell ln ty i) -> + (* reconstruct the buffer to the enclosing array *) + Buffer (BufferRootArray #ty #ln (Pointer from contents pth')) i 1ul + | _ -> + Buffer (BufferRootSingleton p) 0ul 1ul +#pop-options + +let gsingleton_buffer_of_pointer #t p = _singleton_buffer_of_pointer p + +let singleton_buffer_of_pointer #t p = _singleton_buffer_of_pointer p + +let gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) +: GTot (buffer t) += Buffer (BufferRootArray p) 0ul length + +let buffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) +: HST.Stack (buffer t) + (requires (fun h -> live h p)) + (ensures (fun h b h' -> h' == h /\ b == gbuffer_of_array_pointer p)) += Buffer (BufferRootArray p) 0ul length + +let buffer_length + (#t: typ) + (b: buffer t) +: GTot UInt32.t += Buffer?.blength b + +let buffer_length_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) +: Lemma + (requires True) + (ensures (buffer_length (gsingleton_buffer_of_pointer p) == 1ul)) + [SMTPat (buffer_length (gsingleton_buffer_of_pointer p))] += () + +let buffer_length_gbuffer_of_array_pointer + (#t: typ) + (#len: array_length_t) + (p: pointer (TArray len t)) +: Lemma + (requires True) + (ensures (buffer_length (gbuffer_of_array_pointer p) == len)) + [SMTPat (buffer_length (gbuffer_of_array_pointer p))] += () + +let buffer_live + (#t: typ) + (h: HS.mem) + (b: buffer t) +: GTot Type0 += let () = () in ( // necessary to somehow remove the `logic` qualifier + match b.broot with + | BufferRootSingleton p -> live h p + | BufferRootArray p -> live h p + ) + +let buffer_live_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) + (h: HS.mem) +: Lemma + (ensures (buffer_live h (gsingleton_buffer_of_pointer p) <==> live h p )) + 
[SMTPat (buffer_live h (gsingleton_buffer_of_pointer p))] += () + +let buffer_live_gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) + (h: HS.mem) +: Lemma + (requires True) + (ensures (buffer_live h (gbuffer_of_array_pointer p) <==> live h p)) + [SMTPat (buffer_live h (gbuffer_of_array_pointer p))] += () + +let buffer_unused_in #t b h = + match b.broot with + | BufferRootSingleton p -> unused_in p h + | BufferRootArray p -> unused_in p h + +let buffer_live_not_unused_in #t b h = () + +let buffer_unused_in_gsingleton_buffer_of_pointer #t p h = () + +let buffer_unused_in_gbuffer_of_array_pointer #t #length p h = () + +let frameOf_buffer + (#t: typ) + (b: buffer t) +: GTot HS.rid += match b.broot with + | BufferRootSingleton p -> frameOf p + | BufferRootArray p -> frameOf p + +let frameOf_buffer_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) += () + +let frameOf_buffer_gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) += () + +let live_region_frameOf_buffer #value h p = () + +let buffer_as_addr #t b = + match b.broot with + | BufferRootSingleton p -> as_addr p + | BufferRootArray p -> as_addr p + +let buffer_as_addr_gsingleton_buffer_of_pointer #t p = () + +let buffer_as_addr_gbuffer_of_array_pointer #t #length p = () + +let gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + len += Buffer (Buffer?.broot b) FStar.UInt32.(Buffer?.bidx b +^ i) len + +let frameOf_buffer_gsub_buffer #t b i len = () + +let buffer_as_addr_gsub_buffer #t b i len = () + +let sub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + len += Buffer (Buffer?.broot b) FStar.UInt32.(Buffer?.bidx b +^ i) len + +let offset_buffer #t b i = + sub_buffer b i (UInt32.sub (Buffer?.blength b) i) + +let buffer_length_gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + len += () + +let buffer_live_gsub_buffer_equiv + (#t: typ) + (b: buffer t) + (i: UInt32.t) + len + h += () + +let buffer_live_gsub_buffer_intro + (#t: typ) + (b: buffer t) + (i: UInt32.t) + len + h += () + +let buffer_unused_in_gsub_buffer #t b i len h = () + +let gsub_buffer_gsub_buffer + (#a: typ) + (b: buffer a) + (i1: UInt32.t) + len1 i2 len2 += () + +let gsub_buffer_zero_buffer_length + (#a: typ) + (b: buffer a) += () + +let buffer_root_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer_root t) +: GTot (Seq.seq (type_of_typ t)) += match b with + | BufferRootSingleton p -> + Seq.create 1 (gread h p) + | BufferRootArray p -> + gread h p + +let length_buffer_root_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer_root t) +: Lemma + (requires True) + (ensures (Seq.length (buffer_root_as_seq h b) == UInt32.v (buffer_root_length b))) + [SMTPat (Seq.length (buffer_root_as_seq h b))] += () + +let buffer_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer t) +: GTot (Seq.seq (type_of_typ t)) += let i = UInt32.v (Buffer?.bidx b) in + Seq.slice (buffer_root_as_seq h (Buffer?.broot b)) i (i + UInt32.v (Buffer?.blength b)) + +let buffer_length_buffer_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer t) += () + +#push-options "--ifuel 2 --z3rlimit_factor 4 --retry 4" +let buffer_as_seq_gsingleton_buffer_of_pointer #t h p = + let Pointer from contents pth = p in + match pth with + | PathStep through to pth' (StepCell ln ty i) -> + assert (through == TArray ln ty); + assert (to == ty); + assert (t == ty); + let p' : pointer (TArray ln ty) = Pointer from contents pth' in + let s : array ln (type_of_typ t) = gread h p' in + let s1 = Seq.slice s (UInt32.v i) (UInt32.v i + 1) in + let 
v = gread h p in + assert (v == Seq.index s (UInt32.v i)); + let s2 = Seq.create 1 v in + assert (Seq.length s1 == 1); + assert (Seq.length s2 == 1); + assert (Seq.index s1 0 == v); + assert (Seq.index s2 0 == v); + assert (Seq.equal s1 s2) + | _ -> + Seq.slice_length (Seq.create 1 (gread h p)) +#pop-options + +let buffer_as_seq_gbuffer_of_array_pointer + (#length: array_length_t) + (#t: typ) + (h: HS.mem) + (p: pointer (TArray length t)) += let s : array length (type_of_typ t) = gread h p in + Seq.slice_length s + +let buffer_as_seq_gsub_buffer + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + len += Seq.slice_slice (buffer_root_as_seq h (Buffer?.broot b)) (UInt32.v (Buffer?.bidx b)) (UInt32.v (Buffer?.bidx b) + UInt32.v (Buffer?.blength b)) (UInt32.v i) (UInt32.v i + UInt32.v len) + +let gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + i += match Buffer?.broot b with + | BufferRootSingleton p -> p + | BufferRootArray p -> + gcell p FStar.UInt32.(Buffer?.bidx b +^ i) + +let pointer_of_buffer_cell + (#t: typ) + (b: buffer t) + i += match Buffer?.broot b with + | BufferRootSingleton p -> p + | BufferRootArray p -> + _cell p FStar.UInt32.(Buffer?.bidx b +^ i) + +let gpointer_of_buffer_cell_gsub_buffer + (#t: typ) + (b: buffer t) + i1 len i2 += () + +let live_gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + i h += () + +#set-options "--initial_ifuel 2 --max_ifuel 2" +let gpointer_of_buffer_cell_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) + i += () + +#set-options "--initial_ifuel 1 --max_ifuel 1" +let gpointer_of_buffer_cell_gbuffer_of_array_pointer + (#length: array_length_t) + (#t: typ) + (p: pointer (TArray length t)) + i += () + +let frameOf_gpointer_of_buffer_cell #t b i = () + +let as_addr_gpointer_of_buffer_cell #t b i = () + +let gread_gpointer_of_buffer_cell + (#t: typ) + (h: HS.mem) + (b: buffer t) + i += () + +let gread_gpointer_of_buffer_cell' + (#t: typ) + (h: HS.mem) + (b: buffer t) + i += () + +let index_buffer_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer t) + i += () + +let gsingleton_buffer_of_pointer_gcell #t #len p i = () + +let gsingleton_buffer_of_pointer_gpointer_of_buffer_cell #t b i = () + +(* The readable permission lifted to buffers. *) + +let buffer_readable' + (#t: typ) + (h: HS.mem) + (b: buffer t) +: GTot Type0 += buffer_live h b /\ ( + forall (i: UInt32.t) . 
+ UInt32.v i < UInt32.v (buffer_length b) ==> + readable h (gpointer_of_buffer_cell b i) + ) + +let buffer_readable + (#t: typ) + (h: HS.mem) + (b: buffer t) +: GTot Type0 += buffer_readable' h b + +let buffer_readable_buffer_live + (#t: typ) + (h: HS.mem) + (b: buffer t) += () + +let buffer_readable_gsingleton_buffer_of_pointer + (#t: typ) + (h: HS.mem) + (p: pointer t) += let phi () : Lemma + (requires (buffer_readable h (gsingleton_buffer_of_pointer p))) + (ensures (readable h p)) + = assert (readable h (gpointer_of_buffer_cell (gsingleton_buffer_of_pointer p) 0ul)) + in + Classical.move_requires phi () + +let buffer_readable_gbuffer_of_array_pointer + (#len: array_length_t) + (#t: typ) + (h: HS.mem) + (p: pointer (TArray len t)) += let phi () + : Lemma + (requires (buffer_readable h (gbuffer_of_array_pointer p))) + (ensures (readable h p)) + = let psi + (i: UInt32.t { UInt32.v i < UInt32.v len } ) + : Lemma + (readable h (gcell p i)) + = assert (readable h (gpointer_of_buffer_cell (gbuffer_of_array_pointer p) i)) + in + Classical.forall_intro psi; + readable_array h p + in + Classical.move_requires phi () + +let buffer_readable_gsub_buffer + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + len += Classical.forall_intro (Classical.move_requires (gpointer_of_buffer_cell_gsub_buffer b i len)) + +let readable_gpointer_of_buffer_cell + (#t: typ) + (h: HS.mem) + (b: buffer t) + i += () + +let buffer_readable_intro + (#t: typ) + (h: HS.mem) + (b: buffer t) += () + +let buffer_readable_elim #t h b = () + +(*** Disjointness of pointers *) + +let disjoint + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: GTot Type0 += if + frameOf p1 = frameOf p2 && + as_addr p1 = as_addr p2 + then + Pointer?.from p1 == Pointer?.from p2 /\ + Pointer?.contents p1 == Pointer?.contents p2 /\ + path_disjoint (Pointer?.p p1) (Pointer?.p p2) + else + True + +let disjoint_root + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (frameOf p1 <> frameOf p2 \/ as_addr p1 <> as_addr p2)) + (ensures (disjoint p1 p2)) += () + +let disjoint_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd1 fd2: struct_field l) +: Lemma + (requires (fd1 <> fd2)) + (ensures (disjoint (gfield p fd1) (gfield p fd2))) + [SMTPat (disjoint (gfield p fd1) (gfield p fd2))] += () + +let disjoint_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i1: UInt32.t) + (i2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 < UInt32.v length /\ + UInt32.v i2 < UInt32.v length /\ + UInt32.v i1 <> UInt32.v i2 + )) + (ensures ( + UInt32.v i1 < UInt32.v length /\ + UInt32.v i2 < UInt32.v length /\ + disjoint (gcell p i1) (gcell p i2) + )) + [SMTPat (disjoint (gcell p i1) (gcell p i2))] += () + +let disjoint_includes + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) + (#value1': typ) + (#value2': typ) + (p1': pointer value1') + (p2': pointer value2') +: Lemma + (requires (includes p1 p1' /\ includes p2 p2' /\ disjoint p1 p2)) + (ensures (disjoint p1' p2')) += if + frameOf p1 = frameOf p2 && + as_addr p1 = as_addr p2 + then + path_disjoint_includes (Pointer?.p p1) (Pointer?.p p2) (Pointer?.p p1') (Pointer?.p p2') + else + () + +let disjoint_ind + (x: + ((#value1: typ) -> + (#value2: typ) -> + (p1: pointer value1) -> + (p2: pointer value2 {disjoint p1 p2} ) -> + GTot Type0)) + (h_root: + ((#value1: typ) -> + (#value2: typ) -> + (p1: pointer value1) -> + (p2: pointer value2 { frameOf p1 <> frameOf p2 \/ 
as_addr p1 <> as_addr p2 } ) -> + Lemma (x p1 p2))) + (h_field: + ((#l: struct_typ) -> + (p: pointer (TStruct l)) -> + (fd1: struct_field l) -> + (fd2: struct_field l { fd1 <> fd2 /\ disjoint (gfield p fd1) (gfield p fd2) } ) -> + Lemma (x (gfield p fd1) (gfield p fd2)))) + (h_cell: + ((#length: array_length_t) -> + (#value: typ) -> + (p: pointer (TArray length value)) -> + (i1: UInt32.t {UInt32.v i1 < UInt32.v length}) -> + (i2: UInt32.t {UInt32.v i2 < UInt32.v length /\ UInt32.v i1 <> UInt32.v i2 /\ disjoint (gcell p i1) (gcell p i2) }) -> + Lemma (x (gcell p i1) (gcell p i2)) + )) + (h_includes: + ((#value1: typ) -> + (#value2: typ) -> + (p1: pointer value1) -> + (p2: pointer value2) -> + (#value1': typ) -> + (#value2': typ) -> + (p1': pointer value1' {includes p1 p1'}) -> + (p2': pointer value2' {includes p2 p2' /\ disjoint p1 p2 /\ disjoint p1' p2' /\ x p1 p2}) -> + Lemma (x p1' p2'))) + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2 { disjoint p1 p2 } ) +: Lemma (x p1 p2) += if + frameOf p1 = frameOf p2 && + as_addr p1 = as_addr p2 + then + let (Pointer from contents _) = p1 in + path_disjoint_ind + (fun #v1 #v2 p1_ p2_ -> x (Pointer from contents p1_) (Pointer from contents p2_)) + (fun #through #to1 #to2 p s1 s2 -> + match s1 with + | StepField l fd1 -> + let (StepField _ fd2) = s2 in + h_field #l (Pointer from contents p) fd1 fd2 + | StepCell le va i1 -> + let (StepCell _ _ i2) = s2 in + h_cell #le #va (Pointer from contents p) i1 i2 + ) + (fun #v1 #v2 p1_ p2_ #v1' #v2' p1' p2' -> h_includes (Pointer from contents p1_) (Pointer from contents p2_) (Pointer from contents p1') (Pointer from contents p2')) + (Pointer?.p p1) + (Pointer?.p p2); + assert (x p1 p2) + else + h_root p1 p2 + +let disjoint_sym + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (disjoint p1 p2)) + (ensures (disjoint p2 p1)) += disjoint_ind + (fun #v1 #v2 p1 p2 -> disjoint p2 p1) + (fun #v1 #v2 p1 p2 -> disjoint_root p2 p1) + (fun #l p fd1 fd2 -> disjoint_gfield p fd2 fd1) + (fun #le #va p i1 i2 -> disjoint_gcell p i2 i1) + (fun #v1 #v2 p1 p2 #v1' #v2' p1' p2' -> disjoint_includes p2 p1 p2' p1') + p1 p2 + +let disjoint_sym' + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires True) + (ensures (disjoint p1 p2 <==> disjoint p2 p1)) + [SMTPat (disjoint p1 p2)] += FStar.Classical.move_requires (disjoint_sym #value1 #value2 p1) p2; + FStar.Classical.move_requires (disjoint_sym #value2 #value1 p2) p1 + +let disjoint_sym'' + (value1: typ) + (value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (ensures (disjoint p1 p2 <==> disjoint p2 p1)) += disjoint_sym' p1 p2 + +let disjoint_includes_l #a #es #a' (x: pointer a) (subx:pointer es) (y:pointer a') : Lemma + (requires (includes x subx /\ disjoint x y)) + (ensures (disjoint subx y)) + [SMTPat (disjoint subx y); SMTPat (includes x subx)] + = disjoint_includes x y subx y + +let disjoint_includes_l_swap #a #es #a' (x:pointer a) (subx:pointer es) (y:pointer a') : Lemma + (requires (includes x subx /\ disjoint x y)) + (ensures (disjoint y subx)) + [SMTPat (disjoint y subx); SMTPat (includes x subx)] + = disjoint_includes_l x subx y; + disjoint_sym subx y + +let disjoint_includes_r + #t1 #t2 #t3 + (p1: pointer t1) + (p2: pointer t2) + (p3: pointer t3) +: Lemma + (requires (disjoint p1 p2 /\ includes p2 p3)) + (ensures (disjoint p1 p3)) + [SMTPat (disjoint p1 p2); SMTPat (includes p2 p3)] += disjoint_sym p1 p2; + 
disjoint_includes_l_swap p2 p3 p1 + +(* TODO: The following is now wrong, should be replaced with readable + +let live_not_equal_disjoint + (#t: typ) + (h: HS.mem) + (p1 p2: pointer t) +: Lemma + (requires (live h p1 /\ live h p2 /\ equal p1 p2 == false)) + (ensures (disjoint p1 p2)) += if + frameOf p1 = frameOf p2 && + as_addr p1 = as_addr p2 + then begin + let c1 = greference_of p1 in + let c2 = greference_of p2 in + HS.lemma_same_addrs_same_types_same_refs h c1 c2; + not_path_equal_path_disjoint_same_type p1.p p2.p + end else + disjoint_root p1 p2 +*) + + +(*** The modifies clause *) + +noeq +type loc_aux = + | LocBuffer: + (#t: typ) -> + (b: buffer t) -> + loc_aux + | LocPointer: + (#t: typ) -> + (p: pointer t) -> + loc_aux + +(* Necessary to handle `exists` *) + +let buffer_includes_pointer + (#t1 #t2: typ) + (b: buffer t1) + (p: pointer t2) +: GTot Type0 += exists (i: UInt32.t) . UInt32.v i < UInt32.v (buffer_length b) /\ gpointer_of_buffer_cell b i `includes` p + +let loc_aux_includes_pointer + (s: loc_aux) + (#t: typ) + (p: pointer t) +: GTot Type0 += match s with + | LocPointer p' -> + p' `includes` p + | LocBuffer b -> + buffer_includes_pointer b p + +let loc_aux_includes_pointer_trans + (s: loc_aux) + (#t1 #t2: typ) + (p1: pointer t1) + (p2: pointer t2) +: Lemma + (requires (loc_aux_includes_pointer s p1 /\ p1 `includes` p2)) + (ensures (loc_aux_includes_pointer s p2)) += match s with + | LocPointer p -> includes_trans p p1 p2 + | LocBuffer b -> + let f + (i: UInt32.t) + : Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b) /\ gpointer_of_buffer_cell b i `includes` p1)) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ gpointer_of_buffer_cell b i `includes` p2)) + = includes_trans (gpointer_of_buffer_cell b i) p1 p2 + in + Classical.forall_intro (Classical.move_requires f) + +(* Same problem *) + +let loc_aux_includes_buffer + (s: loc_aux) + (#t: typ) + (b: buffer t) +: GTot Type0 += forall (i: UInt32.t) . UInt32.v i < UInt32.v (buffer_length b) ==> loc_aux_includes_pointer s (gpointer_of_buffer_cell b i) + +let loc_aux_includes + (s: loc_aux) + (s2: loc_aux) +: GTot Type0 + (decreases s2) += match s2 with + | LocPointer p -> + loc_aux_includes_pointer s p + | LocBuffer b -> + loc_aux_includes_buffer s b + +let loc_aux_includes_refl' + (s: loc_aux) +: Lemma + (ensures (loc_aux_includes s s)) += () + +(* FIXME: WHY WHY WHY do I need to duplicate the lemma? 
Because Classical.forall_intro DOES NOT UNIFY/typecheck if there is a pattern *) +let loc_aux_includes_refl'' + (s: loc_aux) +: Lemma + (loc_aux_includes s s) + [SMTPat (loc_aux_includes s s)] += loc_aux_includes_refl' s + +let loc_aux_includes_loc_aux_includes_pointer + (s1: loc_aux) + (s2: loc_aux) + (#t: typ) + (p: pointer t) +: Lemma + (requires (loc_aux_includes s1 s2 /\ loc_aux_includes_pointer s2 p)) + (ensures (loc_aux_includes_pointer s1 p)) += match s2 with + | LocPointer p' -> + loc_aux_includes_pointer_trans s1 p' p + | LocBuffer b -> + let f + (i: UInt32.t) + : Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b) /\ gpointer_of_buffer_cell b i `includes` p)) + (ensures (loc_aux_includes_pointer s1 p)) + = loc_aux_includes_pointer_trans s1 (gpointer_of_buffer_cell b i) p + in + Classical.forall_intro (Classical.move_requires f) + +let loc_aux_includes_trans + (s1 s2: loc_aux) + (s3: loc_aux) +: Lemma + (requires (loc_aux_includes s1 s2 /\ loc_aux_includes s2 s3)) + (ensures (loc_aux_includes s1 s3)) += match s3 with + | LocPointer p -> + loc_aux_includes_loc_aux_includes_pointer s1 s2 p + | LocBuffer b -> + let f + (i: UInt32.t) + : Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ loc_aux_includes_pointer s1 (gpointer_of_buffer_cell b i))) + = loc_aux_includes_loc_aux_includes_pointer s1 s2 (gpointer_of_buffer_cell b i) + in + Classical.forall_intro (Classical.move_requires f) + +(* the following is necessary because `decreases` messes up 2nd-order unification with `Classical.forall_intro_3` *) + +let loc_aux_includes_trans' + (s1 s2: loc_aux) + (s3: loc_aux) +: Lemma + ((loc_aux_includes s1 s2 /\ loc_aux_includes s2 s3) ==> loc_aux_includes s1 s3) += Classical.move_requires (loc_aux_includes_trans s1 s2) s3 + + +(* Disjointness of two memory locations *) + +let disjoint_buffer_vs_pointer + (#t1 #t2: typ) + (b: buffer t1) + (p: pointer t2) +: GTot Type0 += forall (i: UInt32.t) . UInt32.v i < UInt32.v (buffer_length b) ==> disjoint (gpointer_of_buffer_cell b i) p + +let loc_aux_disjoint_pointer + (l: loc_aux) + (#t: typ) + (p: pointer t) +: GTot Type0 += match l with + | LocPointer p' -> disjoint p' p + | LocBuffer b -> disjoint_buffer_vs_pointer b p + +let loc_aux_disjoint_buffer + (l: loc_aux) + (#t: typ) + (b: buffer t) +: GTot Type0 += forall (i: UInt32.t) . 
UInt32.v i < UInt32.v (buffer_length b) ==> loc_aux_disjoint_pointer l (gpointer_of_buffer_cell b i) + +let loc_aux_disjoint_buffer_sym + (#t1 #t2: typ) + (b1: buffer t1) + (b2: buffer t2) +: Lemma + (loc_aux_disjoint_buffer (LocBuffer b1) b2 <==> loc_aux_disjoint_buffer (LocBuffer b2) b1) += Classical.forall_intro_2 (disjoint_sym'' t1 t2) + +let loc_aux_disjoint_pointer_buffer_sym + (#t1 #t2: typ) + (b1: buffer t1) + (p2: pointer t2) +: Lemma + (loc_aux_disjoint_pointer (LocBuffer b1) p2 <==> loc_aux_disjoint_buffer (LocPointer p2) b1) += Classical.forall_intro_2 (disjoint_sym'' t1 t2) + +let loc_aux_disjoint + (l1 l2: loc_aux) +: GTot Type0 + (decreases l2) += match l2 with + | LocPointer p -> + loc_aux_disjoint_pointer l1 p + | LocBuffer b -> + loc_aux_disjoint_buffer l1 b + +let loc_aux_disjoint_sym + (l1 l2: loc_aux) +: Lemma + (ensures (loc_aux_disjoint l1 l2 <==> loc_aux_disjoint l2 l1)) += + begin match (l1, l2) with + | (LocPointer p1, LocPointer p2) -> disjoint_sym' p1 p2 + | (LocPointer p1, LocBuffer b2) -> loc_aux_disjoint_pointer_buffer_sym b2 p1 + | (LocBuffer b1, LocPointer p2) -> loc_aux_disjoint_pointer_buffer_sym b1 p2 + | (LocBuffer b1, LocBuffer b2) -> loc_aux_disjoint_buffer_sym b1 b2 + end + +(* Same problem with decreases here *) + +let loc_aux_disjoint_sym' + (l1 l2: loc_aux) +: Lemma + (loc_aux_disjoint l1 l2 <==> loc_aux_disjoint l2 l1) += loc_aux_disjoint_sym l1 l2 + +let loc_aux_disjoint_pointer_includes + (l: loc_aux) + (#t1: typ) + (p1: pointer t1) + (#t2: typ) + (p2: pointer t2) +: Lemma + (requires (loc_aux_disjoint_pointer l p1 /\ p1 `includes` p2)) + (ensures (loc_aux_disjoint_pointer l p2)) += () + +let loc_aux_disjoint_loc_aux_includes_pointer + (l1 l2: loc_aux) + (#t3: typ) + (p3: pointer t3) +: Lemma + (requires (loc_aux_disjoint l1 l2 /\ loc_aux_includes_pointer l2 p3)) + (ensures (loc_aux_disjoint_pointer l1 p3)) += match l2 with + | LocPointer p2 -> + loc_aux_disjoint_pointer_includes l1 p2 p3 + | LocBuffer b2 -> + let f + (i: UInt32.t) + : Lemma + (requires ( + UInt32.v i < UInt32.v (buffer_length b2) /\ + gpointer_of_buffer_cell b2 i `includes` p3 + )) + (ensures (loc_aux_disjoint_pointer l1 p3)) + = loc_aux_disjoint_pointer_includes l1 (gpointer_of_buffer_cell b2 i) p3 + in + Classical.forall_intro (Classical.move_requires f) + +let loc_aux_disjoint_loc_aux_includes + (l1 l2 l3: loc_aux) +: Lemma + (requires (loc_aux_disjoint l1 l2 /\ loc_aux_includes l2 l3)) + (ensures (loc_aux_disjoint l1 l3)) += match l3 with + | LocPointer p3 -> + loc_aux_disjoint_loc_aux_includes_pointer l1 l2 p3 + | LocBuffer b3 -> + let f + (i: UInt32.t) + : Lemma + (requires ( + UInt32.v i < UInt32.v (buffer_length b3) + )) + (ensures ( + UInt32.v i < UInt32.v (buffer_length b3) /\ + loc_aux_disjoint_pointer l1 (gpointer_of_buffer_cell b3 i) + )) + = loc_aux_disjoint_loc_aux_includes_pointer l1 l2 (gpointer_of_buffer_cell b3 i) + in + Classical.forall_intro (Classical.move_requires f) + +let pointer_preserved + (#t: typ) + (p: pointer t) + (h h' : HS.mem) +: GTot Type0 += equal_values h p h' p + +let buffer_preserved + (#t: typ) + (b: buffer t) + (h h' : HS.mem) +: GTot Type0 += forall (i: FStar.UInt32.t) . 
FStar.UInt32.v i < FStar.UInt32.v (buffer_length b) ==> pointer_preserved (gpointer_of_buffer_cell b i) h h' + +let loc_aux_preserved (l: loc_aux) (h h' : HS.mem) : GTot Type0 = + match l with + | LocBuffer b -> buffer_preserved b h h' + | LocPointer p -> pointer_preserved p h h' + +let pointer_preserved_intro + (#t: typ) + (p: pointer t) + (h1 h2 : HS.mem) + (f: ( + (a' : Type0) -> + (pre: Preorder.preorder a') -> + (r': HS.mreference a' pre) -> + Lemma + (requires (h1 `HS.contains` r' /\ frameOf p == HS.frameOf r' /\ as_addr p == HS.as_addr r')) + (ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r')) + )) +: Lemma + (pointer_preserved p h1 h2) += let g () : Lemma + (requires (live h1 p)) + (ensures (pointer_preserved p h1 h2)) + = f _ _ (greference_of p) + in + Classical.move_requires g () + +let buffer_preserved_intro + (#t: typ) + (p: buffer t) + (h1 h2 : HS.mem) + (f: ( + (a' : Type0) -> + (pre: Preorder.preorder a') -> + (r': HS.mreference a' pre) -> + Lemma + (requires (h1 `HS.contains` r' /\ frameOf_buffer p == HS.frameOf r' /\ buffer_as_addr p == HS.as_addr r')) + (ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r')) + )) +: Lemma + (buffer_preserved p h1 h2) += let g + (i: FStar.UInt32.t { FStar.UInt32.v i < FStar.UInt32.v (buffer_length p) } ) + : Lemma + (ensures (pointer_preserved (gpointer_of_buffer_cell p i) h1 h2)) + = pointer_preserved_intro (gpointer_of_buffer_cell p i) h1 h2 f + in + Classical.forall_intro g + +let disjoint_not_self + (#t: typ) + (p: pointer t) +: Lemma + (disjoint p p ==> False) += Classical.move_requires (path_disjoint_not_path_equal (Pointer?.p p)) (Pointer?.p p) + +let loc_aux_in_addr + (l: loc_aux) + (r: HS.rid) + (n: nat) +: GTot Type0 += match l with + | LocBuffer b -> + frameOf_buffer b == r /\ + buffer_as_addr b == n + | LocPointer p -> + frameOf p == r /\ + as_addr p == n + +let aloc (r: HS.rid) (n: nat) : Tot Type0 = + (l: loc_aux { loc_aux_in_addr l r n } ) + +module MG = FStar.ModifiesGen + +let cls : MG.cls aloc = MG.Cls #aloc + (fun #r #a -> loc_aux_includes) + (fun #r #a x -> ()) + (fun #r #a -> loc_aux_includes_trans) + (fun #r #a -> loc_aux_disjoint) + (fun #r #a -> loc_aux_disjoint_sym) + (fun #r #a larger1 larger2 smaller1 smaller2 -> + loc_aux_disjoint_loc_aux_includes larger1 larger2 smaller2; + loc_aux_disjoint_sym larger1 smaller2; + loc_aux_disjoint_loc_aux_includes smaller2 larger1 smaller1; + loc_aux_disjoint_sym smaller2 smaller1 + ) + (fun #r #a -> loc_aux_preserved) + (fun #r #a x h -> ()) + (fun #r #a x h1 h2 h3 -> ()) + (fun #r #a b h1 h2 f -> + match b with + | LocPointer p -> pointer_preserved_intro p h1 h2 f + | LocBuffer p -> buffer_preserved_intro p h1 h2 f + ) + +let loc = MG.loc cls + +let loc_none = MG.loc_none + +let loc_union = MG.loc_union + +let loc_union_idem = MG.loc_union_idem + +let loc_pointer #t p = + MG.loc_of_aloc #_ #cls #(frameOf p) #(as_addr p) (LocPointer p) + +let loc_buffer #t p = + MG.loc_of_aloc #_ #cls #(frameOf_buffer p) #(buffer_as_addr p) (LocBuffer p) + +let loc_addresses = MG.loc_addresses #_ #cls false + +let loc_regions = MG.loc_regions false + +let loc_includes = MG.loc_includes + +let loc_includes_refl = MG.loc_includes_refl + +let loc_includes_trans = MG.loc_includes_trans + +let loc_includes_union_r = MG.loc_includes_union_r + +let loc_includes_union_l = MG.loc_includes_union_l + +let loc_includes_none = MG.loc_includes_none + +let loc_includes_pointer_pointer #t1 #t2 p1 p2 = + MG.loc_includes_aloc #_ #cls #(frameOf p1) #(as_addr p1) (LocPointer p1) 
(LocPointer p2) + +let loc_includes_gsingleton_buffer_of_pointer l #t p = + MG.loc_includes_aloc #_ #cls #(frameOf p) #(as_addr p) (LocPointer p) (LocBuffer (gsingleton_buffer_of_pointer p)); + MG.loc_includes_trans l (loc_pointer p) (loc_buffer (gsingleton_buffer_of_pointer p)) + +let loc_includes_gbuffer_of_array_pointer l #len #t p = + MG.loc_includes_aloc #_ #cls #(frameOf p) #(as_addr p) (LocPointer p) (LocBuffer (gbuffer_of_array_pointer p)); + MG.loc_includes_trans l (loc_pointer p) (loc_buffer (gbuffer_of_array_pointer p)) + +let loc_includes_gpointer_of_array_cell l #t b i = + MG.loc_includes_aloc #_ #cls #(frameOf_buffer b) #(buffer_as_addr b) (LocBuffer b) (LocPointer (gpointer_of_buffer_cell b i)); + MG.loc_includes_trans l (loc_buffer b) (loc_pointer (gpointer_of_buffer_cell b i)) + +let loc_includes_gsub_buffer_r l #t b i len = + MG.loc_includes_aloc #_ #cls #(frameOf_buffer b) #(buffer_as_addr b) (LocBuffer b) (LocBuffer (gsub_buffer b i len)); + MG.loc_includes_trans l (loc_buffer b) (loc_buffer (gsub_buffer b i len)) + +let loc_includes_gsub_buffer_l #t b i1 len1 i2 len2 = + let b1 = gsub_buffer b i1 len1 in + let b2 = gsub_buffer b1 (FStar.UInt32.sub i2 i1) len2 in + MG.loc_includes_aloc #_ #cls #(frameOf_buffer b) #(buffer_as_addr b) (LocBuffer b1) (LocBuffer b2) + +let loc_includes_addresses_pointer #t r s p = + MG.loc_includes_addresses_aloc #_ #cls false r s #(as_addr p) (LocPointer p) + +let loc_includes_addresses_buffer #t r s p = + MG.loc_includes_addresses_aloc #_ #cls false r s #(buffer_as_addr p) (LocBuffer p) + +let loc_includes_region_pointer #t s p = + MG.loc_includes_region_aloc #_ #cls false s #(frameOf p) #(as_addr p) (LocPointer p) + +let loc_includes_region_buffer #t s b = + MG.loc_includes_region_aloc #_ #cls false s #(frameOf_buffer b) #(buffer_as_addr b) (LocBuffer b) + +let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls false false + +let loc_includes_region_region = MG.loc_includes_region_region #_ #cls false false + +let loc_includes_region_union_l = MG.loc_includes_region_union_l false + +let loc_disjoint = MG.loc_disjoint + +let loc_disjoint_sym = MG.loc_disjoint_sym + +let loc_disjoint_none_r = MG.loc_disjoint_none_r + +let loc_disjoint_union_r = MG.loc_disjoint_union_r + +let loc_disjoint_root #value1 #value2 p1 p2 = + MG.loc_disjoint_addresses #_ #cls false false (frameOf p1) (frameOf p2) (Set.singleton (as_addr p1)) (Set.singleton (as_addr p2)); + loc_includes_addresses_pointer (frameOf p1) (Set.singleton (as_addr p1)) p1; + loc_includes_addresses_pointer (frameOf p2) (Set.singleton (as_addr p2)) p2; + MG.loc_disjoint_includes #_ #cls (loc_addresses (frameOf p1) (Set.singleton (as_addr p1))) (loc_addresses (frameOf p2) (Set.singleton (as_addr p2))) (loc_pointer p1) (loc_pointer p2) + +let loc_disjoint_gfield #l p fd1 fd2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf p) #(as_addr p) #(frameOf p) #(as_addr p) (LocPointer (gfield p fd1)) (LocPointer (gfield p fd2)) + +let loc_disjoint_gcell #length #value p i1 i2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf p) #(as_addr p) #(frameOf p) #(as_addr p) (LocPointer (gcell p i1)) (LocPointer (gcell p i2)) + +let loc_disjoint_includes = MG.loc_disjoint_includes + +let live_unused_in_disjoint_strong #value1 #value2 h p1 p2 = () + +let live_unused_in_disjoint #value1 #value2 h p1 p2 = + loc_disjoint_root p1 p2 + +let pointer_live_reference_unused_in_disjoint #value1 #value2 h p1 p2 = + loc_includes_addresses_pointer (frameOf p1) (Set.singleton (as_addr p1)) p1; + 
loc_includes_refl (MG.loc_freed_mreference p2); + disjoint_roots_intro_pointer_vs_reference h p1 p2; + MG.loc_disjoint_addresses #_ #cls false false (frameOf p1) (HS.frameOf p2) (Set.singleton (as_addr p1)) (Set.singleton (HS.as_addr p2)); + MG.loc_disjoint_includes #_ #cls (loc_addresses (frameOf p1) (Set.singleton (as_addr p1))) (MG.loc_freed_mreference p2) (loc_pointer p1) (MG.loc_freed_mreference p2) + +let reference_live_pointer_unused_in_disjoint #value1 #value2 h p1 p2 = + loc_includes_addresses_pointer (frameOf p2) (Set.singleton (as_addr p2)) p2; + loc_includes_refl (MG.loc_freed_mreference p1); + disjoint_roots_intro_reference_vs_pointer h p1 p2; + MG.loc_disjoint_addresses #_ #cls false false (HS.frameOf p1) (frameOf p2) (Set.singleton (HS.as_addr p1)) (Set.singleton (as_addr p2)); + MG.loc_disjoint_includes #_ #cls (MG.loc_freed_mreference p1) (loc_addresses (frameOf p2) (Set.singleton (as_addr p2))) (MG.loc_freed_mreference p1) (loc_pointer p2) + +let loc_disjoint_gsub_buffer #t b i1 len1 i2 len2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf_buffer b) #(buffer_as_addr b) #(frameOf_buffer b) #(buffer_as_addr b) (LocBuffer (gsub_buffer b i1 len1)) (LocBuffer (gsub_buffer b i2 len2)) + +let loc_disjoint_gpointer_of_buffer_cell #t b i1 i2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf_buffer b) #(buffer_as_addr b) #(frameOf_buffer b) #(buffer_as_addr b) (LocPointer (gpointer_of_buffer_cell b i1)) (LocPointer (gpointer_of_buffer_cell b i2)) + +let loc_disjoint_addresses = MG.loc_disjoint_addresses #_ #cls false false + +let loc_disjoint_pointer_addresses #t p r n = + loc_disjoint_includes (loc_addresses (frameOf p) (Set.singleton (as_addr p))) (loc_addresses r n) (loc_pointer p) (loc_addresses r n) + +let loc_disjoint_buffer_addresses #t p r n = + loc_disjoint_includes (loc_addresses (frameOf_buffer p) (Set.singleton (buffer_as_addr p))) (loc_addresses r n) (loc_buffer p) (loc_addresses r n) + +let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls false false + +let modifies = MG.modifies + +let modifies_loc_regions_intro rs h1 h2 = + MG.modifies_loc_regions_intro #_ #cls rs h1 h2; + MG.loc_includes_region_region #_ #cls false true rs rs; + MG.modifies_loc_includes (loc_regions rs) h1 h2 (MG.loc_regions true rs) + +let modifies_pointer_elim s h1 h2 #a' p' = + MG.modifies_aloc_elim #_ #_ #(frameOf p') #(as_addr p') (LocPointer p') s h1 h2 + +val modifies_buffer_elim' + (#t1: typ) + (b: buffer t1) + (p: loc) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_buffer b) p /\ + buffer_live h b /\ + UInt32.v (buffer_length b) > 0 /\ + modifies p h h' + )) + (ensures ( + buffer_live h' b /\ ( + buffer_readable h b ==> ( + buffer_readable h' b /\ + buffer_as_seq h b == buffer_as_seq h' b + )))) + +let modifies_buffer_elim' #t1 b p h h' = + Classical.forall_intro_2 HS.lemma_tip_top; + loc_disjoint_sym (loc_buffer b) p; + let n = UInt32.v (buffer_length b) in + begin + assert (n > 0); + let pre + (i: UInt32.t) + : GTot Type0 + = UInt32.v i < n + in + let post + (i: UInt32.t) + : GTot Type0 + = pre i /\ ( + let q = gpointer_of_buffer_cell b i in + equal_values h q h' q + ) + in + let f + (i: UInt32.t) + : Lemma + (requires (pre i)) + (ensures (post i)) + = modifies_pointer_elim p h h' (gpointer_of_buffer_cell b i) + in + f 0ul; // for the liveness of the whole buffer + Classical.forall_intro (Classical.move_requires f); + assert (buffer_readable h b ==> buffer_readable h' b); + let g () : Lemma + (requires (buffer_readable h b)) + (ensures (buffer_as_seq h b == 
buffer_as_seq h' b)) + = let s = buffer_as_seq h b in + let s' = buffer_as_seq h' b in + Seq.lemma_eq_intro s s'; + Seq.lemma_eq_elim s s' + in + Classical.move_requires g () + end + +let modifies_buffer_elim #t1 b p h h' = + if buffer_length b = 0ul + then () + else modifies_buffer_elim' b p h h' + +let modifies_reference_elim #t b p h h' = + MG.loc_includes_addresses_addresses #_ cls false true (HS.frameOf b) (Set.singleton (HS.as_addr b)) (Set.singleton (HS.as_addr b)); + MG.loc_includes_refl p; + MG.loc_disjoint_includes (MG.loc_freed_mreference b) p (MG.loc_mreference b) p; + MG.modifies_mreference_elim b p h h' + +let modifies_refl = MG.modifies_refl + +let modifies_loc_includes = MG.modifies_loc_includes + +let modifies_trans = MG.modifies_trans + +(** Concrete allocators, getters and setters *) + +let screate + (value:typ) + (s: option (type_of_typ value)) += let h0 = HST.get () in + let s = match s with + | Some s -> ovalue_of_value value s + | _ -> none_ovalue value + in + let content: HS.reference pointer_ref_contents = + HST.salloc (| value, s |) + in + let aref = HS.aref_of content in + let p = Pointer value aref PathBase in + let h1 = HST.get () in + assert (HS.aref_live_at h1 aref pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents)); + let f () : Lemma ( + let gref = HS.greference_of aref pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) in + HS.sel h1 gref == HS.sel h1 content + ) + = let gref = HS.greference_of aref pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) in + assert (HS.frameOf content == HS.frameOf gref); + assert (HS.as_addr content == HS.as_addr gref); + HS.lemma_sel_same_addr h1 content gref + in + f (); + MG.modifies_intro loc_none h0 h1 + (fun _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + (fun r a b -> + cls.MG.same_mreference_aloc_preserved b h0 h1 (fun _ _ _ -> ()) + ) + ; + p + +// TODO: move to HyperStack? 
+let domain_upd (#a:Type) (h:HS.mem) (x:HS.reference a{HS.live_region h (HS.frameOf x)}) (v:a) : Lemma + (requires True) + (ensures (Map.domain (HS.get_hmap h) == Map.domain (HS.get_hmap (HS.upd h x v)))) + = let m = (HS.get_hmap h) in + let m' = Map.upd m (HS.frameOf x) (Heap.upd (Map.sel m (HS.frameOf x)) (HS.as_ref x) v) in + Set.lemma_equal_intro (Map.domain m) (Map.domain m') + +let ecreate + (t:typ) + (r:HS.rid) + (s: option (type_of_typ t)) += let h0 = HST.get () in + let s0 = s in + let s = match s with + | Some s -> ovalue_of_value t s + | _ -> none_ovalue t + in + let content: HS.ref pointer_ref_contents = + HST.ralloc r (| t, s |) + in + domain_upd h0 content (| t, s |) ; + let aref = HS.aref_of content in + let p = Pointer t aref PathBase in + let h1 = HST.get () in + assert (HS.aref_live_at h1 aref pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents)); + let f () : Lemma ( + let gref = HS.greference_of aref pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) in + HS.sel h1 gref == HS.sel h1 content + ) + = let gref = HS.greference_of aref pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) in + assert (HS.frameOf content == HS.frameOf gref); + assert (HS.as_addr content == HS.as_addr gref); + HS.lemma_sel_same_addr h1 content gref + in + f (); + MG.modifies_intro loc_none h0 h1 + (fun _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + (fun r a b -> + cls.MG.same_mreference_aloc_preserved b h0 h1 (fun _ _ _ -> ()) + ) + ; + p + +let field + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) += _field p fd + +let ufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += _ufield p fd + +let cell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + i += _cell p i + +let reference_of + (#value: typ) + (h: HS.mem) + (p: pointer value) +: Pure (HS.reference pointer_ref_contents) + (requires (live h p)) + (ensures (fun x -> + live h p /\ + x == HS.reference_of h (Pointer?.contents p) pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) /\ + HS.frameOf x == HS.frameOf (greference_of p) /\ + HS.as_addr x == HS.as_addr (greference_of p) /\ + (forall h' . h' `HS.contains` x <==> h' `HS.contains` (greference_of p)) /\ + (forall h' . (h' `HS.contains` x \/ h' `HS.contains` (greference_of p)) ==> (h' `HS.contains` x /\ h' `HS.contains` (greference_of p) /\ HS.sel h' x == HS.sel h' (greference_of p))) /\ + (forall h' z . + (h' `HS.contains` x \/ h' `HS.contains` (greference_of p)) ==> + (h' `HS.contains` x /\ h' `HS.contains` (greference_of p) /\ HS.upd h' x z == HS.upd h' (greference_of p) z) + ))) += let x = + HS.reference_of h (Pointer?.contents p) pointer_ref_contents (Heap.trivial_preorder pointer_ref_contents) + in + let f (h' : HS.mem) : Lemma + ( (exists h' . live h' p) /\ // necessary to typecheck Classical.forall_intro + (h' `HS.contains` x <==> h' `HS.contains` (greference_of p)) /\ + ((h' `HS.contains` x \/ h' `HS.contains` (greference_of p)) ==> HS.sel h' x == HS.sel h' (greference_of p))) + = let y = greference_of p in + Classical.move_requires (HS.lemma_sel_same_addr h' y) x; + Classical.move_requires (HS.lemma_sel_same_addr h' x) y + in + let g (z: pointer_ref_contents) (h' : HS.mem) : Lemma ( + (exists h' . 
live h' p) /\ + ((h' `HS.contains` x \/ h' `HS.contains` (greference_of p)) ==> (h' `HS.contains` x /\ h' `HS.contains` (greference_of p) /\ HS.upd h' x z == HS.upd h' (greference_of p) z)) + ) + = let y = greference_of p in + Classical.move_requires (HS.lemma_upd_same_addr h' y x) z; + Classical.move_requires (HS.lemma_upd_same_addr h' x y) z + in + Classical.forall_intro f ; + Classical.forall_intro_2 g; + x + +let read + (#value: typ) + (p: pointer value) += let h = HST.get () in + let r = reference_of h p in + HST.witness_region (HS.frameOf r); + HST.witness_hsref r; + let (| _ , c |) = !r in + value_of_ovalue value (path_sel c (Pointer?.p p)) + +let is_null + (#t: typ) + (p: npointer t) += match p with + | NullPtr -> true + | _ -> false + +let owrite + (#a: typ) + (b: pointer a) + (z: otype_of_typ a) +: HST.Stack unit + (requires (fun h -> live h b)) + (ensures (fun h0 _ h1 -> + live h0 b /\ + live h1 b /\ + modifies_1 b h0 h1 /\ ( + let g = greference_of b in + let (| _, c1 |) = HS.sel h1 g in + path_sel c1 (Pointer?.p b) == z + ))) += let h0 = HST.get () in + let r = reference_of h0 b in + HST.witness_region (HS.frameOf r); + HST.witness_hsref r; + let v0 = !r in + let (| t , c0 |) = v0 in + let c1 = path_upd c0 (Pointer?.p b) z in + let v1 = (| t, c1 |) in + r := v1; + let h1 = HST.get () in + let e () : Lemma ( + let gref = greference_of b in ( + HS.frameOf r == HS.frameOf gref /\ + HS.as_addr r == HS.as_addr gref /\ + HS.sel h0 gref == v0 /\ + HS.sel h1 gref == v1 + )) + = let gref = greference_of b in + HS.lemma_sel_same_addr h0 r gref; + HS.lemma_sel_same_addr h1 r gref + in + e (); + let prf_alocs + (r': HS.rid) + (a': nat) + (b' : aloc r' a') + : Lemma + (requires (MG.loc_disjoint (MG.loc_of_aloc b') (loc_pointer b))) + (ensures (cls.MG.aloc_preserved b' h0 h1)) + = + let f + (t: typ) + (p: pointer t) + : Lemma + (requires ( + live h0 p /\ + disjoint b p + )) + (ensures ( + equal_values h0 p h1 p + )) + = let grefp = greference_of p in + if frameOf p = frameOf b && as_addr p = as_addr b + then begin + HS.lemma_sel_same_addr h0 r grefp; + HS.lemma_sel_same_addr h1 r grefp; + path_sel_upd_other' (Pointer?.p b) c0 z (Pointer?.p p) + end + else () + in + let f' + (t: typ) + (p: pointer t) + : Lemma + ( ( + live h0 p /\ + disjoint b p + ) ==> ( + equal_values h0 p h1 p + )) + = Classical.move_requires (f t) p + in + MG.loc_disjoint_aloc_elim #_ #cls #r' #a' #(frameOf b) #(as_addr b) b' (LocPointer b); + Classical.forall_intro_2 f' + in + MG.modifies_intro (loc_pointer b) h0 h1 + (fun _ -> ()) + (fun t' pre' p' -> + loc_disjoint_sym (MG.loc_mreference p') (loc_pointer b); + MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (LocPointer b) true (HS.frameOf p') (Set.singleton (HS.as_addr p')) + ) + (fun _ _ _ -> ()) + (fun _ _ -> ()) + prf_alocs + +let write #a b z = + owrite b (ovalue_of_value a z) + +let write_union_field + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) += let field_t : typ = typ_of_struct_field l fd in + + // We could avoid removing the data if `fd` is already the current tag. + + // However this seems impossible to specify with the current set of + // user-available predicates and functions (the only thing we have to + // distinguish between actual data and dummy values is `readable`, which is + // too coarse-grained for our needs). 
+ let vu : option (gtdata (struct_field l) (type_of_struct_field' l otype_of_typ)) = + Some (gtdata_create fd (none_ovalue field_t)) in + let vu : otype_of_typ (TUnion l) = vu in + owrite p vu + +(** Lemmas and patterns *) + +let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped + +let modifies_only_live_regions = MG.modifies_only_live_regions + +let modifies_loc_addresses_intro r a l h1 h2 = + MG.modifies_loc_addresses_intro r a l h1 h2; + MG.loc_includes_addresses_addresses #_ cls false true r a a; + MG.loc_includes_refl l; + MG.loc_includes_union_l (loc_addresses r a) l l; + MG.loc_includes_union_l (loc_addresses r a) l (MG.loc_addresses true r a); + MG.loc_includes_union_r (loc_union (loc_addresses r a) l) (MG.loc_addresses true r a) l; + MG.modifies_loc_includes (loc_union (loc_addresses r a) l) h1 h2 (loc_union (MG.loc_addresses true r a) l) + +(* `modifies` and the readable permission *) + +(** NOTE: we historically used to have this lemma for arbitrary +pointer inclusion, but that became wrong for unions. *) + +let modifies_1_readable_struct #l f p h h' = + readable_struct h' p + +let modifies_1_readable_array #t #len i p h h' = + readable_array h' p + +(* buffer read: can be defined as a derived operation: pointer_of_buffer_cell ; read *) + +let read_buffer + (#t: typ) + (b: buffer t) + i += read (pointer_of_buffer_cell b i) + +let write_buffer + (#t: typ) + (b: buffer t) + i v += write (pointer_of_buffer_cell b i) v + +(* unused_in, cont'd *) + +let buffer_live_unused_in_disjoint #t1 #t2 h b1 b2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf_buffer b1) #(buffer_as_addr b1) #(frameOf_buffer b2) #(buffer_as_addr b2) (LocBuffer b1) (LocBuffer b2) + +let pointer_live_buffer_unused_in_disjoint #t1 #t2 h b1 b2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf_buffer b2) #(buffer_as_addr b2) (LocPointer b1) (LocBuffer b2) + +let buffer_live_pointer_unused_in_disjoint #t1 #t2 h b1 b2 = + MG.loc_disjoint_aloc_intro #_ #cls #(frameOf_buffer b1) #(buffer_as_addr b1) #(frameOf b2) #(as_addr b2) (LocBuffer b1) (LocPointer b2) + +let reference_live_buffer_unused_in_disjoint #t1 #t2 h b1 b2 = + loc_includes_addresses_buffer (frameOf_buffer b2) (Set.singleton (buffer_as_addr b2)) b2; + loc_includes_refl (MG.loc_freed_mreference b1); + MG.loc_disjoint_addresses #_ #cls false false (HS.frameOf b1) (frameOf_buffer b2) (Set.singleton (HS.as_addr b1)) (Set.singleton (buffer_as_addr b2)); + MG.loc_disjoint_includes #_ #cls (MG.loc_freed_mreference b1) (loc_addresses (frameOf_buffer b2) (Set.singleton (buffer_as_addr b2))) (MG.loc_freed_mreference b1) (loc_buffer b2) + +let buffer_live_reference_unused_in_disjoint #t1 #t2 h b1 b2 = + loc_includes_addresses_buffer (frameOf_buffer b1) (Set.singleton (buffer_as_addr b1)) b1; + loc_includes_refl (MG.loc_freed_mreference b2); + (match b1.broot with + | BufferRootSingleton p1 -> disjoint_roots_intro_pointer_vs_reference h p1 b2 + | BufferRootArray p1 -> disjoint_roots_intro_pointer_vs_reference h p1 b2 + ); + MG.loc_disjoint_addresses #_ #cls false false (frameOf_buffer b1) (HS.frameOf b2) (Set.singleton (buffer_as_addr b1)) (Set.singleton (HS.as_addr b2)); + MG.loc_disjoint_includes #_ #cls (loc_addresses (frameOf_buffer b1) (Set.singleton (buffer_as_addr b1))) (MG.loc_freed_mreference b2) (loc_buffer b1) (MG.loc_freed_mreference b2) + +(* Buffer inclusion without existential quantifiers: remnants of the legacy buffer interface *) + +let root_buffer #t b = + let root = Buffer?.broot b in + match root with + | 
BufferRootSingleton p -> Buffer root 0ul 1ul + | BufferRootArray #_ #len _ -> Buffer root 0ul len + +let buffer_idx #t b = + Buffer?.bidx b + +let buffer_eq_gsub_root #t b = + assert (UInt32.add 0ul (buffer_idx b) == buffer_idx b) + +let root_buffer_gsub_buffer #t b i len = () + +let buffer_idx_gsub_buffer #t b i len = () + +let buffer_includes #t blarge bsmall = + let () = () in ( + root_buffer blarge == root_buffer bsmall /\ + UInt32.v (buffer_idx blarge) <= UInt32.v (buffer_idx bsmall) /\ + UInt32.v (buffer_idx bsmall) + UInt32.v (buffer_length bsmall) <= UInt32.v (buffer_idx blarge) + UInt32.v (buffer_length blarge) + ) + +let buffer_includes_refl #t b = () + +let buffer_includes_trans #t b1 b2 b3 = () + +let buffer_includes_gsub_r #t b i len = () + +let buffer_includes_gsub #t b i1 i2 len1 len2 = () + +let buffer_includes_elim #t b1 b2 = () + +let buffer_includes_loc_includes #t b1 b2 = + buffer_includes_elim b1 b2; + loc_includes_refl (loc_buffer b1); + loc_includes_gsub_buffer_r (loc_buffer b1) b1 (UInt32.sub (buffer_idx b2) (buffer_idx b1)) (buffer_length b2) + + +(* Type class instance *) + +let cloc_aloc = aloc + +let cloc_cls = cls + +let cloc_of_loc l = l + +let loc_of_cloc l = l + +let loc_of_cloc_of_loc l = () + +let cloc_of_loc_of_cloc l = () + +let loc_includes_to_cloc l1 l2 = () + +let loc_disjoint_to_cloc l1 l2 = () + +let modifies_to_cloc l h1 h2 = () diff --git a/stage0/ulib/legacy/FStar.Pointer.Base.fsti b/stage0/ulib/legacy/FStar.Pointer.Base.fsti new file mode 100644 index 00000000000..82e8d8e72be --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Base.fsti @@ -0,0 +1,2492 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Pointer.Base + +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +open FStar.HyperStack.ST // for := , ! + +(*** Definitions *) + +(** Type codes *) + +type base_typ = +| TUInt +| TUInt8 +| TUInt16 +| TUInt32 +| TUInt64 +| TInt +| TInt8 +| TInt16 +| TInt32 +| TInt64 +| TChar +| TBool +| TUnit + +// C11, Sect. 6.2.5 al. 20: arrays must be nonempty +type array_length_t = (length: UInt32.t { UInt32.v length > 0 } ) + +type typ = +| TBase: + (b: base_typ) -> + typ +| TStruct: + (l: struct_typ) -> + typ +| TUnion: + (l: union_typ) -> + typ +| TArray: + (length: array_length_t ) -> + (t: typ) -> + typ +| TPointer: + (t: typ) -> + typ +| TNPointer: + (t: typ) -> + typ +| TBuffer: + (t: typ) -> + typ +and struct_typ' = (l: list (string & typ) { + Cons? l /\ // C11, 6.2.5 al. 20: structs and unions must have at least one field + List.Tot.noRepeats (List.Tot.map fst l) +}) +and struct_typ = { + name: string; + fields: struct_typ' ; +} +and union_typ = struct_typ + +(** `struct_field l` is the type of fields of `TStruct l` + (i.e. a refinement of `string`). 
+*) +let struct_field' + (l: struct_typ') +: Tot eqtype += (s: string { List.Tot.mem s (List.Tot.map fst l) } ) + +let struct_field + (l: struct_typ) +: Tot eqtype += struct_field' l.fields + +(** `union_field l` is the type of fields of `TUnion l` + (i.e. a refinement of `string`). +*) +let union_field = struct_field + +(** `typ_of_struct_field l f` is the type of data associated with field `f` in + `TStruct l` (i.e. a refinement of `typ`). +*) + +let typ_of_struct_field' + (l: struct_typ') + (f: struct_field' l) +: Tot (t: typ {t << l}) += List.Tot.assoc_mem f l; + let y = Some?.v (List.Tot.assoc f l) in + List.Tot.assoc_precedes f l y; + y + +let typ_of_struct_field + (l: struct_typ) + (f: struct_field l) +: Tot (t: typ {t << l}) += typ_of_struct_field' l.fields f + +(** `typ_of_union_field l f` is the type of data associated with field `f` in + `TUnion l` (i.e. a refinement of `typ`). +*) +let typ_of_union_field + (l: union_typ) + (f: union_field l) +: Tot (t: typ {t << l}) += typ_of_struct_field l f + +let rec typ_depth + (t: typ) +: GTot nat += match t with + | TArray _ t -> 1 + typ_depth t + | TUnion l + | TStruct l -> 1 + struct_typ_depth l.fields + | _ -> 0 +and struct_typ_depth + (l: list (string & typ)) +: GTot nat += match l with + | [] -> 0 + | h :: l -> + let (_, t) = h in // matching like this prevents needing two units of ifuel + let n1 = typ_depth t in + let n2 = struct_typ_depth l in + if n1 > n2 then n1 else n2 + +let rec typ_depth_typ_of_struct_field + (l: struct_typ') + (f: struct_field' l) +: Lemma + (ensures (typ_depth (typ_of_struct_field' l f) <= struct_typ_depth l)) + (decreases l) += let ((f', _) :: l') = l in + if f = f' + then () + else begin + let f: string = f in + assert (List.Tot.mem f (List.Tot.map fst l')); + List.Tot.assoc_mem f l'; + typ_depth_typ_of_struct_field l' f + end + +(** Pointers to data of type t. + + This defines two main types: + - `npointer (t: typ)`, a pointer that may be "NULL"; + - `pointer (t: typ)`, a pointer that cannot be "NULL" + (defined as a refinement of `npointer`). + + `nullptr #t` (of type `npointer t`) represents the "NULL" value. +*) + +val npointer (t: typ) : Tot Type0 + +(** The null pointer *) + +val nullptr (#t: typ): Tot (npointer t) + +val g_is_null (#t: typ) (p: npointer t) : GTot bool + +val g_is_null_intro + (t: typ) +: Lemma + (g_is_null (nullptr #t) == true) + [SMTPat (g_is_null (nullptr #t))] + +// concrete, for subtyping +let pointer (t: typ) : Tot Type0 = (p: npointer t { g_is_null p == false } ) + +(** Buffers *) + +val buffer (t: typ): Tot Type0 + +(** Interpretation of type codes. + + Defines functions mapping from type codes (`typ`) to their interpretation as + FStar types. For example, `type_of_typ (TBase TUInt8)` is `FStar.UInt8.t`. +*) + +(** Interpretation of base types. *) +let type_of_base_typ + (t: base_typ) +: Tot Type0 += match t with + | TUInt -> nat + | TUInt8 -> FStar.UInt8.t + | TUInt16 -> FStar.UInt16.t + | TUInt32 -> FStar.UInt32.t + | TUInt64 -> FStar.UInt64.t + | TInt -> int + | TInt8 -> FStar.Int8.t + | TInt16 -> FStar.Int16.t + | TInt32 -> FStar.Int32.t + | TInt64 -> FStar.Int64.t + | TChar -> FStar.Char.char + | TBool -> bool + | TUnit -> unit + +(** Interpretation of arrays of elements of (interpreted) type `t`. 
*) +type array (length: array_length_t) (t: Type) = (s: Seq.seq t {Seq.length s == UInt32.v length}) + +let type_of_struct_field'' + (l: struct_typ') + (type_of_typ: ( + (t: typ { t << l } ) -> + Tot Type0 + )) + (f: struct_field' l) +: Tot Type0 = + List.Tot.assoc_mem f l; + let y = typ_of_struct_field' l f in + List.Tot.assoc_precedes f l y; + type_of_typ y + +[@@ unifier_hint_injective] +let type_of_struct_field' + (l: struct_typ) + (type_of_typ: ( + (t: typ { t << l } ) -> + Tot Type0 + )) + (f: struct_field l) +: Tot Type0 += type_of_struct_field'' l.fields type_of_typ f + +val struct (l: struct_typ) : Tot Type0 +val union (l: union_typ) : Tot Type0 + +(* Interprets a type code (`typ`) as a FStar type (`Type0`). *) +let rec type_of_typ + (t: typ) +: Tot Type0 += match t with + | TBase b -> type_of_base_typ b + | TStruct l -> + struct l + | TUnion l -> + union l + | TArray length t -> + array length (type_of_typ t) + | TPointer t -> + pointer t + | TNPointer t -> + npointer t + | TBuffer t -> + buffer t + +let type_of_typ_array + (len: array_length_t) + (t: typ) +: Lemma + (type_of_typ (TArray len t) == array len (type_of_typ t)) + [SMTPat (type_of_typ (TArray len t))] += () + +let type_of_struct_field + (l: struct_typ) +: Tot (struct_field l -> Tot Type0) += type_of_struct_field' l (fun (x:typ{x << l}) -> type_of_typ x) + +let type_of_typ_struct + (l: struct_typ) +: Lemma + (type_of_typ (TStruct l) == struct l) + [SMTPat (type_of_typ (TStruct l))] += assert_norm (type_of_typ (TStruct l) == struct l) + +let type_of_typ_type_of_struct_field + (l: struct_typ) + (f: struct_field l) +: Lemma + (type_of_typ (typ_of_struct_field l f) == type_of_struct_field l f) + [SMTPat (type_of_typ (typ_of_struct_field l f))] += () + +val struct_sel (#l: struct_typ) (s: struct l) (f: struct_field l) : Tot (type_of_struct_field l f) + +let dfst_struct_field + (s: struct_typ) + (p: (x: struct_field s & type_of_struct_field s x)) +: Tot string += + let (| f, _ |) = p in + f + +let struct_literal (s: struct_typ) : Tot Type0 = list (x: struct_field s & type_of_struct_field s x) + +let struct_literal_wf (s: struct_typ) (l: struct_literal s) : Tot bool = + List.Tot.sortWith FStar.String.compare (List.Tot.map fst s.fields) = + List.Tot.sortWith FStar.String.compare + (List.Tot.map (dfst_struct_field s) l) + +let fun_of_list + (s: struct_typ) + (l: struct_literal s) + (f: struct_field s) +: Pure (type_of_struct_field s f) + (requires (normalize_term (struct_literal_wf s l) == true)) + (ensures (fun _ -> True)) += + let f' : string = f in + let phi (p: (x: struct_field s & type_of_struct_field s x)) : Tot bool = + dfst_struct_field s p = f' + in + match List.Tot.find phi l with + | Some p -> let (| _, v |) = p in v + | _ -> + List.Tot.sortWith_permutation FStar.String.compare (List.Tot.map fst s.fields); + List.Tot.sortWith_permutation FStar.String.compare (List.Tot.map (dfst_struct_field s) l); + List.Tot.mem_memP f' (List.Tot.map fst s.fields); + List.Tot.mem_count (List.Tot.map fst s.fields) f'; + List.Tot.mem_count (List.Tot.map (dfst_struct_field s) l) f'; + List.Tot.mem_memP f' (List.Tot.map (dfst_struct_field s) l); + List.Tot.memP_map_elim (dfst_struct_field s) f' l; + Classical.forall_intro (Classical.move_requires (List.Tot.find_none phi l)); + false_elim () + +val struct_create_fun (l: struct_typ) (f: ((fd: struct_field l) -> Tot (type_of_struct_field l fd))) : Tot (struct l) + +let struct_create + (s: struct_typ) + (l: struct_literal s) +: Pure (struct s) + (requires (normalize_term (struct_literal_wf s 
l) == true)) + (ensures (fun _ -> True)) += struct_create_fun s (fun_of_list s l) + +val struct_sel_struct_create_fun + (l: struct_typ) + (f: ((fd: struct_field l) -> Tot (type_of_struct_field l fd))) + (fd: struct_field l) +: Lemma + (struct_sel (struct_create_fun l f) fd == f fd) + [SMTPat (struct_sel (struct_create_fun l f) fd)] + +let type_of_typ_union + (l: union_typ) +: Lemma + (type_of_typ (TUnion l) == union l) + [SMTPat (type_of_typ (TUnion l))] += assert_norm (type_of_typ (TUnion l) == union l) + +val union_get_key (#l: union_typ) (v: union l) : GTot (struct_field l) + +val union_get_value + (#l: union_typ) + (v: union l) + (fd: struct_field l) +: Pure (type_of_struct_field l fd) + (requires (union_get_key v == fd)) + (ensures (fun _ -> True)) + +val union_create + (l: union_typ) + (fd: struct_field l) + (v: type_of_struct_field l fd) +: Tot (union l) + +(*** Semantics of pointers *) + +(** Operations on pointers *) + +val equal + (#t1 #t2: typ) + (p1: pointer t1) + (p2: pointer t2) +: Ghost bool + (requires True) + (ensures (fun b -> b == true <==> t1 == t2 /\ p1 == p2 )) + +val as_addr (#t: typ) (p: pointer t): GTot (x: nat { x > 0 } ) + +val unused_in + (#value: typ) + (p: pointer value) + (h: HS.mem) +: GTot Type0 + +val live + (#value: typ) + (h: HS.mem) + (p: pointer value) +: GTot Type0 + +val nlive + (#value: typ) + (h: HS.mem) + (p: npointer value) +: GTot Type0 + +val live_nlive + (#value: typ) + (h: HS.mem) + (p: pointer value) +: Lemma + (nlive h p <==> live h p) + [SMTPat (nlive h p)] + +val g_is_null_nlive + (#t: typ) + (h: HS.mem) + (p: npointer t) +: Lemma + (requires (g_is_null p)) + (ensures (nlive h p)) + [SMTPat (g_is_null p); SMTPat (nlive h p)] + +val live_not_unused_in + (#value: typ) + (h: HS.mem) + (p: pointer value) +: Lemma + (ensures (live h p /\ p `unused_in` h ==> False)) + [SMTPat (live h p); SMTPat (p `unused_in` h)] + +val gread + (#value: typ) + (h: HS.mem) + (p: pointer value) +: GTot (type_of_typ value) + +val frameOf + (#value: typ) + (p: pointer value) +: GTot HS.rid + +val live_region_frameOf + (#value: typ) + (h: HS.mem) + (p: pointer value) +: Lemma + (requires (live h p)) + (ensures (HS.live_region h (frameOf p))) + [SMTPatOr [ + [SMTPat (HS.live_region h (frameOf p))]; + [SMTPat (live h p)] + ]] + +val disjoint_roots_intro_pointer_vs_pointer + (#value1 value2: typ) + (h: HS.mem) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (live h p1 /\ unused_in p2 h)) + (ensures (frameOf p1 <> frameOf p2 \/ as_addr p1 =!= as_addr p2)) + +val disjoint_roots_intro_pointer_vs_reference + (#value1: typ) + (#value2: Type) + (h: HS.mem) + (p1: pointer value1) + (p2: HS.reference value2) +: Lemma + (requires (live h p1 /\ p2 `HS.unused_in` h)) + (ensures (frameOf p1 <> HS.frameOf p2 \/ as_addr p1 =!= HS.as_addr p2)) + +val disjoint_roots_intro_reference_vs_pointer + (#value1: Type) + (#value2: typ) + (h: HS.mem) + (p1: HS.reference value1) + (p2: pointer value2) +: Lemma + (requires (HS.contains h p1 /\ p2 `unused_in` h)) + (ensures (HS.frameOf p1 <> frameOf p2 \/ HS.as_addr p1 =!= as_addr p2)) + +val is_mm + (#value: typ) + (p: pointer value) +: GTot bool + +(* // TODO: recover with addresses? 
+val recall + (#value: Type) + (p: pointer value {is_eternal_region (frameOf p) && not (is_mm p)}) +: HST.Stack unit + (requires (fun m -> True)) + (ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 p)) +*) + +val gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: GTot (pointer (typ_of_struct_field l fd)) + +val as_addr_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (as_addr (gfield p fd) == as_addr p)) + [SMTPat (as_addr (gfield p fd))] + +val unused_in_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) + (h: HS.mem) +: Lemma + (requires True) + (ensures (unused_in (gfield p fd) h <==> unused_in p h)) + [SMTPat (unused_in (gfield p fd) h)] + +val live_gfield + (h: HS.mem) + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (live h (gfield p fd) <==> live h p)) + [SMTPat (live h (gfield p fd))] + +val gread_gfield + (h: HS.mem) + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (gread h (gfield p fd) == struct_sel (gread h p) fd)) + [SMTPatOr [[SMTPat (gread h (gfield p fd))]; [SMTPat (struct_sel (gread h p) fd)]]] + +val frameOf_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (frameOf (gfield p fd) == frameOf p)) + [SMTPat (frameOf (gfield p fd))] + +val is_mm_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (is_mm (gfield p fd) <==> is_mm p)) + [SMTPat (is_mm (gfield p fd))] + +val gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: GTot (pointer (typ_of_struct_field l fd)) + +val as_addr_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (as_addr (gufield p fd) == as_addr p)) + [SMTPat (as_addr (gufield p fd))] + +val unused_in_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) + (h: HS.mem) +: Lemma + (requires True) + (ensures (unused_in (gufield p fd) h <==> unused_in p h)) + [SMTPat (unused_in (gufield p fd) h)] + +val live_gufield + (h: HS.mem) + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (live h (gufield p fd) <==> live h p)) + [SMTPat (live h (gufield p fd))] + +val gread_gufield + (h: HS.mem) + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (union_get_key (gread h p) == fd)) + (ensures ( + union_get_key (gread h p) == fd /\ + gread h (gufield p fd) == union_get_value (gread h p) fd + )) + [SMTPatOr [[SMTPat (gread h (gufield p fd))]; [SMTPat (union_get_value (gread h p) fd)]]] + +val frameOf_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (frameOf (gufield p fd) == frameOf p)) + [SMTPat (frameOf (gufield p fd))] + +val is_mm_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (is_mm (gufield p fd) <==> is_mm p)) + [SMTPat (is_mm (gufield p fd))] + +val gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Ghost (pointer value) + (requires (UInt32.v i < UInt32.v length)) + (ensures (fun _ -> True)) + +val as_addr_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < 
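(* A small hedged sketch of how the ghost accessors compose, reusing the
   hypothetical `point` struct from the sketch above; `point_x` is illustrative:

     let point_x (p: pointer (TStruct point))
     : GTot (pointer (typ_of_struct_field point "x"))
     = gfield p "x"

   The SMT patterns on `as_addr_gfield`, `live_gfield` and `gread_gfield` then
   reduce facts about `point_x p` to facts about `p`, e.g. liveness of the field
   pointer follows from liveness of the struct pointer. *)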
UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ as_addr (gcell p i) == as_addr p)) + [SMTPat (as_addr (gcell p i))] + +val unused_in_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ (unused_in (gcell p i) h <==> unused_in p h))) + [SMTPat (unused_in (gcell p i) h)] + +val live_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ (live h (gcell p i) <==> live h p))) + [SMTPat (live h (gcell p i))] + +val gread_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ gread h (gcell p i) == Seq.index (gread h p) (UInt32.v i))) + [SMTPat (gread h (gcell p i))] + +val frameOf_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ frameOf (gcell p i) == frameOf p)) + [SMTPat (frameOf (gcell p i))] + +val is_mm_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ is_mm (gcell p i) == is_mm p)) + [SMTPat (is_mm (gcell p i))] + +val includes + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: GTot bool + +val includes_refl + (#t: typ) + (p: pointer t) +: Lemma + (ensures (includes p p)) + [SMTPat (includes p p)] + +val includes_trans + (#t1 #t2 #t3: typ) + (p1: pointer t1) + (p2: pointer t2) + (p3: pointer t3) +: Lemma + (requires (includes p1 p2 /\ includes p2 p3)) + (ensures (includes p1 p3)) + +val includes_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (includes p (gfield p fd))) + +val includes_gufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (includes p (gufield p fd))) + +val includes_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ includes p (gcell p i))) + +(** The readable permission. + We choose to implement it only abstractly, instead of explicitly + tracking the permission in the heap. +*) + +val readable + (#a: typ) + (h: HS.mem) + (b: pointer a) +: GTot Type0 + +val readable_live + (#a: typ) + (h: HS.mem) + (b: pointer a) +: Lemma + (requires (readable h b)) + (ensures (live h b)) + [SMTPatOr [ + [SMTPat (readable h b)]; + [SMTPat (live h b)]; + ]] + +val readable_gfield + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires (readable h p)) + (ensures (readable h (gfield p fd))) + [SMTPat (readable h (gfield p fd))] + +val readable_struct + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) +: Lemma + (requires ( + forall (f: struct_field l) . 
+ readable h (gfield p f) + )) + (ensures (readable h p)) +// [SMTPat (readable #(TStruct l) h p)] // TODO: dubious pattern, will probably trigger unreplayable hints + +val readable_struct_forall_mem + (#l: struct_typ) + (p: pointer (TStruct l)) +: Lemma (forall + (h: HS.mem) + . ( + forall (f: struct_field l) . + readable h (gfield p f) + ) ==> + readable h p + ) + +val readable_struct_fields + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) + (s: list string) +: GTot Type0 + +val readable_struct_fields_nil + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) +: Lemma + (readable_struct_fields h p []) + [SMTPat (readable_struct_fields h p [])] + +val readable_struct_fields_cons + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) + (f: string) + (q: list string) +: Lemma + (requires (readable_struct_fields h p q /\ (List.Tot.mem f (List.Tot.map fst l.fields) ==> (let f : struct_field l = f in readable h (gfield p f))))) + (ensures (readable_struct_fields h p (f::q))) + [SMTPat (readable_struct_fields h p (f::q))] + +val readable_struct_fields_readable_struct + (#l: struct_typ) + (h: HS.mem) + (p: pointer (TStruct l)) +: Lemma + (requires (readable_struct_fields h p (normalize_term (List.Tot.map fst l.fields)))) + (ensures (readable h p)) + +val readable_gcell + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length /\ readable h p)) + (ensures (UInt32.v i < UInt32.v length /\ readable h (gcell p i))) + [SMTPat (readable h (gcell p i))] + +val readable_array + (#length: array_length_t) + (#value: typ) + (h: HS.mem) + (p: pointer (TArray length value)) +: Lemma + (requires ( + forall (i: UInt32.t) . + UInt32.v i < UInt32.v length ==> + readable h (gcell p i) + )) + (ensures (readable h p)) +// [SMTPat (readable #(TArray length value) h p)] // TODO: dubious pattern, will probably trigger unreplayable hints + +(* TODO: improve on the following interface *) +val readable_gufield + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires True) + (ensures (readable h (gufield p fd) <==> (readable h p /\ union_get_key (gread h p) == fd))) + [SMTPat (readable h (gufield p fd))] + +(** The active field of a union *) + +val is_active_union_field + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) +: GTot Type0 + +val is_active_union_live + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (is_active_union_field h p fd)) + (ensures (live h p)) + [SMTPat (is_active_union_field h p fd)] + +val is_active_union_field_live + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (is_active_union_field h p fd)) + (ensures (live h (gufield p fd))) + [SMTPat (is_active_union_field h p fd)] + +val is_active_union_field_eq + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd1 fd2: struct_field l) +: Lemma + (requires (is_active_union_field h p fd1 /\ is_active_union_field h p fd2)) + (ensures (fd1 == fd2)) + [SMTPat (is_active_union_field h p fd1); SMTPat (is_active_union_field h p fd2)] + +val is_active_union_field_get_key + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (is_active_union_field h p fd)) + (ensures (union_get_key (gread h p) == fd)) + [SMTPat (is_active_union_field h p fd)] + +val is_active_union_field_readable + (#l: union_typ) + (h: 
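(* Hedged sketch of the intended calling pattern for the `readable_struct_fields`
   lemmas above, again with the hypothetical `point` struct; `point_readable` is
   an illustrative name:

     let point_readable (h: HS.mem) (p: pointer (TStruct point))
     : Lemma
       (requires (readable h (gfield p "x") /\ readable h (gfield p "y")))
       (ensures (readable h p))
     = readable_struct_fields_readable_struct h p

   The nil/cons lemmas' SMT patterns are intended to discharge the
   `readable_struct_fields h p ["x"; "y"]` obligation field by field. *)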
HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (is_active_union_field h p fd /\ readable h (gufield p fd))) + (ensures (readable h p)) + [SMTPat (is_active_union_field h p fd); SMTPat (readable h (gufield p fd))] + +val is_active_union_field_includes_readable + (#l: union_typ) + (h: HS.mem) + (p: pointer (TUnion l)) + (fd: struct_field l) + (#t': typ) + (p' : pointer t') +: Lemma + (requires (includes (gufield p fd) p' /\ readable h p')) + (ensures (is_active_union_field h p fd)) + +(* Equality predicate on struct contents, without quantifiers *) +let equal_values #a h (b:pointer a) h' (b':pointer a) : GTot Type0 = + (live h b ==> live h' b') /\ ( + readable h b ==> ( + readable h' b' /\ + gread h b == gread h' b' + )) + + +(*** Semantics of buffers *) + +(** Operations on buffers *) + +val gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) +: GTot (buffer t) + +val singleton_buffer_of_pointer + (#t: typ) + (p: pointer t) +: HST.Stack (buffer t) + (requires (fun h -> live h p)) + (ensures (fun h b h' -> h' == h /\ b == gsingleton_buffer_of_pointer p)) + +val gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) +: GTot (buffer t) + +val buffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) +: HST.Stack (buffer t) + (requires (fun h -> live h p)) + (ensures (fun h b h' -> h' == h /\ b == gbuffer_of_array_pointer p)) + +val buffer_length + (#t: typ) + (b: buffer t) +: GTot UInt32.t + +val buffer_length_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) +: Lemma + (requires True) + (ensures (buffer_length (gsingleton_buffer_of_pointer p) == 1ul)) + [SMTPat (buffer_length (gsingleton_buffer_of_pointer p))] + +val buffer_length_gbuffer_of_array_pointer + (#t: typ) + (#len: array_length_t) + (p: pointer (TArray len t)) +: Lemma + (requires True) + (ensures (buffer_length (gbuffer_of_array_pointer p) == len)) + [SMTPat (buffer_length (gbuffer_of_array_pointer p))] + +val buffer_live + (#t: typ) + (h: HS.mem) + (b: buffer t) +: GTot Type0 + +val buffer_live_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) + (h: HS.mem) +: Lemma + (ensures (buffer_live h (gsingleton_buffer_of_pointer p) <==> live h p )) + [SMTPat (buffer_live h (gsingleton_buffer_of_pointer p))] + +val buffer_live_gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) + (h: HS.mem) +: Lemma + (requires True) + (ensures (buffer_live h (gbuffer_of_array_pointer p) <==> live h p)) + [SMTPat (buffer_live h (gbuffer_of_array_pointer p))] + +val buffer_unused_in + (#t: typ) + (b: buffer t) + (h: HS.mem) +: GTot Type0 + +val buffer_live_not_unused_in + (#t: typ) + (b: buffer t) + (h: HS.mem) +: Lemma + ((buffer_live h b /\ buffer_unused_in b h) ==> False) + + +val buffer_unused_in_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) + (h: HS.mem) +: Lemma + (ensures (buffer_unused_in (gsingleton_buffer_of_pointer p) h <==> unused_in p h )) + [SMTPat (buffer_unused_in (gsingleton_buffer_of_pointer p) h)] + +val buffer_unused_in_gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) + (h: HS.mem) +: Lemma + (requires True) + (ensures (buffer_unused_in (gbuffer_of_array_pointer p) h <==> unused_in p h)) + [SMTPat (buffer_unused_in (gbuffer_of_array_pointer p) h)] + +val frameOf_buffer + (#t: typ) + (b: buffer t) +: GTot HS.rid + +val frameOf_buffer_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) +: Lemma 
+ (ensures (frameOf_buffer (gsingleton_buffer_of_pointer p) == frameOf p)) + [SMTPat (frameOf_buffer (gsingleton_buffer_of_pointer p))] + +val frameOf_buffer_gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) +: Lemma + (ensures (frameOf_buffer (gbuffer_of_array_pointer p) == frameOf p)) + [SMTPat (frameOf_buffer (gbuffer_of_array_pointer p))] + +val live_region_frameOf_buffer + (#value: typ) + (h: HS.mem) + (p: buffer value) +: Lemma + (requires (buffer_live h p)) + (ensures (HS.live_region h (frameOf_buffer p))) + [SMTPatOr [ + [SMTPat (HS.live_region h (frameOf_buffer p))]; + [SMTPat (buffer_live h p)] + ]] + +val buffer_as_addr + (#t: typ) + (b: buffer t) +: GTot (x: nat { x > 0 } ) + +val buffer_as_addr_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) +: Lemma + (ensures (buffer_as_addr (gsingleton_buffer_of_pointer p) == as_addr p)) + [SMTPat (buffer_as_addr (gsingleton_buffer_of_pointer p))] + +val buffer_as_addr_gbuffer_of_array_pointer + (#t: typ) + (#length: array_length_t) + (p: pointer (TArray length t)) +: Lemma + (ensures (buffer_as_addr (gbuffer_of_array_pointer p) == as_addr p)) + [SMTPat (buffer_as_addr (gbuffer_of_array_pointer p))] + +val gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Ghost (buffer t) + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures (fun _ -> True)) + +val frameOf_buffer_gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + frameOf_buffer (gsub_buffer b i len) == frameOf_buffer b + )) + [SMTPat (frameOf_buffer (gsub_buffer b i len))] + +val buffer_as_addr_gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_as_addr (gsub_buffer b i len) == buffer_as_addr b + )) + [SMTPat (buffer_as_addr (gsub_buffer b i len))] + +val sub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: HST.Stack (buffer t) + (requires (fun h -> UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ buffer_live h b)) + (ensures (fun h b' h' -> UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ h' == h /\ b' == gsub_buffer b i len )) + +val offset_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: HST.Stack (buffer t) + (requires (fun h -> UInt32.v i <= UInt32.v (buffer_length b) /\ buffer_live h b)) + (ensures (fun h b' h' -> UInt32.v i <= UInt32.v (buffer_length b) /\ h' == h /\ b' == gsub_buffer b i (UInt32.sub (buffer_length b) i))) + +val buffer_length_gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ buffer_length (gsub_buffer b i len) == len)) + [SMTPat (buffer_length (gsub_buffer b i len))] + +val buffer_live_gsub_buffer_equiv + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (h: HS.mem) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ (buffer_live h (gsub_buffer b i len) <==> buffer_live h b))) + [SMTPat (buffer_live h (gsub_buffer b i len))] + +val buffer_live_gsub_buffer_intro + (#t: 
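(* Hypothetical client code for the buffer views above; `first_half` is an
   illustrative name, and the SMT-pattern lemmas
   `buffer_length_gbuffer_of_array_pointer` and
   `buffer_live_gbuffer_of_array_pointer` are expected to discharge the
   `sub_buffer` precondition:

     let first_half (#t: typ) (#len: array_length_t) (p: pointer (TArray len t))
     : HST.Stack (buffer t)
       (requires (fun h -> live h p))
       (ensures (fun h b h' ->
         h' == h /\
         b == gsub_buffer (gbuffer_of_array_pointer p) 0ul (UInt32.div len 2ul)))
     = let b = buffer_of_array_pointer p in
       sub_buffer b 0ul (UInt32.div len 2ul)
*)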
typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (h: HS.mem) +: Lemma + (requires (buffer_live h b /\ UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ buffer_live h (gsub_buffer b i len))) + [SMTPat (buffer_live h (gsub_buffer b i len))] + +val buffer_unused_in_gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (h: HS.mem) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ (buffer_unused_in (gsub_buffer b i len) h <==> buffer_unused_in b h))) + [SMTPat (buffer_unused_in (gsub_buffer b i len) h)] + +val gsub_buffer_gsub_buffer + (#a: typ) + (b: buffer a) + (i1: UInt32.t) + (len1: UInt32.t) + (i2: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v len1 + )) + (ensures ( + UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v len1 /\ + gsub_buffer (gsub_buffer b i1 len1) i2 len2 == gsub_buffer b FStar.UInt32.(i1 +^ i2) len2 + )) + [SMTPat (gsub_buffer (gsub_buffer b i1 len1) i2 len2)] + +val gsub_buffer_zero_buffer_length + (#a: typ) + (b: buffer a) +: Lemma + (ensures (gsub_buffer b 0ul (buffer_length b) == b)) + [SMTPat (gsub_buffer b 0ul (buffer_length b))] + +val buffer_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer t) +: GTot (Seq.seq (type_of_typ t)) + +val buffer_length_buffer_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer t) +: Lemma + (requires True) + (ensures (Seq.length (buffer_as_seq h b) == UInt32.v (buffer_length b))) + [SMTPat (Seq.length (buffer_as_seq h b))] + +val buffer_as_seq_gsingleton_buffer_of_pointer + (#t: typ) + (h: HS.mem) + (p: pointer t) +: Lemma + (requires True) + (ensures (buffer_as_seq h (gsingleton_buffer_of_pointer p) == Seq.create 1 (gread h p))) + [SMTPat (buffer_as_seq h (gsingleton_buffer_of_pointer p))] + +val buffer_as_seq_gbuffer_of_array_pointer + (#length: array_length_t) + (#t: typ) + (h: HS.mem) + (p: pointer (TArray length t)) +: Lemma + (requires True) + (ensures (buffer_as_seq h (gbuffer_of_array_pointer p) == gread h p)) + [SMTPat (buffer_as_seq h (gbuffer_of_array_pointer p))] + +val buffer_as_seq_gsub_buffer + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b))) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ buffer_as_seq h (gsub_buffer b i len) == Seq.slice (buffer_as_seq h b) (UInt32.v i) (UInt32.v i + UInt32.v len))) + [SMTPat (buffer_as_seq h (gsub_buffer b i len))] + +val gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Ghost (pointer t) + (requires (UInt32.v i < UInt32.v (buffer_length b))) + (ensures (fun _ -> True)) + +val pointer_of_buffer_cell + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: HST.Stack (pointer t) + (requires (fun h -> UInt32.v i < UInt32.v (buffer_length b) /\ buffer_live h b)) + (ensures (fun h p h' -> UInt32.v i < UInt32.v (buffer_length b) /\ h' == h /\ p == gpointer_of_buffer_cell b i)) + +val gpointer_of_buffer_cell_gsub_buffer + (#t: typ) + (b: buffer t) + (i1: UInt32.t) + (len: UInt32.t) + (i2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 + UInt32.v len <= UInt32.v (buffer_length b) /\ + UInt32.v i2 < UInt32.v len + )) + (ensures ( + UInt32.v i1 + UInt32.v len <= UInt32.v (buffer_length b) /\ + UInt32.v 
i2 < UInt32.v len /\ + gpointer_of_buffer_cell (gsub_buffer b i1 len) i2 == gpointer_of_buffer_cell b FStar.UInt32.(i1 +^ i2) + )) + +let gpointer_of_buffer_cell_gsub_buffer' + (#t: typ) + (b: buffer t) + (i1: UInt32.t) + (len: UInt32.t) + (i2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 + UInt32.v len <= UInt32.v (buffer_length b) /\ + UInt32.v i2 < UInt32.v len + )) + (ensures ( + UInt32.v i1 + UInt32.v len <= UInt32.v (buffer_length b) /\ + UInt32.v i2 < UInt32.v len /\ + gpointer_of_buffer_cell (gsub_buffer b i1 len) i2 == gpointer_of_buffer_cell b FStar.UInt32.(i1 +^ i2) + )) + [SMTPat (gpointer_of_buffer_cell (gsub_buffer b i1 len) i2)] += gpointer_of_buffer_cell_gsub_buffer b i1 len i2 + +val live_gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (h: HS.mem) +: Lemma + (requires ( + UInt32.v i < UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i < UInt32.v (buffer_length b) /\ + (live h (gpointer_of_buffer_cell b i) <==> buffer_live h b) + )) + [SMTPat (live h (gpointer_of_buffer_cell b i))] + +val gpointer_of_buffer_cell_gsingleton_buffer_of_pointer + (#t: typ) + (p: pointer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < 1)) + (ensures (UInt32.v i < 1 /\ gpointer_of_buffer_cell (gsingleton_buffer_of_pointer p) i == p)) + [SMTPat (gpointer_of_buffer_cell (gsingleton_buffer_of_pointer p) i)] + +val gpointer_of_buffer_cell_gbuffer_of_array_pointer + (#length: array_length_t) + (#t: typ) + (p: pointer (TArray length t)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ gpointer_of_buffer_cell (gbuffer_of_array_pointer p) i == gcell p i)) + [SMTPat (gpointer_of_buffer_cell (gbuffer_of_array_pointer p) i)] + +val frameOf_gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ frameOf (gpointer_of_buffer_cell b i) == frameOf_buffer b)) + [SMTPat (frameOf (gpointer_of_buffer_cell b i))] + +val as_addr_gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ as_addr (gpointer_of_buffer_cell b i) == buffer_as_addr b)) + [SMTPat (as_addr (gpointer_of_buffer_cell b i))] + +val gread_gpointer_of_buffer_cell + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ gread h (gpointer_of_buffer_cell b i) == Seq.index (buffer_as_seq h b) (UInt32.v i))) + [SMTPat (gread h (gpointer_of_buffer_cell b i))] + +val gread_gpointer_of_buffer_cell' + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ gread h (gpointer_of_buffer_cell b i) == Seq.index (buffer_as_seq h b) (UInt32.v i))) + +val index_buffer_as_seq + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: nat) +: Lemma + (requires (i < UInt32.v (buffer_length b))) + (ensures (i < UInt32.v (buffer_length b) /\ Seq.index (buffer_as_seq h b) i == gread h (gpointer_of_buffer_cell b (UInt32.uint_to_t i)))) + [SMTPat (Seq.index (buffer_as_seq h b) i)] + +val gsingleton_buffer_of_pointer_gcell + (#t: typ) + (#len: array_length_t) + (p: pointer (TArray len t)) + (i: UInt32.t) +: Lemma + (requires ( + UInt32.v i < UInt32.v len + )) + (ensures ( + UInt32.v i < UInt32.v 
len /\ + gsingleton_buffer_of_pointer (gcell p i) == gsub_buffer (gbuffer_of_array_pointer p) i 1ul + )) + [SMTPat (gsingleton_buffer_of_pointer (gcell p i))] + +val gsingleton_buffer_of_pointer_gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires ( + UInt32.v i < UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i < UInt32.v (buffer_length b) /\ + gsingleton_buffer_of_pointer (gpointer_of_buffer_cell b i) == gsub_buffer b i 1ul + )) + [SMTPat (gsingleton_buffer_of_pointer (gpointer_of_buffer_cell b i))] + +(* The readable permission lifted to buffers. *) + +val buffer_readable + (#t: typ) + (h: HS.mem) + (b: buffer t) +: GTot Type0 + +val buffer_readable_buffer_live + (#t: typ) + (h: HS.mem) + (b: buffer t) +: Lemma + (requires (buffer_readable h b)) + (ensures (buffer_live h b)) + [SMTPatOr [ + [SMTPat (buffer_readable h b)]; + [SMTPat (buffer_live h b)]; + ]] + +val buffer_readable_gsingleton_buffer_of_pointer + (#t: typ) + (h: HS.mem) + (p: pointer t) +: Lemma + (ensures (buffer_readable h (gsingleton_buffer_of_pointer p) <==> readable h p)) + [SMTPat (buffer_readable h (gsingleton_buffer_of_pointer p))] + +val buffer_readable_gbuffer_of_array_pointer + (#len: array_length_t) + (#t: typ) + (h: HS.mem) + (p: pointer (TArray len t)) +: Lemma + (requires True) + (ensures (buffer_readable h (gbuffer_of_array_pointer p) <==> readable h p)) + [SMTPat (buffer_readable h (gbuffer_of_array_pointer p))] + +val buffer_readable_gsub_buffer + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ buffer_readable h b)) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ buffer_readable h (gsub_buffer b i len))) + [SMTPat (buffer_readable h (gsub_buffer b i len))] + +val readable_gpointer_of_buffer_cell + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b) /\ buffer_readable h b)) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ readable h (gpointer_of_buffer_cell b i))) + [SMTPat (readable h (gpointer_of_buffer_cell b i))] + +val buffer_readable_intro + (#t: typ) + (h: HS.mem) + (b: buffer t) +: Lemma + (requires ( + buffer_live h b /\ ( + forall (i: UInt32.t) . + UInt32.v i < UInt32.v (buffer_length b) ==> + readable h (gpointer_of_buffer_cell b i) + ))) + (ensures (buffer_readable h b)) +// [SMTPat (buffer_readable h b)] // TODO: dubious pattern, may trigger unreplayable hints + +val buffer_readable_elim + (#t: typ) + (h: HS.mem) + (b: buffer t) +: Lemma + (requires ( + buffer_readable h b + )) + (ensures ( + buffer_live h b /\ ( + forall (i: UInt32.t) . 
+ UInt32.v i < UInt32.v (buffer_length b) ==> + readable h (gpointer_of_buffer_cell b i) + ))) + + +(*** The modifies clause *) + +val loc : Type u#0 + +val loc_none: loc + +val loc_union + (s1 s2: loc) +: GTot loc + +(** The following is useful to make Z3 cut matching loops with +modifies_trans and modifies_refl *) +val loc_union_idem + (s: loc) +: Lemma + (loc_union s s == s) + [SMTPat (loc_union s s)] + +val loc_pointer + (#t: typ) + (p: pointer t) +: GTot loc + +val loc_buffer + (#t: typ) + (b: buffer t) +: GTot loc + +val loc_addresses + (r: HS.rid) + (n: Set.set nat) +: GTot loc + +val loc_regions + (r: Set.set HS.rid) +: GTot loc + + +(* Inclusion of memory locations *) + +val loc_includes + (s1 s2: loc) +: GTot Type0 + +val loc_includes_refl + (s: loc) +: Lemma + (loc_includes s s) + [SMTPat (loc_includes s s)] + +val loc_includes_trans + (s1 s2 s3: loc) +: Lemma + (requires (loc_includes s1 s2 /\ loc_includes s2 s3)) + (ensures (loc_includes s1 s3)) + +val loc_includes_union_r + (s s1 s2: loc) +: Lemma + (requires (loc_includes s s1 /\ loc_includes s s2)) + (ensures (loc_includes s (loc_union s1 s2))) + [SMTPat (loc_includes s (loc_union s1 s2))] + +val loc_includes_union_l + (s1 s2 s: loc) +: Lemma + (requires (loc_includes s1 s \/ loc_includes s2 s)) + (ensures (loc_includes (loc_union s1 s2) s)) + [SMTPat (loc_includes (loc_union s1 s2) s)] + +val loc_includes_none + (s: loc) +: Lemma + (loc_includes s loc_none) + [SMTPat (loc_includes s loc_none)] + +val loc_includes_pointer_pointer + (#t1 #t2: typ) + (p1: pointer t1) + (p2: pointer t2) +: Lemma + (requires (includes p1 p2)) + (ensures (loc_includes (loc_pointer p1) (loc_pointer p2))) + [SMTPat (loc_includes (loc_pointer p1) (loc_pointer p2))] + +val loc_includes_gsingleton_buffer_of_pointer + (l: loc) + (#t: typ) + (p: pointer t) +: Lemma + (requires (loc_includes l (loc_pointer p))) + (ensures (loc_includes l (loc_buffer (gsingleton_buffer_of_pointer p)))) + [SMTPat (loc_includes l (loc_buffer (gsingleton_buffer_of_pointer p)))] + +val loc_includes_gbuffer_of_array_pointer + (l: loc) + (#len: array_length_t) + (#t: typ) + (p: pointer (TArray len t)) +: Lemma + (requires (loc_includes l (loc_pointer p))) + (ensures (loc_includes l (loc_buffer (gbuffer_of_array_pointer p)))) + [SMTPat (loc_includes l (loc_buffer (gbuffer_of_array_pointer p)))] + +val loc_includes_gpointer_of_array_cell + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b) /\ loc_includes l (loc_buffer b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ loc_includes l (loc_pointer (gpointer_of_buffer_cell b i)))) + [SMTPat (loc_includes l (loc_pointer (gpointer_of_buffer_cell b i)))] + +val loc_includes_gsub_buffer_r + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ loc_includes l (loc_buffer b))) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ loc_includes l (loc_buffer (gsub_buffer b i len)))) + [SMTPat (loc_includes l (loc_buffer (gsub_buffer b i len)))] + +val loc_includes_gsub_buffer_l + (#t: typ) + (b: buffer t) + (i1: UInt32.t) + (len1: UInt32.t) + (i2: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires (UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) /\ UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1)) + (ensures (UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) /\ UInt32.v i1 <= UInt32.v i2 /\ 
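(* Hedged sketch: footprints are built with `loc_union` and weakened with
   `loc_includes`; `fp` is an illustrative name:

     let fp (#t1 #t2: typ) (p: pointer t1) (b: buffer t2) : GTot loc =
       loc_union (loc_pointer p) (loc_buffer b)

   `loc_includes_refl` and `loc_includes_union_l` then give
   `loc_includes (fp p b) (loc_pointer p)` and
   `loc_includes (fp p b) (loc_buffer b)`, and
   `loc_includes_gpointer_of_array_cell` extends the latter to each cell of `b`. *)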
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1 /\ loc_includes (loc_buffer (gsub_buffer b i1 len1)) (loc_buffer (gsub_buffer b i2 len2)))) + [SMTPat (loc_includes (loc_buffer (gsub_buffer b i1 len1)) (loc_buffer (gsub_buffer b i2 len2)))] + +val loc_includes_addresses_pointer + (#t: typ) + (r: HS.rid) + (s: Set.set nat) + (p: pointer t) +: Lemma + (requires (frameOf p == r /\ Set.mem (as_addr p) s)) + (ensures (loc_includes (loc_addresses r s) (loc_pointer p))) + [SMTPat (loc_includes (loc_addresses r s) (loc_pointer p))] + +val loc_includes_addresses_buffer + (#t: typ) + (r: HS.rid) + (s: Set.set nat) + (p: buffer t) +: Lemma + (requires (frameOf_buffer p == r /\ Set.mem (buffer_as_addr p) s)) + (ensures (loc_includes (loc_addresses r s) (loc_buffer p))) + [SMTPat (loc_includes (loc_addresses r s) (loc_buffer p))] + +val loc_includes_region_pointer + (#t: typ) + (s: Set.set HS.rid) + (p: pointer t) +: Lemma + (requires (Set.mem (frameOf p) s)) + (ensures (loc_includes (loc_regions s) (loc_pointer p))) + [SMTPat (loc_includes (loc_regions s) (loc_pointer p))] + +val loc_includes_region_buffer + (#t: typ) + (s: Set.set HS.rid) + (b: buffer t) +: Lemma + (requires (Set.mem (frameOf_buffer b) s)) + (ensures (loc_includes (loc_regions s) (loc_buffer b))) + [SMTPat (loc_includes (loc_regions s) (loc_buffer b))] + +val loc_includes_region_addresses + (s: Set.set HS.rid) + (r: HS.rid) + (a: Set.set nat) +: Lemma + (requires (Set.mem r s)) + (ensures (loc_includes (loc_regions s) (loc_addresses r a))) + [SMTPat (loc_includes (loc_regions s) (loc_addresses r a))] + +val loc_includes_region_region + (s1 s2: Set.set HS.rid) +: Lemma + (requires (Set.subset s2 s1)) + (ensures (loc_includes (loc_regions s1) (loc_regions s2))) + [SMTPat (loc_includes (loc_regions s1) (loc_regions s2))] + +val loc_includes_region_union_l + (l: loc) + (s1 s2: Set.set HS.rid) +: Lemma + (requires (loc_includes l (loc_regions (Set.intersect s2 (Set.complement s1))))) + (ensures (loc_includes (loc_union (loc_regions s1) l) (loc_regions s2))) + [SMTPat (loc_includes (loc_union (loc_regions s1) l) (loc_regions s2))] + + +(* Disjointness of two memory locations *) + +val loc_disjoint + (s1 s2: loc) +: GTot Type0 + +val loc_disjoint_sym + (s1 s2: loc) +: Lemma + (requires (loc_disjoint s1 s2)) + (ensures (loc_disjoint s2 s1)) + [SMTPat (loc_disjoint s1 s2)] + +val loc_disjoint_none_r + (s: loc) +: Lemma + (ensures (loc_disjoint s loc_none)) + [SMTPat (loc_disjoint s loc_none)] + +val loc_disjoint_union_r + (s s1 s2: loc) +: Lemma + (requires (loc_disjoint s s1 /\ loc_disjoint s s2)) + (ensures (loc_disjoint s (loc_union s1 s2))) + [SMTPat (loc_disjoint s (loc_union s1 s2))] + +val loc_disjoint_root + (#value1: typ) + (#value2: typ) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (frameOf p1 <> frameOf p2 \/ as_addr p1 <> as_addr p2)) + (ensures (loc_disjoint (loc_pointer p1) (loc_pointer p2))) + +val loc_disjoint_gfield + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd1 fd2: struct_field l) +: Lemma + (requires (fd1 <> fd2)) + (ensures (loc_disjoint (loc_pointer (gfield p fd1)) (loc_pointer (gfield p fd2)))) + [SMTPat (loc_disjoint (loc_pointer (gfield p fd1)) (loc_pointer (gfield p fd2)))] + +val loc_disjoint_gcell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i1: UInt32.t) + (i2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 < UInt32.v length /\ + UInt32.v i2 < UInt32.v length /\ + UInt32.v i1 <> UInt32.v i2 + )) + (ensures ( + UInt32.v i1 < UInt32.v 
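(* Hedged sketch, reusing the hypothetical `point` struct: distinct fields of one
   struct occupy disjoint locations, which is what `loc_disjoint_gfield`
   provides; `point_fields_disjoint` is an illustrative name:

     let point_fields_disjoint (p: pointer (TStruct point))
     : Lemma (loc_disjoint (loc_pointer (gfield p "x")) (loc_pointer (gfield p "y")))
     = loc_disjoint_gfield p "x" "y"

   `loc_disjoint_includes` then transports such disjointness facts down to any
   locations included in the two sides. *)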
length /\ + UInt32.v i2 < UInt32.v length /\ + loc_disjoint (loc_pointer (gcell p i1)) (loc_pointer (gcell p i2)) + )) + [SMTPat (loc_disjoint (loc_pointer (gcell p i1)) (loc_pointer (gcell p i2)))] + +val loc_disjoint_includes + (p1 p2 p1' p2' : loc) +: Lemma + (requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2)) + (ensures (loc_disjoint p1' p2')) + +(* TODO: The following is now wrong, should be replaced with readable + +val live_not_equal_disjoint + (#t: typ) + (h: HS.mem) + (p1 p2: pointer t) +: Lemma + (requires (live h p1 /\ live h p2 /\ equal p1 p2 == false)) + (ensures (disjoint p1 p2)) +*) + +val live_unused_in_disjoint_strong + (#value1: typ) + (#value2: typ) + (h: HS.mem) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (live h p1 /\ unused_in p2 h)) + (ensures (frameOf p1 <> frameOf p2 \/ as_addr p1 <> as_addr p2)) + +val live_unused_in_disjoint + (#value1: typ) + (#value2: typ) + (h: HS.mem) + (p1: pointer value1) + (p2: pointer value2) +: Lemma + (requires (live h p1 /\ unused_in p2 h)) + (ensures (loc_disjoint (loc_pointer p1) (loc_pointer p2))) + [SMTPatOr [ + [SMTPat (loc_disjoint (loc_pointer p1) (loc_pointer p2)); SMTPat (live h p1)]; + [SMTPat (loc_disjoint (loc_pointer p1) (loc_pointer p2)); SMTPat (unused_in p2 h)]; + [SMTPat (live h p1); SMTPat (unused_in p2 h)]; + ]] + +val pointer_live_reference_unused_in_disjoint + (#value1: typ) + (#value2: Type0) + (h: HS.mem) + (p1: pointer value1) + (p2: HS.reference value2) +: Lemma + (requires (live h p1 /\ HS.unused_in p2 h)) + (ensures (loc_disjoint (loc_pointer p1) (loc_addresses (HS.frameOf p2) (Set.singleton (HS.as_addr p2))))) + [SMTPat (live h p1); SMTPat (HS.unused_in p2 h)] + +val reference_live_pointer_unused_in_disjoint + (#value1: Type0) + (#value2: typ) + (h: HS.mem) + (p1: HS.reference value1) + (p2: pointer value2) +: Lemma + (requires (HS.contains h p1 /\ unused_in p2 h)) + (ensures (loc_disjoint (loc_addresses (HS.frameOf p1) (Set.singleton (HS.as_addr p1))) (loc_pointer p2))) + [SMTPat (HS.contains h p1); SMTPat (unused_in p2 h)] + +val loc_disjoint_gsub_buffer + (#t: typ) + (b: buffer t) + (i1: UInt32.t) + (len1: UInt32.t) + (i2: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v (buffer_length b) /\ ( + UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/ + UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + ))) + (ensures ( + UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v (buffer_length b) /\ + loc_disjoint (loc_buffer (gsub_buffer b i1 len1)) (loc_buffer (gsub_buffer b i2 len2)) + )) + [SMTPat (loc_disjoint (loc_buffer (gsub_buffer b i1 len1)) (loc_buffer (gsub_buffer b i2 len2)))] + +val loc_disjoint_gpointer_of_buffer_cell + (#t: typ) + (b: buffer t) + (i1: UInt32.t) + (i2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 < UInt32.v (buffer_length b) /\ + UInt32.v i2 < UInt32.v (buffer_length b) /\ ( + UInt32.v i1 <> UInt32.v i2 + ))) + (ensures ( + UInt32.v i1 < UInt32.v (buffer_length b) /\ + UInt32.v i2 < UInt32.v (buffer_length b) /\ + loc_disjoint (loc_pointer (gpointer_of_buffer_cell b i1)) (loc_pointer (gpointer_of_buffer_cell b i2)) + )) + [SMTPat (loc_disjoint (loc_pointer (gpointer_of_buffer_cell b i1)) (loc_pointer (gpointer_of_buffer_cell b i2)))] + +let loc_disjoint_gpointer_of_buffer_cell_r + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length 
b) /\ loc_disjoint l (loc_buffer b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ loc_disjoint l (loc_pointer (gpointer_of_buffer_cell b i)))) + [SMTPat (loc_disjoint l (loc_pointer (gpointer_of_buffer_cell b i)))] += loc_disjoint_includes l (loc_buffer b) l (loc_pointer (gpointer_of_buffer_cell b i)) + +let loc_disjoint_gpointer_of_buffer_cell_l + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b) /\ loc_disjoint (loc_buffer b) l)) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ loc_disjoint (loc_pointer (gpointer_of_buffer_cell b i)) l)) + [SMTPat (loc_disjoint (loc_pointer (gpointer_of_buffer_cell b i)) l)] += loc_disjoint_includes (loc_buffer b) l (loc_pointer (gpointer_of_buffer_cell b i)) l + +val loc_disjoint_addresses + (r1 r2: HS.rid) + (n1 n2: Set.set nat) +: Lemma + (requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty)) + (ensures (loc_disjoint (loc_addresses r1 n1) (loc_addresses r2 n2))) + [SMTPat (loc_disjoint (loc_addresses r1 n1) (loc_addresses r2 n2))] + +val loc_disjoint_pointer_addresses + (#t: typ) + (p: pointer t) + (r: HS.rid) + (n: Set.set nat) +: Lemma + (requires (r <> frameOf p \/ (~ (Set.mem (as_addr p) n)))) + (ensures (loc_disjoint (loc_pointer p) (loc_addresses r n))) + [SMTPat (loc_disjoint (loc_pointer p) (loc_addresses r n))] + +val loc_disjoint_buffer_addresses + (#t: typ) + (p: buffer t) + (r: HH.rid) + (n: Set.set nat) +: Lemma + (requires (r <> frameOf_buffer p \/ (~ (Set.mem (buffer_as_addr p) n)))) + (ensures (loc_disjoint (loc_buffer p) (loc_addresses r n))) + [SMTPat (loc_disjoint (loc_buffer p) (loc_addresses r n))] + +val loc_disjoint_regions + (rs1 rs2: Set.set HS.rid) +: Lemma + (requires (Set.subset (Set.intersect rs1 rs2) Set.empty)) + (ensures (loc_disjoint (loc_regions rs1) (loc_regions rs2))) + [SMTPat (loc_disjoint (loc_regions rs1) (loc_regions rs2))] + +(** The modifies clause proper *) + +val modifies + (s: loc) + (h1 h2: HS.mem) +: GTot Type0 + +val modifies_loc_regions_intro + (rs: Set.set HS.rid) + (h1 h2: HS.mem) +: Lemma + (requires (HS.modifies rs h1 h2)) + (ensures (modifies (loc_regions rs) h1 h2)) + +val modifies_pointer_elim + (s: loc) + (h1 h2: HS.mem) + (#a': typ) + (p': pointer a') +: Lemma + (requires ( + modifies s h1 h2 /\ + live h1 p' /\ + loc_disjoint (loc_pointer p') s + )) + (ensures ( + equal_values h1 p' h2 p' + )) + [SMTPatOr [ + [ SMTPat (modifies s h1 h2); SMTPat (gread h1 p') ] ; + [ SMTPat (modifies s h1 h2); SMTPat (readable h1 p') ] ; + [ SMTPat (modifies s h1 h2); SMTPat (live h1 p') ]; + [ SMTPat (modifies s h1 h2); SMTPat (gread h2 p') ] ; + [ SMTPat (modifies s h1 h2); SMTPat (readable h2 p') ] ; + [ SMTPat (modifies s h1 h2); SMTPat (live h2 p') ] + ] ] + +val modifies_buffer_elim + (#t1: typ) + (b: buffer t1) + (p: loc) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_buffer b) p /\ + buffer_live h b /\ + (UInt32.v (buffer_length b) == 0 ==> buffer_live h' b) /\ // necessary for liveness, because all buffers of size 0 are disjoint for any memory location, so we cannot talk about their liveness individually without referring to a larger nonempty buffer + modifies p h h' + )) + (ensures ( + buffer_live h' b /\ ( + buffer_readable h b ==> ( + buffer_readable h' b /\ + buffer_as_seq h b == buffer_as_seq h' b + )))) + [SMTPatOr [ + [ SMTPat (modifies p h h'); SMTPat (buffer_as_seq h b) ] ; + [ SMTPat (modifies p h h'); SMTPat (buffer_readable h b) ] ; + [ SMTPat (modifies p h h'); SMTPat (buffer_live h 
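(* A hedged framing sketch built on `modifies_buffer_elim`; the name
   `frame_example` and the positivity assumption on the buffer length (which
   discharges the zero-length side condition) are illustrative:

     let frame_example (#t1 #t2: typ) (p: pointer t1) (b: buffer t2) (h0 h1: HS.mem)
     : Lemma
       (requires (
         modifies (loc_pointer p) h0 h1 /\
         buffer_live h0 b /\
         UInt32.v (buffer_length b) > 0 /\
         loc_disjoint (loc_buffer b) (loc_pointer p)))
       (ensures (
         buffer_live h1 b /\
         (buffer_readable h0 b ==>
          buffer_readable h1 b /\ buffer_as_seq h1 b == buffer_as_seq h0 b)))
     = modifies_buffer_elim b (loc_pointer p) h0 h1
*)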
b) ]; + [ SMTPat (modifies p h h'); SMTPat (buffer_as_seq h' b) ] ; + [ SMTPat (modifies p h h'); SMTPat (buffer_readable h' b) ] ; + [ SMTPat (modifies p h h'); SMTPat (buffer_live h' b) ] + ] ] + +val modifies_reference_elim + (#t: Type0) + (b: HS.reference t) + (p: loc) + (h h': HS.mem) +: Lemma + (requires ( + loc_disjoint (loc_addresses (HS.frameOf b) (Set.singleton (HS.as_addr b))) p /\ + HS.contains h b /\ + modifies p h h' + )) + (ensures ( + HS.contains h' b /\ + HS.sel h b == HS.sel h' b + )) + [SMTPatOr [ + [ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ; + [ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ]; + [ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ; + [ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ] + ] ] + +val modifies_refl + (s: loc) + (h: HS.mem) +: Lemma + (modifies s h h) + [SMTPat (modifies s h h)] + +val modifies_loc_includes + (s1: loc) + (h h': HS.mem) + (s2: loc) +: Lemma + (requires (modifies s2 h h' /\ loc_includes s1 s2)) + (ensures (modifies s1 h h')) + [SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')] + +val modifies_trans + (s12: loc) + (h1 h2: HS.mem) + (s23: loc) + (h3: HS.mem) +: Lemma + (requires (modifies s12 h1 h2 /\ modifies s23 h2 h3)) + (ensures (modifies (loc_union s12 s23) h1 h3)) + [SMTPat (modifies s12 h1 h2); SMTPat (modifies s23 h2 h3)] + +let modifies_0 (h0 h1: HS.mem) : GTot Type0 = + modifies loc_none h0 h1 + +let modifies_1 (#t: typ) (p: pointer t) (h0 h1: HS.mem) : GTot Type0 = + modifies (loc_pointer p) h0 h1 + +(** Concrete allocators, getters and setters *) + +val screate + (value:typ) + (s: option (type_of_typ value)) +: HST.StackInline (pointer value) + (requires (fun h -> True)) + (ensures (fun (h0:HS.mem) b h1 -> + unused_in b h0 + /\ live h1 b + /\ frameOf b = HS.get_tip h0 + /\ modifies_0 h0 h1 + /\ begin match s with + | Some s' -> + readable h1 b /\ + gread h1 b == s' + | _ -> True + end + )) + +val ecreate + (t:typ) + (r:HS.rid) + (s: option (type_of_typ t)) +: HST.ST (pointer t) + (requires (fun h -> is_eternal_region r /\ HST.witnessed (region_contains_pred r))) + (ensures (fun (h0:HS.mem) b h1 -> unused_in b h0 + /\ live h1 b + /\ frameOf b == r + /\ modifies_0 h0 h1 + /\ begin match s with + | Some s' -> + readable h1 b /\ + gread h1 b == s' + | _ -> True + end + /\ ~(is_mm b))) + +val field + (#l: struct_typ) + (p: pointer (TStruct l)) + (fd: struct_field l) +: HST.Stack (pointer (typ_of_struct_field l fd)) + (requires (fun h -> live h p)) + (ensures (fun h0 p' h1 -> h0 == h1 /\ p' == gfield p fd)) + +val ufield + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: HST.Stack (pointer (typ_of_struct_field l fd)) + (requires (fun h -> live h p)) + (ensures (fun h0 p' h1 -> h0 == h1 /\ p' == gufield p fd)) + +val cell + (#length: array_length_t) + (#value: typ) + (p: pointer (TArray length value)) + (i: UInt32.t) +: HST.Stack (pointer value) + (requires (fun h -> UInt32.v i < UInt32.v length /\ live h p)) + (ensures (fun h0 p' h1 -> UInt32.v i < UInt32.v length /\ h0 == h1 /\ p' == gcell p i)) + +val read + (#value: typ) + (p: pointer value) +: HST.Stack (type_of_typ value) + (requires (fun h -> readable h p)) + (ensures (fun h0 v h1 -> readable h0 p /\ h0 == h1 /\ v == gread h0 p)) + +val is_null + (#t: typ) + (p: npointer t) +: HST.Stack bool + (requires (fun h -> nlive h p)) + (ensures (fun h b h' -> h' == h /\ b == g_is_null p)) + +val write: #a:typ -> b:pointer a -> z:type_of_typ a -> HST.Stack unit + (requires (fun h -> live h b)) + (ensures (fun h0 _ h1 -> live h0 b /\ live 
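(* A hypothetical end-to-end use of the stateful getters and setters, assuming
   the base type code `TUInt32` from earlier in this interface interprets to
   `UInt32.t`; `incr` is an illustrative name:

     let incr (p: pointer (TBase TUInt32))
     : HST.Stack unit
       (requires (fun h -> readable h p /\ UInt32.v (gread h p) < 100))
       (ensures (fun h0 _ h1 ->
         modifies_1 p h0 h1 /\ readable h1 p /\
         gread h1 p == UInt32.add (gread h0 p) 1ul))
     = let v = read p in
       write p (UInt32.add v 1ul)
*)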
h1 b + /\ modifies_1 b h0 h1 + /\ readable h1 b + /\ gread h1 b == z )) + +(** Given our model, this operation is stateful, however it should be translated + to a no-op by Karamel, as the tag does not actually exist at runtime. +*) +val write_union_field + (#l: union_typ) + (p: pointer (TUnion l)) + (fd: struct_field l) +: HST.Stack unit + (requires (fun h -> live h p)) + (ensures (fun h0 _ h1 -> live h0 p /\ live h1 p + /\ modifies_1 p h0 h1 + /\ is_active_union_field h1 p fd + )) + +val modifies_fresh_frame_popped + (h0 h1: HS.mem) + (s: loc) + (h2 h3: HS.mem) +: Lemma + (requires ( + HS.fresh_frame h0 h1 /\ + modifies (loc_union (loc_regions (HS.mod_set (Set.singleton (HS.get_tip h1)))) s) h1 h2 /\ + (HS.get_tip h2) == (HS.get_tip h1) /\ + HS.popped h2 h3 + )) + (ensures ( + modifies s h0 h3 /\ + (HS.get_tip h3) == HS.get_tip h0 + )) + [SMTPat (HS.fresh_frame h0 h1); SMTPat (HS.popped h2 h3); SMTPat (modifies s h0 h3)] + +val modifies_only_live_regions + (rs: Set.set HS.rid) + (l: loc) + (h h' : HS.mem) +: Lemma + (requires ( + modifies (loc_union (loc_regions rs) l) h h' /\ + (forall r . Set.mem r rs ==> (~ (HS.live_region h r))) + )) + (ensures (modifies l h h')) + +val modifies_loc_addresses_intro + (r: HS.rid) + (a: Set.set nat) + (l: loc) + (h1 h2: HS.mem) +: Lemma + (requires ( + HS.live_region h2 r /\ + modifies (loc_union (loc_regions (Set.singleton r)) l) h1 h2 /\ + HS.modifies_ref r a h1 h2 + )) + (ensures (modifies (loc_union (loc_addresses r a) l) h1 h2)) + +(* `modifies` and the readable permission *) + +(** NOTE: we historically used to have this lemma for arbitrary +pointer inclusion, but that became wrong for unions. *) + +val modifies_1_readable_struct + (#l: struct_typ) + (f: struct_field l) + (p: pointer (TStruct l)) + (h h' : HS.mem) +: Lemma + (requires (readable h p /\ modifies_1 (gfield p f) h h' /\ readable h' (gfield p f))) + (ensures (readable h' p)) + [SMTPatOr [ + [SMTPat (modifies_1 (gfield p f) h h'); SMTPat (readable h p)]; + [SMTPat (modifies_1 (gfield p f) h h'); SMTPat (readable h' p)]; + [SMTPat (readable h p); SMTPat (readable h' (gfield p f))]; +// [SMTPat (readable h' p); SMTPat (readable h' (gfield p f))]; // this pattern is incomplete + [SMTPat (readable h p); SMTPat (readable h' p); SMTPat (gfield p f)]; +]] + +val modifies_1_readable_array + (#t: typ) + (#len: array_length_t) + (i: UInt32.t) + (p: pointer (TArray len t)) + (h h' : HS.mem) +: Lemma + (requires (UInt32.v i < UInt32.v len /\ readable h p /\ modifies_1 (gcell p i) h h' /\ readable h' (gcell p i))) + (ensures (readable h' p)) + [SMTPatOr [ + [SMTPat (modifies_1 (gcell p i) h h'); SMTPat (readable h p)]; + [SMTPat (modifies_1 (gcell p i) h h'); SMTPat (readable h' p)]; + [SMTPat (readable h p); SMTPat (readable h' (gcell p i))]; +// [SMTPat (readable h' p); SMTPat (readable h' (gcell p i))]; // this pattern is incomplete + [SMTPat (readable h p); SMTPat (readable h' p); SMTPat (gcell p i)]; + ]] + +(* buffer read: can be defined as a derived operation: pointer_of_buffer_cell ; read *) +val read_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: HST.Stack (type_of_typ t) + (requires (fun h -> UInt32.v i < UInt32.v (buffer_length b) /\ readable h (gpointer_of_buffer_cell b i))) + (ensures (fun h v h' -> UInt32.v i < UInt32.v (buffer_length b) /\ h' == h /\ v == Seq.index (buffer_as_seq h b) (UInt32.v i))) + +(* buffer write: needs clearer "modifies" clauses *) + +val write_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (v: type_of_typ t) +: HST.Stack unit + (requires (fun h -> 
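(* Hedged sketch combining the buffer getter and setter; `copy_cell` is an
   illustrative name, and no aliasing assumption between `src` and `dst` is
   needed for this weak specification:

     let copy_cell (#t: typ) (src dst: buffer t) (i: UInt32.t)
     : HST.Stack unit
       (requires (fun h ->
         UInt32.v i < UInt32.v (buffer_length src) /\
         UInt32.v i < UInt32.v (buffer_length dst) /\
         readable h (gpointer_of_buffer_cell src i) /\
         buffer_live h dst))
       (ensures (fun h0 _ h1 ->
         UInt32.v i < UInt32.v (buffer_length dst) /\
         modifies_1 (gpointer_of_buffer_cell dst i) h0 h1))
     = let v = read_buffer src i in
       write_buffer dst i v
*)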
UInt32.v i < UInt32.v (buffer_length b) /\ buffer_live h b)) + (ensures (fun h _ h' -> + UInt32.v i < UInt32.v (buffer_length b) /\ + modifies_1 (gpointer_of_buffer_cell b i) h h' /\ + buffer_live h' b /\ + readable h' (gpointer_of_buffer_cell b i) /\ + Seq.index (buffer_as_seq h' b) (UInt32.v i) == v /\ + (buffer_readable h b ==> buffer_readable h' b) + )) + +(* unused_in, cont'd *) + +val buffer_live_unused_in_disjoint + (#t1 #t2: typ) + (h: HS.mem) + (b1: buffer t1) + (b2: buffer t2) +: Lemma + (requires (buffer_live h b1 /\ buffer_unused_in b2 h)) + (ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2))) + [SMTPat (buffer_live h b1); SMTPat (buffer_unused_in b2 h)] + +val pointer_live_buffer_unused_in_disjoint + (#t1 #t2: typ) + (h: HS.mem) + (b1: pointer t1) + (b2: buffer t2) +: Lemma + (requires (live h b1 /\ buffer_unused_in b2 h)) + (ensures (loc_disjoint (loc_pointer b1) (loc_buffer b2))) + [SMTPat (live h b1); SMTPat (buffer_unused_in b2 h)] + +val buffer_live_pointer_unused_in_disjoint + (#t1 #t2: typ) + (h: HS.mem) + (b1: buffer t1) + (b2: pointer t2) +: Lemma + (requires (buffer_live h b1 /\ unused_in b2 h)) + (ensures (loc_disjoint (loc_buffer b1) (loc_pointer b2))) + [SMTPat (buffer_live h b1); SMTPat (unused_in b2 h)] + +val reference_live_buffer_unused_in_disjoint + (#t1: Type0) + (#t2: typ) + (h: HS.mem) + (b1: HS.reference t1) + (b2: buffer t2) +: Lemma + (requires (HS.contains h b1 /\ buffer_unused_in b2 h)) + (ensures (loc_disjoint (loc_addresses (HS.frameOf b1) (Set.singleton (HS.as_addr b1))) (loc_buffer b2))) + [SMTPat (HS.contains h b1); SMTPat (buffer_unused_in b2 h)] + +val buffer_live_reference_unused_in_disjoint + (#t1: typ) + (#t2: Type0) + (h: HS.mem) + (b1: buffer t1) + (b2: HS.reference t2) +: Lemma + (requires (buffer_live h b1 /\ HS.unused_in b2 h)) + (ensures (loc_disjoint (loc_buffer b1) (loc_addresses (HS.frameOf b2) (Set.singleton (HS.as_addr b2))))) + +(* Buffer inclusion without existential quantifiers: remnants of the legacy buffer interface *) + +(* Returns the greatest buffer (of the same type) including b *) + +val root_buffer + (#t: typ) + (b: buffer t) +: GTot (buffer t) + +(* Return the "offset" of b within its root buffer *) + +val buffer_idx + (#t: typ) + (b: buffer t) +: Ghost UInt32.t + (requires True) + (ensures (fun y -> + UInt32.v y + UInt32.v (buffer_length b) <= + UInt32.v (buffer_length (root_buffer b)) + )) + +val buffer_eq_gsub_root + (#t: typ) + (b: buffer t) +: Lemma + (b == gsub_buffer (root_buffer b) (buffer_idx b) (buffer_length b)) + +val root_buffer_gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + root_buffer (gsub_buffer b i len) == root_buffer b + )) + [SMTPat (root_buffer (gsub_buffer b i len))] + +val buffer_idx_gsub_buffer + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_idx (gsub_buffer b i len) == UInt32.add (buffer_idx b) i + )) + [SMTPat (buffer_idx (gsub_buffer b i len))] + +val buffer_includes + (#t: typ) + (blarge bsmall: buffer t) +: GTot Type0 + +val buffer_includes_refl + (#t: typ) + (b: buffer t) +: Lemma + (buffer_includes b b) + [SMTPat (buffer_includes b b)] + +val buffer_includes_trans + (#t: typ) + (b1 b2 b3: buffer t) +: Lemma + (requires 
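(* Hedged sketch using the gsub and loc_includes corollaries declared just below
   (`buffer_includes_gsub_r`, `buffer_includes_loc_includes`); the name
   `includes_prefix` is illustrative:

     let includes_prefix (#t: typ) (b: buffer t) (len: UInt32.t)
     : Lemma
       (requires (UInt32.v len <= UInt32.v (buffer_length b)))
       (ensures (
         UInt32.v len <= UInt32.v (buffer_length b) /\
         loc_includes (loc_buffer b) (loc_buffer (gsub_buffer b 0ul len))))
     = buffer_includes_gsub_r b 0ul len;
       buffer_includes_loc_includes b (gsub_buffer b 0ul len)
*)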
(buffer_includes b1 b2 /\ buffer_includes b2 b3)) + (ensures (buffer_includes b1 b3)) + +val buffer_includes_gsub_r + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_includes b (gsub_buffer b i len) + )) + +val buffer_includes_gsub + (#t: typ) + (b: buffer t) + (i1: UInt32.t) + (i2: UInt32.t) + (len1: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires ( + UInt32.v i1 <= UInt32.v i2 /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1 /\ + UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i1 + UInt32.v len1 <= UInt32.v (buffer_length b) /\ + UInt32.v i2 + UInt32.v len2 <= UInt32.v (buffer_length b) /\ + buffer_includes (gsub_buffer b i1 len1) (gsub_buffer b i2 len2) + )) + [SMTPat (buffer_includes (gsub_buffer b i1 len1) (gsub_buffer b i2 len2))] + +val buffer_includes_elim + (#t: typ) + (b1 b2: buffer t) +: Lemma + (requires ( + buffer_includes b1 b2 + )) + (ensures ( + UInt32.v (buffer_idx b1) <= UInt32.v (buffer_idx b2) /\ + UInt32.v (buffer_idx b2) + UInt32.v (buffer_length b2) <= UInt32.v (buffer_idx b1) + UInt32.v (buffer_length b1) /\ + b2 == gsub_buffer b1 (UInt32.sub (buffer_idx b2) (buffer_idx b1)) (buffer_length b2) + )) + +val buffer_includes_loc_includes + (#t: typ) + (b1 b2: buffer t) +: Lemma + (requires (buffer_includes b1 b2)) + (ensures (loc_includes (loc_buffer b1) (loc_buffer b2))) + [SMTPatOr [ + [SMTPat (buffer_includes b1 b2)]; + [SMTPat (loc_includes(loc_buffer b1) (loc_buffer b2))] + ]] + + + +/// Type class instantiation for compositionality with other kinds of memory locations than regions, references or buffers (just in case). +/// No usage pattern has been found yet. + +module MG = FStar.ModifiesGen + +val cloc_aloc: HS.rid -> nat -> Tot Type0 + +val cloc_cls: MG.cls cloc_aloc + +val cloc_of_loc (l: loc) : Tot (MG.loc cloc_cls) + +val loc_of_cloc (l: MG.loc cloc_cls) : Tot loc + +val loc_of_cloc_of_loc (l: loc) : Lemma + (loc_of_cloc (cloc_of_loc l) == l) + [SMTPat (loc_of_cloc (cloc_of_loc l))] + +val cloc_of_loc_of_cloc (l: MG.loc cloc_cls) : Lemma + (cloc_of_loc (loc_of_cloc l) == l) + [SMTPat (cloc_of_loc (loc_of_cloc l))] + +val loc_includes_to_cloc (l1 l2: loc) : Lemma + (loc_includes l1 l2 <==> MG.loc_includes (cloc_of_loc l1) (cloc_of_loc l2)) + +val loc_disjoint_to_cloc (l1 l2: loc) : Lemma + (loc_disjoint l1 l2 <==> MG.loc_disjoint (cloc_of_loc l1) (cloc_of_loc l2)) + +val modifies_to_cloc (l: loc) (h1 h2: HS.mem) : Lemma + (modifies l h1 h2 <==> MG.modifies (cloc_of_loc l) h1 h2) diff --git a/stage0/ulib/legacy/FStar.Pointer.Derived1.fst b/stage0/ulib/legacy/FStar.Pointer.Derived1.fst new file mode 100644 index 00000000000..13ac7cd0f20 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Derived1.fst @@ -0,0 +1,223 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Pointer.Derived1 + +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +let includes_gfield_gen #t p #l q fd = + includes_gfield q fd; + includes_trans p q (gfield q fd) + +let includes_gufield_gen #t p #l q fd = + includes_gufield q fd; + includes_trans p q (gufield q fd) + +let includes_gcell_gen #t p #length #value q i = + includes_gcell q i; + includes_trans p q (gcell q i) + +let loc_includes_union_assoc_r2l s1 s2 s3 s = + loc_includes_trans (loc_union (loc_union s1 s2) s3) (loc_union s1 (loc_union s2 s3)) s + +let loc_includes_union_assoc_l2r s1 s2 s3 s = + loc_includes_trans (loc_union s1 (loc_union s2 s3)) (loc_union (loc_union s1 s2) s3) s + +let loc_includes_union_assoc_focalize_1 l1 l2 x r s = + loc_includes_trans (loc_union l1 (loc_union (loc_union l2 x) r)) (loc_union (loc_union l1 l2) (loc_union x r)) s + +let loc_includes_union_assoc_focalize_2 l x r1 r2 s = + loc_includes_trans (loc_union l (loc_union (loc_union x r1) r2)) (loc_union l (loc_union x (loc_union r1 r2))) s + +let loc_includes_region_union_r l s1 s2 = + loc_includes_trans (loc_union l (loc_regions s1)) (loc_union (loc_regions s1) l) (loc_regions s2) + +let loc_includes_region_union_assoc l r s1 s2 = + loc_includes_trans (loc_union l (loc_union (loc_regions s1) r)) (loc_union (loc_regions s1) (loc_union l r)) (loc_regions s2) + +let loc_disjoint_none_l s = + loc_disjoint_none_r s; + loc_disjoint_sym s loc_none + +let loc_disjoint_union_l s s1 s2 = + loc_disjoint_sym s1 s; + loc_disjoint_sym s2 s; + loc_disjoint_union_r s s1 s2; + loc_disjoint_sym s (loc_union s1 s2) + +let loc_disjoint_gfield_r p #l q fd = + loc_disjoint_includes p (loc_pointer q) p (loc_pointer (gfield q fd)) + +let loc_disjoint_gfield_l p #l q fd = + loc_disjoint_sym (loc_pointer q) p; + loc_disjoint_gfield_r p q fd; + loc_disjoint_sym p (loc_pointer (gfield q fd)) + +let loc_disjoint_gufield_r p #l q fd = + loc_disjoint_includes p (loc_pointer q) p (loc_pointer (gufield q fd)) + +let loc_disjoint_gufield_l p #l q fd = + loc_disjoint_sym (loc_pointer q) p; + loc_disjoint_gufield_r p q fd; + loc_disjoint_sym p (loc_pointer (gufield q fd)) + +let loc_disjoint_gcell_r p #value #len q i = + loc_disjoint_includes p (loc_pointer q) p (loc_pointer (gcell q i)) + +let loc_disjoint_gcell_l p #value #len q i = + loc_disjoint_sym (loc_pointer q) p; + loc_disjoint_gcell_r p q i; + loc_disjoint_sym p (loc_pointer (gcell q i)) + +let loc_disjoint_gsingleton_buffer_of_pointer_r l #t p = + loc_disjoint_includes l (loc_pointer p) l (loc_buffer (gsingleton_buffer_of_pointer p)) + +let loc_disjoint_gsingleton_buffer_of_pointer_l l #t p = + loc_disjoint_sym (loc_pointer p) l; + loc_disjoint_gsingleton_buffer_of_pointer_r l p; + loc_disjoint_sym l (loc_buffer (gsingleton_buffer_of_pointer p)) + +let loc_disjoint_gbuffer_of_array_pointer_r l #t #len p = + loc_disjoint_includes l (loc_pointer p) l (loc_buffer (gbuffer_of_array_pointer p)) + +let loc_disjoint_gbuffer_of_array_pointer_l l #t #len p = + loc_disjoint_includes (loc_pointer p) l (loc_buffer (gbuffer_of_array_pointer p)) l + +let loc_disjoint_gpointer_of_buffer_cell_r l #t b i = + loc_disjoint_includes l (loc_buffer b) l (loc_pointer (gpointer_of_buffer_cell b i)) + +let loc_disjoint_gpointer_of_buffer_cell_l l #t b i = + loc_disjoint_includes (loc_buffer b) l (loc_pointer (gpointer_of_buffer_cell b i)) l + +let loc_disjoint_gsub_buffer_r l #t b i len = + loc_disjoint_includes l (loc_buffer b) l (loc_buffer (gsub_buffer b i len)) + +let 
loc_disjoint_gsub_buffer_l l #t b i len = + loc_disjoint_includes (loc_buffer b) l (loc_buffer (gsub_buffer b i len)) l + +let loc_disjoint_addresses_pointer #t p r n = + loc_disjoint_sym (loc_pointer p) (loc_addresses r n) + +let loc_disjoint_union_r_elim l l1 l2 = + loc_disjoint_includes l (loc_union l1 l2) l l1; + loc_disjoint_includes l (loc_union l1 l2) l l2 + +let loc_disjoint_union_l_elim l l1 l2 = () + +let modifies_trans_incl_l s12 h1 h2 s23 h3 = () + +let modifies_trans_incl_r s12 h1 h2 s23 h3 = () + +let modifies_fresh_frame_popped' h0 h1 s h2 h3 = + modifies_fresh_frame_popped h0 h1 s h2 h3 + +let buffer_includes_gsub_r_gen #t b0 b i len = + buffer_includes_gsub_r b i len; + buffer_includes_trans b0 b (gsub_buffer b i len) + +let readable_gpointer_of_buffer_cell_gsub #t h b i len j = + assert (gpointer_of_buffer_cell b j == gpointer_of_buffer_cell (gsub_buffer b i len) (UInt32.sub j i)) + +private +let rec buffer_contents_equal_aux + (#a: typ) + (b1 b2: buffer a) + (len: UInt32.t) +: HST.Stack bool + (requires (fun h -> + hasEq (type_of_typ a) /\ + UInt32.v len == UInt32.v (buffer_length b1) /\ + UInt32.v len == UInt32.v (buffer_length b2) /\ + buffer_readable h b1 /\ + buffer_readable h b2 + )) + (ensures (fun h0 z h1 -> + h1 == h0 /\ + UInt32.v len == UInt32.v (buffer_length b1) /\ + UInt32.v len == UInt32.v (buffer_length b2) /\ + (z == true <==> Seq.equal (buffer_as_seq h0 b1) (buffer_as_seq h0 b2)) + )) + (decreases (UInt32.v len)) += if len = 0ul + then true + else begin + let len' = UInt32.sub len 1ul in + let t : eqtype = type_of_typ a in + let r1 : t = read_buffer b1 len' in + let r2 : t = read_buffer b2 len' in + let b1' = sub_buffer b1 0ul len' in + let b2' = sub_buffer b2 0ul len' in + let h = HST.get () in + assert (Seq.equal (buffer_as_seq h b1) (Seq.snoc (buffer_as_seq h b1') r1)); + assert (Seq.equal (buffer_as_seq h b2) (Seq.snoc (buffer_as_seq h b2') r2)); + if r1 = r2 + then + buffer_contents_equal_aux b1' b2' len' + else + false + end + +let buffer_contents_equal #a b1 b2 len = + let b1' = sub_buffer b1 0ul len in + let b2' = sub_buffer b2 0ul len in + buffer_contents_equal_aux b1' b2' len + +let buffer_readable_intro_empty #t h b = + buffer_readable_intro h b + +let loc_disjoint_gsub_buffer_gpointer_of_buffer_cell #a b i len j = + assert (gpointer_of_buffer_cell b j == gpointer_of_buffer_cell (gsub_buffer b j 1ul) 0ul) + +let buffer_readable_gsub_intro #t h b i len = + buffer_readable_intro h (gsub_buffer b i len) + +let buffer_readable_gsub_elim #t h b i len = + buffer_readable_elim h (gsub_buffer b i len) + +let buffer_as_seq_gsub_buffer_append #t h b i len1 len2 = + Seq.lemma_eq_intro (buffer_as_seq h (gsub_buffer b i (UInt32.add len1 len2))) (Seq.append (buffer_as_seq h (gsub_buffer b i len1)) (buffer_as_seq h (gsub_buffer b (UInt32.add i len1) len2))) + +let buffer_as_seq_gsub_buffer_snoc #t h b i len = + Seq.lemma_eq_intro (buffer_as_seq h (gsub_buffer b i (UInt32.add len 1ul))) (Seq.snoc (buffer_as_seq h (gsub_buffer b i len)) (Seq.index (buffer_as_seq h b) (UInt32.v i + UInt32.v len))) + +let buffer_as_seq_gsub_buffer_cons #t h b i len = + Seq.lemma_eq_intro (buffer_as_seq h (gsub_buffer b i (UInt32.add len 1ul))) (Seq.cons (Seq.index (buffer_as_seq h b) (UInt32.v i)) (buffer_as_seq h (gsub_buffer b (UInt32.add i 1ul) len))) + +let buffer_snoc #t b i len v = + let h = HST.get () in + buffer_readable_gsub_elim h b i len; + write_buffer b (UInt32.add i len) v; + let h' = HST.get () in + buffer_readable_gsub_intro h' b i (UInt32.add len 1ul); + 
buffer_as_seq_gsub_buffer_snoc h' b i len; + assert (Seq.index (buffer_as_seq h' b) (UInt32.v (UInt32.add i len)) == v) + +let buffer_cons #t b i len v = + let h = HST.get () in + buffer_readable_gsub_elim h b (UInt32.add i 1ul) len; + write_buffer b i v; + let h' = HST.get () in + buffer_readable_gsub_intro h' b i (UInt32.add len 1ul); + buffer_as_seq_gsub_buffer_cons h' b i len + +let buffer_readable_gsub_merge #t b i len h = + buffer_readable_intro h b + +let buffer_readable_modifies_gsub #t b i len h0 h1 l = + buffer_readable_intro h1 (gsub_buffer b 0ul i); + buffer_readable_intro h1 (gsub_buffer b (UInt32.add i len) (UInt32.sub (buffer_length b) (UInt32.add i len))); + buffer_readable_gsub_merge b i len h1 diff --git a/stage0/ulib/legacy/FStar.Pointer.Derived1.fsti b/stage0/ulib/legacy/FStar.Pointer.Derived1.fsti new file mode 100644 index 00000000000..78fac7a4758 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Derived1.fsti @@ -0,0 +1,564 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Pointer.Derived1 +include FStar.Pointer.Base + +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +val includes_gfield_gen + (#t: typ) + (p: pointer t) + (#l: struct_typ) + (q: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires (includes p q)) + (ensures (includes p (gfield q fd))) + [SMTPat (includes p (gfield q fd))] + +val includes_gufield_gen + (#t: typ) + (p: pointer t) + (#l: union_typ) + (q: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (includes p q)) + (ensures (includes p (gufield q fd))) + [SMTPat (includes p (gufield q fd))] + +val includes_gcell_gen + (#t: typ) + (p: pointer t) + (#length: array_length_t) + (#value: typ) + (q: pointer (TArray length value)) + (i: UInt32.t) +: Lemma + (requires (includes p q /\ UInt32.v i < UInt32.v length)) + (ensures (UInt32.v i < UInt32.v length /\ includes p (gcell q i))) + [SMTPat (includes p (gcell q i))] + +val loc_includes_union_assoc_r2l + (s1 s2 s3 s: loc) +: Lemma + (requires (loc_includes (loc_union s1 (loc_union s2 s3)) s)) + (ensures (loc_includes (loc_union (loc_union s1 s2) s3) s)) + [SMTPat (loc_includes (loc_union (loc_union s1 s2) s3) s)] + +val loc_includes_union_assoc_l2r + (s1 s2 s3 s: loc) +: Lemma + (requires (loc_includes (loc_union (loc_union s1 s2) s3) s)) + (ensures (loc_includes (loc_union s1 (loc_union s2 s3)) s)) + [SMTPat (loc_includes (loc_union s1 (loc_union s2 s3)) s)] + +val loc_includes_union_assoc_focalize_1 + (l1 l2 x r s: loc) +: Lemma + (requires (loc_includes (loc_union (loc_union l1 l2) (loc_union x r)) s)) + (ensures (loc_includes (loc_union l1 (loc_union (loc_union l2 x) r)) s)) + [SMTPat (loc_includes (loc_union l1 (loc_union (loc_union l2 x) r)) s)] + +val loc_includes_union_assoc_focalize_2 + (l x r1 r2 s: loc) +: Lemma + (requires (loc_includes (loc_union l (loc_union x (loc_union r1 r2))) s)) + (ensures (loc_includes (loc_union l (loc_union (loc_union x r1) r2)) s)) + [SMTPat 
(loc_includes (loc_union l (loc_union (loc_union x r1) r2)) s)] + +val loc_includes_region_union_r + (l: loc) + (s1 s2: Set.set HH.rid) +: Lemma + (requires (loc_includes l (loc_regions (Set.intersect s2 (Set.complement s1))))) + (ensures (loc_includes (loc_union l (loc_regions s1)) (loc_regions s2))) + [SMTPat (loc_includes (loc_union l (loc_regions s1)) (loc_regions s2))] + +val loc_includes_region_union_assoc + (l r: loc) + (s1 s2: Set.set HH.rid) +: Lemma + (requires (loc_includes (loc_union l r) (loc_regions (Set.intersect s2 (Set.complement s1))))) + (ensures (loc_includes (loc_union l (loc_union (loc_regions s1) r)) (loc_regions s2))) + [SMTPat (loc_includes (loc_union l (loc_union (loc_regions s1) r)) (loc_regions s2))] + +val loc_disjoint_none_l + (s: loc) +: Lemma + (ensures (loc_disjoint loc_none s)) + [SMTPat (loc_disjoint loc_none s)] + +val loc_disjoint_union_l + (s s1 s2: loc) +: Lemma + (requires (loc_disjoint s1 s /\ loc_disjoint s2 s)) + (ensures (loc_disjoint (loc_union s1 s2) s)) + [SMTPat (loc_disjoint (loc_union s1 s2) s)] + +val loc_disjoint_gfield_r + (p: loc) + (#l: struct_typ) + (q: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires (loc_disjoint p (loc_pointer q))) + (ensures (loc_disjoint p (loc_pointer (gfield q fd)))) + [SMTPat (loc_disjoint p (loc_pointer (gfield q fd)))] + +val loc_disjoint_gfield_l + (p: loc) + (#l: struct_typ) + (q: pointer (TStruct l)) + (fd: struct_field l) +: Lemma + (requires (loc_disjoint (loc_pointer q) p)) + (ensures (loc_disjoint (loc_pointer (gfield q fd)) p)) + [SMTPat (loc_disjoint (loc_pointer (gfield q fd)) p)] + +val loc_disjoint_gufield_r + (p: loc) + (#l: struct_typ) + (q: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (loc_disjoint p (loc_pointer q))) + (ensures (loc_disjoint p (loc_pointer (gufield q fd)))) + [SMTPat (loc_disjoint p (loc_pointer (gufield q fd)))] + +val loc_disjoint_gufield_l + (p: loc) + (#l: struct_typ) + (q: pointer (TUnion l)) + (fd: struct_field l) +: Lemma + (requires (loc_disjoint (loc_pointer q) p)) + (ensures (loc_disjoint (loc_pointer (gufield q fd)) p)) + [SMTPat (loc_disjoint (loc_pointer (gufield q fd)) p)] + +val loc_disjoint_gcell_r + (p: loc) + (#value: typ) + (#len: array_length_t) + (q: pointer (TArray len value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v len /\ loc_disjoint p (loc_pointer q))) + (ensures (UInt32.v i < UInt32.v len /\ loc_disjoint p (loc_pointer (gcell q i)))) + [SMTPat (loc_disjoint p (loc_pointer (gcell q i)))] + +val loc_disjoint_gcell_l + (p: loc) + (#value: typ) + (#len: array_length_t) + (q: pointer (TArray len value)) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v len /\ loc_disjoint (loc_pointer q) p)) + (ensures (UInt32.v i < UInt32.v len /\ loc_disjoint (loc_pointer (gcell q i)) p)) + [SMTPat (loc_disjoint (loc_pointer (gcell q i)) p)] + +val loc_disjoint_gsingleton_buffer_of_pointer_r + (l: loc) + (#t: typ) + (p: pointer t) +: Lemma + (requires (loc_disjoint l (loc_pointer p))) + (ensures (loc_disjoint l (loc_buffer (gsingleton_buffer_of_pointer p)))) + [SMTPat (loc_disjoint l (loc_buffer (gsingleton_buffer_of_pointer p)))] + +val loc_disjoint_gsingleton_buffer_of_pointer_l + (l: loc) + (#t: typ) + (p: pointer t) +: Lemma + (requires (loc_disjoint (loc_pointer p) l)) + (ensures (loc_disjoint (loc_buffer (gsingleton_buffer_of_pointer p)) l)) + [SMTPat (loc_disjoint (loc_buffer (gsingleton_buffer_of_pointer p)) l)] + +val loc_disjoint_gbuffer_of_array_pointer_r + (l: loc) + (#t: typ) + (#len: 
array_length_t) + (p: pointer (TArray len t)) +: Lemma + (requires (loc_disjoint l (loc_pointer p))) + (ensures (loc_disjoint l (loc_buffer (gbuffer_of_array_pointer p)))) + [SMTPat (loc_disjoint l (loc_buffer (gbuffer_of_array_pointer p)))] + +val loc_disjoint_gbuffer_of_array_pointer_l + (l: loc) + (#t: typ) + (#len: array_length_t) + (p: pointer (TArray len t)) +: Lemma + (requires (loc_disjoint (loc_pointer p) l)) + (ensures (loc_disjoint (loc_buffer (gbuffer_of_array_pointer p)) l)) + [SMTPat (loc_disjoint (loc_buffer (gbuffer_of_array_pointer p)) l)] + +val loc_disjoint_gpointer_of_buffer_cell_r + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b) /\ loc_disjoint l (loc_buffer b))) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ loc_disjoint l (loc_pointer (gpointer_of_buffer_cell b i)))) + [SMTPat (loc_disjoint l (loc_pointer (gpointer_of_buffer_cell b i)))] + +val loc_disjoint_gpointer_of_buffer_cell_l + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) +: Lemma + (requires (UInt32.v i < UInt32.v (buffer_length b) /\ loc_disjoint (loc_buffer b) l)) + (ensures (UInt32.v i < UInt32.v (buffer_length b) /\ loc_disjoint (loc_pointer (gpointer_of_buffer_cell b i)) l)) + [SMTPat (loc_disjoint (loc_pointer (gpointer_of_buffer_cell b i)) l)] + +val loc_disjoint_gsub_buffer_r + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ loc_disjoint l (loc_buffer b))) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ loc_disjoint l (loc_buffer (gsub_buffer b i len)))) + [SMTPat (loc_disjoint l (loc_buffer (gsub_buffer b i len)))] + +val loc_disjoint_gsub_buffer_l + (l: loc) + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ loc_disjoint (loc_buffer b) l)) + (ensures (UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ loc_disjoint (loc_buffer (gsub_buffer b i len)) l)) + [SMTPat (loc_disjoint (loc_buffer (gsub_buffer b i len)) l)] + +val loc_disjoint_addresses_pointer + (#t: typ) + (p: pointer t) + (r: HH.rid) + (n: Set.set nat) +: Lemma + (requires (r <> frameOf p \/ (~ (Set.mem (as_addr p) n)))) + (ensures (loc_disjoint (loc_addresses r n) (loc_pointer p))) + [SMTPat (loc_disjoint (loc_addresses r n) (loc_pointer p))] + +val loc_disjoint_union_r_elim + (l l1 l2: loc) +: Lemma + (requires (loc_disjoint l (loc_union l1 l2))) + (ensures (loc_disjoint l l1 /\ loc_disjoint l l2)) + [SMTPat (loc_disjoint l (loc_union l1 l2))] + +val loc_disjoint_union_l_elim + (l l1 l2: loc) +: Lemma + (requires (loc_disjoint (loc_union l1 l2) l)) + (ensures (loc_disjoint l1 l /\ loc_disjoint l2 l)) + [SMTPat (loc_disjoint (loc_union l1 l2) l)] + +val modifies_trans_incl_l + (s12: loc) + (h1 h2: HS.mem) + (s23: loc) + (h3: HS.mem) +: Lemma + (requires (modifies s12 h1 h2 /\ modifies s23 h2 h3 /\ loc_includes s12 s23)) + (ensures (modifies s12 h1 h3)) + [SMTPat (modifies s12 h1 h2); SMTPat (modifies s23 h2 h3)] + +val modifies_trans_incl_r + (s12: loc) + (h1 h2: HS.mem) + (s23: loc) + (h3: HS.mem) +: Lemma + (requires (modifies s12 h1 h2 /\ modifies s23 h2 h3 /\ loc_includes s23 s12)) + (ensures (modifies s23 h1 h3)) + [SMTPat (modifies s12 h1 h2); SMTPat (modifies s23 h2 h3)] + +val modifies_fresh_frame_popped' + (h0 h1: HS.mem) + (s: loc) + (h2 h3: HS.mem) +: Lemma + (requires ( + HS.fresh_frame h0 h1 /\ + modifies (loc_union 
(loc_regions (Set.singleton (HS.get_tip h1))) s) h1 h2 /\ + (HS.get_tip h2) == (HS.get_tip h1) /\ + HS.popped h2 h3 + )) + (ensures ( + modifies s h0 h3 /\ + (HS.get_tip h3) == HS.get_tip h0 + )) + +val buffer_includes_gsub_r_gen + (#t: typ) + (b0: buffer t) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_includes b0 b + )) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_includes b0 (gsub_buffer b i len) + )) + [SMTPat (buffer_includes b0 (gsub_buffer b i len))] + +val readable_gpointer_of_buffer_cell_gsub + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (j: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + UInt32.v i <= UInt32.v j /\ + UInt32.v j < UInt32.v i + UInt32.v len /\ + buffer_readable h (gsub_buffer b i len) + )) + (ensures ( + UInt32.v j < UInt32.v (buffer_length b) /\ + readable h (gpointer_of_buffer_cell b j) + )) + [SMTPat (readable h (gpointer_of_buffer_cell b j)); SMTPat (buffer_readable h (gsub_buffer b i len))] + +val buffer_contents_equal + (#a: typ) + (b1 b2: buffer a) + (len: UInt32.t) +: HST.Stack bool + (requires (fun h -> + hasEq (type_of_typ a) /\ + UInt32.v len <= UInt32.v (buffer_length b1) /\ + UInt32.v len <= UInt32.v (buffer_length b2) /\ + buffer_readable h (gsub_buffer b1 0ul len) /\ + buffer_readable h (gsub_buffer b2 0ul len) + )) + (ensures (fun h0 z h1 -> + h1 == h0 /\ + UInt32.v len <= UInt32.v (buffer_length b1) /\ + UInt32.v len <= UInt32.v (buffer_length b2) /\ + (z == true <==> Seq.equal (buffer_as_seq h0 (gsub_buffer b1 0ul len)) (buffer_as_seq h0 (gsub_buffer b2 0ul len))) + )) + +val buffer_readable_intro_empty + (#t: typ) + (h: HS.mem) + (b: buffer t) +: Lemma + (requires ( + buffer_live h b /\ + UInt32.v (buffer_length b) == 0 + )) + (ensures (buffer_readable h b)) + [SMTPatOr [ + [SMTPat (buffer_readable h b)]; + [SMTPat (buffer_live h b)]; + ]] + +val loc_disjoint_gsub_buffer_gpointer_of_buffer_cell + (#a: typ) + (b: buffer a) + (i: UInt32.t) + (len: UInt32.t) + (j: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + UInt32.v j < UInt32.v (buffer_length b) /\ + (UInt32.v j < UInt32.v i \/ UInt32.v i + UInt32.v len <= UInt32.v j) + )) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + UInt32.v j < UInt32.v (buffer_length b) /\ + loc_disjoint (loc_buffer (gsub_buffer b i len)) (loc_pointer (gpointer_of_buffer_cell b j)) + )) + [SMTPat (loc_disjoint (loc_buffer (gsub_buffer b i len)) (loc_pointer (gpointer_of_buffer_cell b j)))] + +val buffer_readable_gsub_intro + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_live h b /\ ( + forall (j: UInt32.t) . + (UInt32.v i <= UInt32.v j /\ + UInt32.v j < UInt32.v i + UInt32.v len) ==> + readable h (gpointer_of_buffer_cell b j) + ))) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_readable h (gsub_buffer b i len) + )) + +val buffer_readable_gsub_elim + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_readable h (gsub_buffer b i len) + )) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_live h b /\ ( + forall (j: UInt32.t) . 
+ (UInt32.v i <= UInt32.v j /\ + UInt32.v j < UInt32.v i + UInt32.v len) ==> + readable h (gpointer_of_buffer_cell b j) + ))) + +val buffer_as_seq_gsub_buffer_append + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len1: UInt32.t) + (len2: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len1 + UInt32.v len2 <= UInt32.v (buffer_length b) +// buffer_readable h (gsub_buffer b i (UInt32.add len1 len2)) + )) + (ensures ( + UInt32.v i + UInt32.v len1 + UInt32.v len2 <= UInt32.v (buffer_length b) /\ + buffer_as_seq h (gsub_buffer b i (UInt32.add len1 len2)) == Seq.append (buffer_as_seq h (gsub_buffer b i len1)) (buffer_as_seq h (gsub_buffer b (UInt32.add i len1) len2)) + )) + +val buffer_as_seq_gsub_buffer_snoc + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) /\ + buffer_as_seq h (gsub_buffer b i (UInt32.add len 1ul)) == Seq.snoc (buffer_as_seq h (gsub_buffer b i len)) (Seq.index (buffer_as_seq h b) (UInt32.v i + UInt32.v len)) + )) + +val buffer_as_seq_gsub_buffer_cons + (#t: typ) + (h: HS.mem) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) +: Lemma + (requires ( + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) + )) + (ensures ( + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) /\ + buffer_as_seq h (gsub_buffer b i (UInt32.add len 1ul)) == Seq.cons (Seq.index (buffer_as_seq h b) (UInt32.v i)) (buffer_as_seq h (gsub_buffer b (UInt32.add i 1ul) len)) + )) + +val buffer_snoc + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (v: type_of_typ t) +: HST.Stack unit + (requires (fun h -> + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) /\ + buffer_readable h (gsub_buffer b i len) + )) + (ensures (fun h _ h' -> + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) /\ + modifies (loc_pointer (gpointer_of_buffer_cell b (UInt32.add i len))) h h' /\ + buffer_readable h' (gsub_buffer b i (UInt32.add len 1ul)) /\ + buffer_as_seq h' (gsub_buffer b i (UInt32.add len 1ul)) == Seq.snoc (buffer_as_seq h (gsub_buffer b i len)) v + )) + +val buffer_cons + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (v: type_of_typ t) +: HST.Stack unit + (requires (fun h -> + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) /\ + buffer_readable h (gsub_buffer b (UInt32.add i 1ul) len) + )) + (ensures (fun h _ h' -> + UInt32.v i + UInt32.v len < UInt32.v (buffer_length b) /\ + modifies (loc_pointer (gpointer_of_buffer_cell b i)) h h' /\ + buffer_readable h' (gsub_buffer b i (UInt32.add len 1ul)) /\ + buffer_as_seq h' (gsub_buffer b i (UInt32.add len 1ul)) == Seq.cons v (buffer_as_seq h (gsub_buffer b (UInt32.add i 1ul) len)) + )) + +val buffer_readable_gsub_merge + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (h: HS.mem) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_readable h (gsub_buffer b 0ul i) /\ + buffer_readable h (gsub_buffer b i len) /\ ( + let off = UInt32.add i len in + buffer_readable h (gsub_buffer b off (UInt32.sub (buffer_length b) off)) + ))) + (ensures (buffer_readable h b)) + +val buffer_readable_modifies_gsub + (#t: typ) + (b: buffer t) + (i: UInt32.t) + (len: UInt32.t) + (h0 h1: HS.mem) + (l: loc) +: Lemma + (requires ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + modifies l h0 h1 /\ + loc_disjoint l (loc_buffer (gsub_buffer b 0ul i)) /\ + loc_disjoint l (loc_buffer 
(gsub_buffer b (UInt32.add i len) (UInt32.sub (buffer_length b) (UInt32.add i len)))) /\ + buffer_readable h0 b /\ + buffer_readable h1 (gsub_buffer b i len) + )) + (ensures ( + UInt32.v i + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_readable h1 b + )) diff --git a/stage0/ulib/legacy/FStar.Pointer.Derived2.fst b/stage0/ulib/legacy/FStar.Pointer.Derived2.fst new file mode 100644 index 00000000000..f8df5b6c23f --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Derived2.fst @@ -0,0 +1,174 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Pointer.Derived2 + +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +private +let copy_buffer_contents_precond' + (#t: typ) + (a: buffer t) (* source *) + (b: buffer t) (* destination *) + (h: HS.mem) +: GTot Type0 += buffer_live h b /\ + buffer_readable h a /\ + buffer_length b == buffer_length a /\ + loc_disjoint (loc_buffer a) (loc_buffer b) + +private +let copy_buffer_contents_postcond' + (#t: typ) + (a: buffer t) (* source *) + (b: buffer t) (* destination *) + (h: HS.mem) + (h' : HS.mem) +: GTot Type0 += copy_buffer_contents_precond' a b h /\ + modifies (loc_buffer b) h h' /\ + buffer_readable h' b /\ + buffer_as_seq h' b == buffer_as_seq h a + +private +let copy_buffer_contents_inv + (#t: typ) + (a: buffer t) (* source *) + (b: buffer t) (* destination *) + (len' : UInt32.t) + (h: HS.mem) + (h' : HS.mem) +: GTot Type0 += copy_buffer_contents_precond' a b h /\ + modifies (loc_buffer b) h h' /\ + UInt32.v len' <= UInt32.v (buffer_length a) /\ + buffer_readable h' (gsub_buffer b 0ul len') /\ + buffer_as_seq h' (gsub_buffer b 0ul len') == buffer_as_seq h (gsub_buffer a 0ul len') + +private +val copy_buffer_contents_init + (#t: typ) + (a: buffer t) (* source *) + (b: buffer t) (* destination *) + (h: HS.mem) +: Lemma + (requires (copy_buffer_contents_precond' a b h)) + (ensures (copy_buffer_contents_inv a b 0ul h h)) + +let copy_buffer_contents_init #t a b h = + buffer_readable_intro_empty h (gsub_buffer b 0ul 0ul); + Seq.lemma_eq_intro (buffer_as_seq h (gsub_buffer b 0ul 0ul)) (buffer_as_seq h (gsub_buffer a 0ul 0ul)) + +private +val copy_buffer_contents_advance + (#t: typ) + (a: buffer t) (* source *) + (b: buffer t) (* destination *) + (len' : UInt32.t) + (h: Ghost.erased HS.mem) +: HST.Stack unit + (requires (fun h0 -> + copy_buffer_contents_inv a b len' (Ghost.reveal h) h0 /\ + UInt32.v len' < UInt32.v (buffer_length a) + )) + (ensures (fun h1 _ h2 -> + copy_buffer_contents_inv a b len' (Ghost.reveal h) h1 /\ + UInt32.v len' < UInt32.v (buffer_length a) /\ + copy_buffer_contents_inv a b (UInt32.add len' 1ul) (Ghost.reveal h) h2 + )) + +#set-options "--z3rlimit 16" + +let copy_buffer_contents_advance #t a b len' h = + let v = read_buffer a len' in + buffer_snoc b 0ul len' v; + buffer_as_seq_gsub_buffer_snoc (Ghost.reveal h) a 0ul len' + +private +val copy_buffer_contents_aux + (#t: typ) + (a: buffer t) (* source *) + (b: buffer t) (* destination 
*) + (len: UInt32.t) + (len': UInt32.t) + (h: Ghost.erased HS.mem) +: HST.Stack unit + (requires (fun h0 -> + copy_buffer_contents_inv a b len' (Ghost.reveal h) h0 /\ + len == buffer_length a + )) + (ensures (fun h0 _ h1 -> + copy_buffer_contents_inv a b len' (Ghost.reveal h) h0 /\ + copy_buffer_contents_postcond' a b (Ghost.reveal h) h1 + )) + (decreases (UInt32.v (buffer_length a) - UInt32.v len')) + +let rec copy_buffer_contents_aux #t a b len len' h = + if len = len' + then () + else begin + copy_buffer_contents_advance a b len' h; + copy_buffer_contents_aux a b len (UInt32.add len' 1ul) h + end + +let copy_buffer_contents_fin + (#t: typ) + (a: buffer t) (* source *) + (idx_a: UInt32.t) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (h: HS.mem) + (h' : HS.mem) +: Lemma + (requires ( + copy_buffer_contents_precond a idx_a b idx_b len h /\ + copy_buffer_contents_postcond' (gsub_buffer a idx_a len) (gsub_buffer b idx_b len) h h' + )) + (ensures ( + copy_buffer_contents_precond a idx_a b idx_b len h /\ + copy_buffer_contents_postcond a idx_a b idx_b len h h' + )) += () + +(* FIXME: Does not work if I directly try to define copy_buffer_contents *) + +(* FIXME: Works in batch mode (even with --record_hints --use_hints --detail_hint_replay --query_stats) but fails in interactive mode *) + +let copy_buffer_contents' + (#t: typ) + (a: buffer t) (* source *) + (idx_a: UInt32.t) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) +: HST.Stack unit + (requires (fun h -> + copy_buffer_contents_precond a idx_a b idx_b len h + )) + (ensures (fun h0 _ h1 -> + copy_buffer_contents_postcond a idx_a b idx_b len h0 h1 + )) += let h0 = HST.get () in + let a' = sub_buffer a idx_a len in + let b' = sub_buffer b idx_b len in + copy_buffer_contents_init a' b' h0; + copy_buffer_contents_aux a' b' len 0ul (Ghost.hide h0); + let h1 = HST.get () in + copy_buffer_contents_fin a idx_a b idx_b len h0 h1 + +let copy_buffer_contents = copy_buffer_contents' diff --git a/stage0/ulib/legacy/FStar.Pointer.Derived2.fsti b/stage0/ulib/legacy/FStar.Pointer.Derived2.fsti new file mode 100644 index 00000000000..d5b3a4b6f46 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Derived2.fsti @@ -0,0 +1,67 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Pointer.Derived2 +include FStar.Pointer.Base +include FStar.Pointer.Derived1 + +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +let copy_buffer_contents_precond + (#t: typ) + (a: buffer t) (* source *) + (idx_a: UInt32.t) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (h: HS.mem) +: GTot Type0 += UInt32.v idx_a + UInt32.v len <= UInt32.v (buffer_length a) /\ + UInt32.v idx_b + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_live h (gsub_buffer b idx_b len) /\ + buffer_readable h (gsub_buffer a idx_a len) /\ + loc_disjoint (loc_buffer (gsub_buffer a idx_a len)) (loc_buffer (gsub_buffer b idx_b len)) + +let copy_buffer_contents_postcond + (#t: typ) + (a: buffer t) (* source *) + (idx_a: UInt32.t) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (h: HS.mem) + (h' : HS.mem) +: GTot Type0 += copy_buffer_contents_precond a idx_a b idx_b len h /\ + modifies (loc_buffer (gsub_buffer b idx_b len)) h h' /\ + buffer_readable h' (gsub_buffer b idx_b len) /\ + buffer_as_seq h' (gsub_buffer b idx_b len) == buffer_as_seq h (gsub_buffer a idx_a len) + +val copy_buffer_contents + (#t: typ) + (a: buffer t) (* source *) + (idx_a: UInt32.t) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) +: HST.Stack unit + (requires (fun h -> + copy_buffer_contents_precond a idx_a b idx_b len h + )) + (ensures (fun h0 _ h1 -> + copy_buffer_contents_postcond a idx_a b idx_b len h0 h1 + )) diff --git a/stage0/ulib/legacy/FStar.Pointer.Derived3.fst b/stage0/ulib/legacy/FStar.Pointer.Derived3.fst new file mode 100644 index 00000000000..60d00c692a5 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Derived3.fst @@ -0,0 +1,163 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.Pointer.Derived3 + +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +private +let fill_buffer_precond' + (#t: typ) + (b: buffer t) (* destination *) + (h: HS.mem) +: GTot Type0 += buffer_live h b + +private +let fill_buffer_postcond' + (#t: typ) + (b: buffer t) (* destination *) + (v: type_of_typ t) + (h: HS.mem) + (h' : HS.mem) +: GTot Type0 += fill_buffer_precond' b h /\ + modifies (loc_buffer b) h h' /\ + buffer_readable h' b /\ + buffer_as_seq h' b == Seq.create (UInt32.v (buffer_length b)) v + +private +let fill_buffer_inv + (#t: typ) + (b: buffer t) (* destination *) + (len' : UInt32.t) + (v: type_of_typ t) + (h: HS.mem) + (h' : HS.mem) +: GTot Type0 += fill_buffer_precond' b h /\ + modifies (loc_buffer b) h h' /\ + UInt32.v len' <= UInt32.v (buffer_length b) /\ + buffer_readable h' (gsub_buffer b 0ul len') /\ + buffer_as_seq h' (gsub_buffer b 0ul len') == Seq.create (UInt32.v len') v + +private +val fill_buffer_init + (#t: typ) + (b: buffer t) (* destination *) + (v: type_of_typ t) + (h: HS.mem) +: Lemma + (requires (fill_buffer_precond' b h)) + (ensures (fill_buffer_inv b 0ul v h h)) + +let fill_buffer_init #t b v h = + buffer_readable_intro_empty h (gsub_buffer b 0ul 0ul); + Seq.lemma_eq_intro (buffer_as_seq h (gsub_buffer b 0ul 0ul)) (Seq.create 0 v) + +private +val fill_buffer_advance + (#t: typ) + (b: buffer t) (* destination *) + (len' : UInt32.t) + (v: type_of_typ t) + (h: Ghost.erased HS.mem) +: HST.Stack unit + (requires (fun h0 -> + fill_buffer_inv b len' v (Ghost.reveal h) h0 /\ + UInt32.v len' < UInt32.v (buffer_length b) + )) + (ensures (fun h1 _ h2 -> + fill_buffer_inv b len' v (Ghost.reveal h) h1 /\ + UInt32.v len' < UInt32.v (buffer_length b) /\ + fill_buffer_inv b (UInt32.add len' 1ul) v (Ghost.reveal h) h2 + )) + +#set-options "--z3rlimit 16" + +let fill_buffer_advance #t b len' v h = + buffer_snoc b 0ul len' v; + Seq.lemma_eq_intro (Seq.snoc (Seq.create (UInt32.v len') v) v) (Seq.create (UInt32.v (UInt32.add len' 1ul)) v) + +private +val fill_buffer_aux + (#t: typ) + (b: buffer t) (* destination *) + (len: UInt32.t) + (len': UInt32.t) + (v: type_of_typ t) + (h: Ghost.erased HS.mem) +: HST.Stack unit + (requires (fun h0 -> + fill_buffer_inv b len' v (Ghost.reveal h) h0 /\ + len == buffer_length b + )) + (ensures (fun h0 _ h1 -> + fill_buffer_inv b len' v (Ghost.reveal h) h0 /\ + fill_buffer_postcond' b v (Ghost.reveal h) h1 + )) + (decreases (UInt32.v (buffer_length b) - UInt32.v len')) + +let rec fill_buffer_aux #t b len len' v h = + if len = len' + then () + else begin + fill_buffer_advance b len' v h; + fill_buffer_aux b len (UInt32.add len' 1ul) v h + end + +let fill_buffer_fin + (#t: typ) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (v: type_of_typ t) + (h: HS.mem) + (h' : HS.mem) +: Lemma + (requires ( + fill_buffer_precond b idx_b len h /\ + fill_buffer_postcond' (gsub_buffer b idx_b len) v h h' + )) + (ensures ( + fill_buffer_precond b idx_b len h /\ + fill_buffer_postcond b idx_b len v h h' + )) += () + +let fill_buffer' + (#t: typ) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (v: type_of_typ t) +: HST.Stack unit + (requires (fun h -> + fill_buffer_precond b idx_b len h + )) + (ensures (fun h0 _ h1 -> + fill_buffer_postcond b idx_b len v h0 h1 + )) += let h0 = HST.get () in + let b' = sub_buffer b idx_b len in + fill_buffer_init b' v h0; + fill_buffer_aux b' len 0ul v (Ghost.hide h0); + let h1 = HST.get () in + fill_buffer_fin b idx_b 
len v h0 h1 + + +let fill_buffer = fill_buffer' diff --git a/stage0/ulib/legacy/FStar.Pointer.Derived3.fsti b/stage0/ulib/legacy/FStar.Pointer.Derived3.fsti new file mode 100644 index 00000000000..408c4ac1d4a --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.Derived3.fsti @@ -0,0 +1,61 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Pointer.Derived3 +include FStar.Pointer.Base +include FStar.Pointer.Derived1 +// include FStar.Pointer.Derived2 // useless here + +module HH = FStar.HyperStack +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +let fill_buffer_precond + (#t: typ) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (h: HS.mem) +: GTot Type0 += UInt32.v idx_b + UInt32.v len <= UInt32.v (buffer_length b) /\ + buffer_live h (gsub_buffer b idx_b len) + +let fill_buffer_postcond + (#t: typ) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (v: type_of_typ t) + (h: HS.mem) + (h' : HS.mem) +: GTot Type0 += fill_buffer_precond b idx_b len h /\ + modifies (loc_buffer (gsub_buffer b idx_b len)) h h' /\ + buffer_readable h' (gsub_buffer b idx_b len) /\ + buffer_as_seq h' (gsub_buffer b idx_b len) == Seq.create (UInt32.v len) v + +val fill_buffer + (#t: typ) + (b: buffer t) (* destination *) + (idx_b: UInt32.t) + (len: UInt32.t) + (v: type_of_typ t) +: HST.Stack unit + (requires (fun h -> + fill_buffer_precond b idx_b len h + )) + (ensures (fun h0 _ h1 -> + fill_buffer_postcond b idx_b len v h0 h1 + )) diff --git a/stage0/ulib/legacy/FStar.Pointer.fst b/stage0/ulib/legacy/FStar.Pointer.fst new file mode 100644 index 00000000000..e4c35c867c7 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Pointer.fst @@ -0,0 +1,20 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Pointer +include FStar.Pointer.Base +include FStar.Pointer.Derived1 +include FStar.Pointer.Derived2 +include FStar.Pointer.Derived3 diff --git a/stage0/ulib/legacy/FStar.Relational.Comp.fst b/stage0/ulib/legacy/FStar.Relational.Comp.fst new file mode 100644 index 00000000000..4e0a530a34a --- /dev/null +++ b/stage0/ulib/legacy/FStar.Relational.Comp.fst @@ -0,0 +1,116 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Relational.Comp +open FStar.ST +open FStar.All +open FStar.Heap +open FStar.Relational.Relational + +type heap2 = double heap + +new_effect STATE2 = STATE_h heap2 +let st2_Pre = st_pre_h heap2 +let st2_Post' (a:Type) (pre:Type) = st_post_h' heap2 a pre +let st2_Post (a:Type) = st_post_h heap2 a +let st2_WP (a:Type) = st_wp_h heap2 a +effect ST2 (a:Type) (pre:st2_Pre) (post: (h:heap2 -> Tot (st2_Post' a (pre h)))) = + STATE2 a + (fun (p:st2_Post a) (h:heap2) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *) +effect St2 (a:Type) = ST2 a (fun h -> True) (fun h0 r h1 -> True) +sub_effect + DIV ~> STATE2 = (fun (a:Type) (wp:pure_wp a) (p:st2_Post a) -> (fun h2 -> wp (fun a0 -> p a0 h2))) + +(* construct a st2_WP from 2 st_wps *) +val comp : (a:Type) -> (b:Type) -> (wp0:st_wp a) -> (wp1:st_wp b) -> Tot (st2_WP (rel a b)) +let comp a b wp0 wp1 p h2 = + wp0 (fun y0 h0 -> + wp1 (fun y1 h1 -> p (R y0 y1) (R h0 h1)) + (R?.r h2)) + (R?.l h2) + +//TODO: this should be conditional on the monotonicity of the wps +assume Monotone_comp: forall a b wp1 wp2 p1 p2. (forall x h. p1 x h ==> p2 x h) + ==> (forall h. comp a b wp1 wp2 p1 h + ==> comp a b wp1 wp2 p2 h) + + +assume val compose2: #a0:Type -> #b0:Type -> #wp0:(a0 -> Tot (st_wp b0)) + -> #a1:Type -> #b1:Type -> #wp1:(a1 -> Tot (st_wp b1)) + -> $c0:(x0:a0 -> STATE b0 (wp0 x0)) + -> $c1:(x1:a1 -> STATE b1 (wp1 x1)) + -> x: rel a0 a1 + -> STATE2 (rel b0 b1) + (comp b0 b1 (wp0 (R?.l x)) (wp1 (R?.r x))) + +val compose2_self : #a:Type -> #b:Type -> #wp:(a -> Tot (st_wp b)) + -> $c:(x:a -> STATE b (wp x)) + -> x: double a + -> STATE2 (double b) + (comp b b (wp (R?.l x)) (wp (R?.r x))) +let compose2_self #a #b #wp f x = compose2 #a #b #wp #a #b #wp f f x + +(* Combine two ST2 statements A and B to create a new ST2 statement C where + the left side of C is equivalent to the left side of A and + the right side of C is equivalent to the right side of B *) +assume val cross : #a:Type -> #b:Type -> #c:Type -> #d:Type + -> #p:(heap2 -> Type) + -> #p':(heap2 -> Type) + -> #q:(heap2 -> rel a b -> heap2 -> Type) + -> #q':(heap2 -> rel c d -> heap2 -> Type) + -> $c1:(double unit -> ST2 (rel a b) + (requires (fun h -> p h)) + (ensures (fun h1 r h2 -> q h1 r h2))) + -> $c2:(double unit -> ST2 (rel c d) + (requires (fun h -> p' h)) + (ensures (fun h1 r h2 -> q' h1 r h2))) + -> ST2 (rel a d) (requires (fun h -> (exists (hl:heap) (hr:heap). + p (R (R?.l h) hr) + /\ p' (R hl (R?.r h))))) + (ensures (fun h1 r h2 -> (exists (h2l:heap) (h2r:heap) (rl:c) (rr:b). + q h1 (R (R?.l r) rr) (R (R?.l h2) (h2r)) + /\ q' h1 (R rl (R?.r r)) (R h2l (R?.r h2))))) + + +(* Create a ST statement from a ST2 statement by projection *) +val decomp_l : (a0:Type) -> (a1:Type) -> (b0:Type) -> (b1:Type) -> (al:a0) -> (wp:(rel a0 a1 -> Tot (st2_WP (rel b0 b1)))) + -> Tot (st_wp_h heap b0) +let decomp_l a0 a1 b0 b1 al wp = + fun p hl -> + (exists (ar:a1) (hr:heap). 
+ wp (R al ar) (fun y2 h2 -> p (R?.l y2) (R?.l h2)) + (R hl hr)) + +val decomp_r : (a0:Type) -> (a1:Type) -> (b0:Type) -> (b1:Type) -> (ar:a1) -> (wp:(rel a0 a1 -> Tot (st2_WP (rel b0 b1)))) + -> Tot (st_wp_h heap b1) +let decomp_r a0 a1 b0 b1 ar wp = + fun p hr -> + (exists (al:a0) (hl:heap). + wp (R al ar) (fun y2 h2 -> p (R?.r y2) (R?.r h2)) + (R hl hr)) + +assume val project_l : #a0:Type -> #b0:Type -> #a1:Type -> #b1:Type + -> #wp:(rel a0 a1 -> Tot (st2_WP (rel b0 b1))) + -> $c:(x:rel a0 a1 -> STATE2 (rel b0 b1) (wp x)) + -> x:a0 + -> STATE b0 (decomp_l a0 a1 b0 b1 x wp) + +assume val project_r : #a0:Type -> #b0:Type -> #a1:Type -> #b1:Type + -> #wp:(rel a0 a1 -> Tot (st2_WP (rel b0 b1))) + -> $c:(x:rel a0 a1 -> STATE2 (rel b0 b1) (wp x)) + -> x:a1 + -> STATE b1 (decomp_r a0 a1 b0 b1 x wp) + + diff --git a/stage0/ulib/legacy/FStar.Relational.Relational.fst b/stage0/ulib/legacy/FStar.Relational.Relational.fst new file mode 100644 index 00000000000..825b4110e63 --- /dev/null +++ b/stage0/ulib/legacy/FStar.Relational.Relational.fst @@ -0,0 +1,79 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.Relational.Relational +open FStar.Heap + +(* Relational Type constructor (Equivalent to pairs) *) +type rel (a:Type) (b:Type) : Type = + | R : l:a -> r:b -> rel a b + +(* Some frequently used abbreviations *) +type double (t:Type) = rel t t +type eq (t:Type) = p:(double t){R?.l p == R?.r p} + +let twice x = R x x +let tu = twice () + +(* functions to lift normal functions to Relational functions *) +val rel_map1T : ('a -> Tot 'b) -> (double 'a) -> Tot (double 'b) +let rel_map1T f (R x1 x2) = R (f x1) (f x2) + +val rel_map2Teq : #a:eqtype -> #b:eqtype -> #c:Type -> (a -> b -> Tot c) -> (double a) -> (double b) -> Tot (double c) +let rel_map2Teq #a #b #c f (R x1 x2) (R y1 y2) = R (f x1 y1) (f x2 y2) + +val rel_map2T : ('a -> 'b -> Tot 'c) -> (double 'a) -> (double 'b) -> Tot (double 'c) +let rel_map2T f (R x1 x2) (R y1 y2) = R (f x1 y1) (f x2 y2) + +val rel_map2G : ('a -> 'b -> GTot 'c) -> (double 'a) -> (double 'b) -> GTot (double 'c) +let rel_map2G f (R x1 x2) (R y1 y2) = R (f x1 y1) (f x2 y2) + +val rel_map3T : ('a -> 'b -> 'c -> Tot 'd) -> (double 'a) -> (double 'b) -> (double 'c) -> Tot (double 'd) +let rel_map3T f (R x1 x2) (R y1 y2) (R z1 z2) = R (f x1 y1 z1) (f x2 y2 z2) + +val rel_map3G : ('a -> 'b -> 'c -> GTot 'd) -> (double 'a) -> (double 'b) -> (double 'c) -> GTot (double 'd) +let rel_map3G f (R x1 x2) (R y1 y2) (R z1 z2) = R (f x1 y1 z1) (f x2 y2 z2) + +(* Some convenient arithmetic functions *) +let op_Hat_Plus = rel_map2T (fun x y -> x + y) +let op_Hat_Minus = rel_map2T (fun x y -> x - y) +let op_Hat_Star = rel_map2T (fun x y -> op_Multiply x y) +let op_Hat_Slash = rel_map2T (fun x y -> x / y) + +(* Some convenient list functions *) +val tl_rel: #a:Type -> l:double (list a){Cons? (R?.l l) /\ Cons? 
(R?.r l)}-> Tot (double (list a)) +let tl_rel #a (R (_::xs) (_::ys)) = R xs ys +let cons_rel (R x y) (R xs ys) = R (x::xs) (y::ys) +(* Some convenient tuple functions *) +let pair_rel (R a b) (R c d) = R (a,c) (b,d) +let triple_rel (R a b) (R c d) (R e f) = R (a,c,e) (b,d,f) +let fst_rel = rel_map1T fst +let snd_rel = rel_map1T snd + +(* Some convenient boolean functions *) +let and_rel = rel_map2T (fun x y -> x && y) +let or_rel = rel_map2T (fun x y -> x || y) +let eq_rel = rel_map2Teq (fun (x y:bool) -> x = y) + +(* Some convenient functions combining left and right side (for specification only) *) +let and_irel (R a b) = a && b +let or_irel (R a b) = a || b +let eq_irel (#t:eqtype) (x:(rel t t)) = match x with + | R a b -> a = b + +(* Some convenient functions on heap (for specification) *) +let sel_rel1 (#a:Type) (h:double heap) (r:ref a) = rel_map2G sel h (twice r) +//let sel_rel2 = rel_map2G sel +//let upd_rel = rel_map3G upd diff --git a/stage0/ulib/legacy/FStar.TaggedUnion.fst b/stage0/ulib/legacy/FStar.TaggedUnion.fst new file mode 100644 index 00000000000..8b1c34baf61 --- /dev/null +++ b/stage0/ulib/legacy/FStar.TaggedUnion.fst @@ -0,0 +1,428 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.TaggedUnion + +module P = FStar.Pointer +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(** Code of a tagged union *) + +let typ_l (l: P.union_typ) = { + P.name = "__tagged__" ^ l.P.name; + P.fields = P.([("tag", TBase TUInt32); ("union", TUnion l)]); +} + +let tag_field (l: P.union_typ) : P.struct_field (typ_l l) = "tag" +let union_field (l: P.union_typ) : P.struct_field (typ_l l) = "union" + +let typ (l: P.union_typ) : P.typ = P.TStruct (typ_l l) + +(******************************************************************************) + +(* Tagging, at the logical level *) + +(* Abstract predicate providing a proof that some field matches some tag. 
*) + +let field_matches_tag + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Tot Type0 += tag_of_field tgs f == t + +let rec field_of_tag_of_field' + (#l: P.struct_typ') + (tgs: tags' l) + (f: P.struct_field' l) +: Lemma (field_of_tag' #l tgs (tag_of_field' #l tgs f) == f) + [SMTPat (field_of_tag' #l tgs (tag_of_field' #l tgs f))] += let ((f', _) :: l') = l in + let (t' :: tgs') = tgs in + if f = f' then () + else ( + let ff : string = f in + field_of_tag_of_field' #l' tgs' ff + ) + +let field_of_tag_of_field + (#l: P.union_typ) + (tgs: tags l) + (f: P.struct_field l) +: Lemma (field_of_tag #l tgs (tag_of_field #l tgs f) == f) + [SMTPat (field_of_tag #l tgs (tag_of_field #l tgs f))] += field_of_tag_of_field' tgs f + +let rec tag_of_field_of_tag' + (#l: P.struct_typ') + (tgs: tags' l) + (t: UInt32.t) +: Lemma + (requires (List.Tot.mem t tgs)) + (ensures ( + List.Tot.mem t tgs /\ + tag_of_field' #l tgs (field_of_tag' #l tgs t) == t + )) + [SMTPat (tag_of_field' #l tgs (field_of_tag' #l tgs t))] += let ((f', _) :: l') = l in + let (t' :: tgs') = tgs in + if t = t' then () + else ( + tag_of_field_of_tag' #l' tgs' t + ) + +let tag_of_field_of_tag + (#l: P.union_typ) + (tgs: tags l) + (t: UInt32.t) +: Lemma + (requires (List.Tot.mem t tgs)) + (ensures ( + List.Tot.mem t tgs /\ + tag_of_field #l tgs (field_of_tag #l tgs t) == t + )) + [SMTPat (tag_of_field #l tgs (field_of_tag #l tgs t))] += tag_of_field_of_tag' tgs t + +let field_matches_tag_intro + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Lemma + (requires (tag_of_field tgs f == t)) + (ensures (field_matches_tag tgs f t)) += () + +let field_matches_tag_elim + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Lemma + (requires (field_matches_tag tgs f t)) + (ensures (tag_of_field tgs f == t)) += () + +let assert_field_matches_tag + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Lemma + (requires (normalize_term (tag_of_field tgs f) == t)) + (ensures (field_matches_tag tgs f t)) += () + +(******************************************************************************) + +(* Stateful invariant *) + +let valid + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: GTot Type0 += + let tag_ptr = P.gfield p (tag_field l) in + let u_ptr = P.gfield p (union_field l) in + let t = P.gread h tag_ptr in + P.readable h tag_ptr /\ + List.Tot.mem t tgs /\ + (let f = field_of_tag #l tgs t in + P.is_active_union_field h u_ptr f) + +let valid_live + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: Lemma (requires (valid h tgs p)) + (ensures (P.live h p)) + [SMTPat (valid h tgs p)] += () + +(******************************************************************************) + +(* Operations *) + +let gread_tag + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: GTot UInt32.t += P.gread h (P.gfield p (tag_field l)) + +let read_tag + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) +: HST.Stack UInt32.t + (requires (fun h -> valid h tgs p)) + (ensures (fun h0 t h1 -> + h0 == h1 /\ + List.Tot.mem t tgs /\ + t == gread_tag h0 tgs p)) += P.read (P.field p (tag_field l)) + + +let gfield + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: GTot (p': P.pointer (P.typ_of_struct_field l f) { P.includes p p' }) += P.gufield (P.gfield p (union_field l)) f + + +let field + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: 
HST.Stack (P.pointer (P.typ_of_struct_field l f)) + (requires (fun h -> + valid h tgs p /\ + gread_tag h tgs p == normalize_term (tag_of_field tgs f) + )) + (ensures (fun h0 p' h1 -> + h0 == h1 /\ + field_matches_tag tgs f (gread_tag h0 tgs p) /\ + p' == gfield tgs p f + )) += P.ufield (P.field p (union_field l)) f + +// We could also require the user to manually provide the integer tagged. I +// claim it should not be needed since we need to normalise/inline write before +// extraction anyway + +let write + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (v: P.type_of_typ (P.typ_of_struct_field l f)) +: HST.Stack unit + (requires (fun h -> + P.live h p + )) + (ensures (fun h0 _ h1 -> + P.live h0 p /\ P.live h1 p /\ + P.modifies_1 p h0 h1 /\ + P.readable h1 p /\ + valid h1 tgs p /\ + gread_tag h1 tgs p == normalize_term (tag_of_field tgs f) /\ + field_matches_tag tgs f (gread_tag h1 tgs p) /\ + P.gread h1 (gfield tgs p f) == v + )) += + let tag_ptr = P.field p (tag_field l) in + let u_ptr = P.field p (union_field l) in + let t = tag_of_field #l tgs f in + P.write tag_ptr t; + let h11 = HST.get () in + P.write (P.ufield u_ptr f) v; + let h1 = HST.get () in + // SMTPats for this lemma do not seem to trigger? +// P.no_upd_lemma_1 h11 h1 u_ptr tag_ptr; + assert (P.readable h1 tag_ptr); + assert (P.readable h1 u_ptr); + P.readable_struct_fields_readable_struct h1 p; + let uf = P.ufield u_ptr f in + P.is_active_union_field_includes_readable #l h1 u_ptr f uf; + assert (P.is_active_union_field #l h1 u_ptr f) + +let write_tag + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: HST.Stack unit + (requires (fun h -> + valid h tgs p + )) + (ensures (fun h0 _ h1 -> + valid h0 tgs p /\ valid h1 tgs p + /\ P.modifies_1 p h0 h1 + /\ gread_tag h1 tgs p == normalize_term (tag_of_field tgs f) + /\ field_matches_tag tgs f (gread_tag h1 tgs p) + )) += + let tag_ptr = P.field p (tag_field l) in + let u_ptr : P.pointer (P.TUnion l) = P.field p (union_field l) in + let t = tag_of_field #l tgs f in + P.write tag_ptr t; + P.write_union_field u_ptr f + +(******************************************************************************) + +(* Lemmas *) + +let includes_gfield + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: Lemma + (requires True) + (ensures (P.includes p (gfield tgs p f))) + [SMTPat (P.includes p (gfield tgs p f))] += () + +let live_gfield #l tgs p f h = () + +let modifies_1_valid + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) + (#t': P.typ) + (p': P.pointer t') +: Lemma + (requires ( + valid h0 tgs p /\ + field_matches_tag tgs f (gread_tag h0 tgs p) /\ + P.modifies_1 (gfield tgs p f) h0 h1 /\ + P.includes (gfield tgs p f) p' /\ + P.readable h1 p' + )) + (ensures (valid h1 tgs p)) + [SMTPat (valid #l h0 tgs p); SMTPat (P.modifies_1 (gfield #l tgs p f) h0 h1); + SMTPat (P.includes #_ #t' (gfield #l tgs p f) p')] += + let u_ptr = P.gfield p (union_field l) in + P.is_active_union_field_includes_readable h1 u_ptr f p' + +let modifies_1_field_tag + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) +: Lemma + (requires ( + valid h0 tgs p /\ + P.modifies_1 (gfield tgs p f) h0 h1 + )) + (ensures (gread_tag h1 tgs p == gread_tag h0 tgs p)) + [SMTPat (valid #l h0 tgs p); SMTPat (P.modifies_1 (gfield #l tgs p f) h0 h1)] += () + +let modifies_1_field + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer 
(typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) +: Lemma + (requires (valid h0 tgs p /\ P.modifies_1 (gfield tgs p f) h0 h1)) + (ensures (P.modifies_1 p h0 h1)) + [SMTPat (valid #l h0 tgs p); SMTPat (P.modifies_1 (gfield #l tgs p f) h0 h1)] += () + +let modifies_1_field_tag_equal + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) +: Lemma + (requires (valid h0 tgs p /\ P.modifies_1 (gfield tgs p f) h0 h1)) + (ensures (gread_tag h0 tgs p == gread_tag h1 tgs p)) + [SMTPat (valid h0 tgs p); SMTPat (gread_tag h1 tgs p); SMTPat (gfield tgs p f)] += () + +let readable_intro + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h: HS.mem) +: Lemma + (requires ( + valid h tgs p /\ + P.readable h (gfield tgs p f) + )) + (ensures (P.readable h p)) + [SMTPat (valid #l h tgs p); SMTPat (P.readable h (gfield #l tgs p f))] += P.readable_struct_fields_readable_struct h p + +let readable_field + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h: HS.mem) +: Lemma + (requires ( + valid h tgs p /\ P.readable h p /\ + field_matches_tag tgs f (gread_tag h tgs p) + )) + (ensures (P.readable h (gfield tgs p f))) + [SMTPat (P.readable h (gfield tgs p f))] += () + +(******************************************************************************) + +(* Logical representation of a tagged union. +*) + +let raw_get_tag (#l: P.union_typ) (tu: raw l) +: Tot UInt32.t += + P.struct_sel #(typ_l l) tu (tag_field l) + +let raw_get_field (#l: P.union_typ) (tu: raw l) +: GTot (P.struct_field l) += + P.union_get_key #l (P.struct_sel #(typ_l l) tu (union_field l)) + +let raw_get_value (#l: P.union_typ) (tu: raw l) (f: P.struct_field l) +: Pure (P.type_of_typ (P.typ_of_struct_field l f)) + (requires (raw_get_field tu == f)) + (ensures (fun _ -> True)) += + let u : P.union l = P.struct_sel #(typ_l l) tu (union_field l) in + P.union_get_value u f + +(* Lemma: "valid p ==> matching_tags (gread p)" *) + +let valid_matching_tags + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: Lemma + (requires (valid h tgs p)) + (ensures (matching_tags (P.gread h p) tgs)) + [SMTPatOr [[SMTPat (valid h tgs p)]; [SMTPat (matching_tags (P.gread h p) tgs)]]] += () + + +// Not sure if useful +(* +let read + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: HST.Stack (t l tgs) + (requires (fun h -> + P.live h p /\ P.readable h p /\ + valid h tgs p)) + (ensures (fun h0 tu h1 -> + h0 == h1 /\ + (let raw_tu : raw l = tu in + raw_tu == P.gread h0 p) + )) += P.read p +*) diff --git a/stage0/ulib/legacy/FStar.TaggedUnion.fsti b/stage0/ulib/legacy/FStar.TaggedUnion.fsti new file mode 100644 index 00000000000..f67223524df --- /dev/null +++ b/stage0/ulib/legacy/FStar.TaggedUnion.fsti @@ -0,0 +1,452 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module FStar.TaggedUnion + +module P = FStar.Pointer +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST + +(** Code + + The code of a tagged union with fields `l` is `typ l` +*) + +val typ (l: P.union_typ) : P.typ + +(******************************************************************************) + +(* Tagging, at the logical level + + `tags l` defines "physical tags" (i.e. integers) for the fields of `l`. +*) + +let tags' (l: P.struct_typ') : Tot Type0 = + tl: list UInt32.t { + List.Tot.length tl == List.Tot.length l /\ + List.Tot.noRepeats tl + } + +let tags (l: P.union_typ) : Tot Type0 = + tags' l.P.fields + +(* Get a field from its physical tag. *) +let rec field_of_tag' + (#l: P.struct_typ') + (tgs: tags' l) + (t: UInt32.t) +: Pure (P.struct_field' l) + (requires (List.Tot.mem t tgs)) + (ensures (fun _ -> True)) += let ((f, _) :: l') = l in + let (t' :: tgs') = tgs in + if t = t' then f + else ( + assert (Cons? l'); + let ff' : string = field_of_tag' #l' tgs' t in + ff' + ) + +let field_of_tag + (#l: P.union_typ) + (tgs: tags l) + (t: UInt32.t) +: Pure (P.struct_field l) + (requires (List.Tot.mem t tgs)) + (ensures (fun _ -> True)) += field_of_tag' tgs t + +(* Get the physical tag corresponding to a field. *) +let rec tag_of_field' + (#l: P.struct_typ') + (tgs: tags' l) + (f: P.struct_field' l) +: Pure UInt32.t + (requires True) + (ensures (fun t -> List.Tot.mem t tgs)) += let ((f', _) :: l') = l in + let (t :: tgs') = tgs in + if f = f' then t + else ( + assert (Cons? l'); + let ff : string = f in + tag_of_field' #l' tgs' ff + ) + +let tag_of_field + (#l: P.union_typ) + (tgs: tags l) + (f: P.struct_field l) +: Pure UInt32.t + (requires True) + (ensures (fun t -> List.Tot.mem t tgs)) += tag_of_field' tgs f + +(* Abstract predicate providing a proof that some field matches some tag. + + It is directly implemented using `tag_of_field`, but is useful on its own: + typically, we want to only use `tag_of_field` (or `field_of_tag`) at + normalization time, because unfolding them in Z3 does not scale. However, we + still want to require that some field matches some tag in lemmas + automatically used by Z3 - and at that stage, the normalizer cannot run. + + The strategy is therefore for operations that are called by the user to use + `tag_of_field`/`field_of_tag` in a `normalize_term`, and provide as a + post-condition that `field_matches_tag` holds. This fact can then be used by + lemmas triggered in Z3. 
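+
+   As a purely illustrative sketch (the field name "as_int" and the value `v`
+   are hypothetical, not part of this interface), a caller would write:
+
+     write tgs p "as_int" v;
+     let q = field tgs p "as_int" in ...
+
+   The call to `field` is accepted because `write`'s post-condition equates
+   `gread_tag h1 tgs p` with `normalize_term (tag_of_field tgs "as_int")`,
+   which the normalizer reduces to a concrete tag; Z3 itself only ever sees
+   the abstract `field_matches_tag` fact.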
+*) +val field_matches_tag + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Tot Type0 + +val field_of_tag_of_field + (#l: P.union_typ) + (tgs: tags l) + (f: P.struct_field l) +: Lemma (field_of_tag #l tgs (tag_of_field #l tgs f) == f) + [SMTPat (field_of_tag #l tgs (tag_of_field #l tgs f))] + +val tag_of_field_of_tag + (#l: P.union_typ) + (tgs: tags l) + (t: UInt32.t) +: Lemma + (requires (List.Tot.mem t tgs)) + (ensures ( + List.Tot.mem t tgs /\ + tag_of_field #l tgs (field_of_tag #l tgs t) == t + )) + [SMTPat (tag_of_field #l tgs (field_of_tag #l tgs t))] + +val field_matches_tag_intro + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Lemma + (requires (tag_of_field tgs f == t)) + (ensures (field_matches_tag tgs f t)) + +val field_matches_tag_elim + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Lemma + (requires (field_matches_tag tgs f t)) + (ensures (tag_of_field tgs f == t)) + +val assert_field_matches_tag + (#l: P.union_typ) (tgs: tags l) + (f: P.struct_field l) (t: UInt32.t) +: Lemma + (requires (normalize_term (tag_of_field tgs f) == t)) + (ensures (field_matches_tag tgs f t)) + +(******************************************************************************) + +(* Stateful invariant + + `valid h tgs p` states that p points to a tagged union: + - which physical tag is readable and valid wrt `tgs` + - which union has an active field corresponding to its physical tag +*) + +val valid + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: GTot Type0 + +val valid_live + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: Lemma (requires (valid h tgs p)) + (ensures (P.live h p)) + [SMTPat (valid h tgs p)] + + +(******************************************************************************) + +(* Operations *) + +val gread_tag + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: GTot UInt32.t + +val read_tag + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) +: HST.Stack UInt32.t + (requires (fun h -> valid h tgs p)) + (ensures (fun h0 t h1 -> + h0 == h1 /\ + List.Tot.mem t tgs /\ + t == gread_tag h0 tgs p)) + + +val gfield + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: GTot (p': P.pointer (P.typ_of_struct_field l f) { P.includes p p' }) + + +val field + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: HST.Stack (P.pointer (P.typ_of_struct_field l f)) + (requires (fun h -> + valid h tgs p /\ + gread_tag h tgs p == normalize_term (tag_of_field tgs f) + )) + (ensures (fun h0 p' h1 -> + h0 == h1 /\ + field_matches_tag tgs f (gread_tag h0 tgs p) /\ + p' == gfield tgs p f + )) + + +val write + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (v: P.type_of_typ (P.typ_of_struct_field l f)) +: HST.Stack unit + (requires (fun h -> + P.live h p + )) + (ensures (fun h0 _ h1 -> + P.live h0 p /\ P.live h1 p /\ + P.modifies_1 p h0 h1 /\ + P.readable h1 p /\ + valid h1 tgs p /\ + gread_tag #l h1 tgs p == normalize_term (tag_of_field tgs f) /\ + field_matches_tag tgs f (gread_tag h1 tgs p) /\ + P.gread h1 (gfield tgs p f) == v + )) + +val write_tag + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: HST.Stack unit + (requires (fun h -> + valid h tgs p + )) + (ensures (fun h0 _ h1 -> + valid h0 tgs p /\ valid h1 tgs p + /\ P.modifies_1 p h0 h1 + /\ gread_tag #l h1 tgs p == normalize_term 
(tag_of_field tgs f) + /\ field_matches_tag tgs f (gread_tag h1 tgs p) + )) + +(******************************************************************************) + +(* Lemmas *) + +val includes_gfield + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: Lemma + (requires True) + (ensures (P.includes p (gfield tgs p f))) + +let includes_gfield_gen + (#t: P.typ) + (q: P.pointer t) + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) +: Lemma + (requires (P.includes q p)) + (ensures (P.includes q (gfield tgs p f))) + [SMTPat (P.includes q (gfield tgs p f))] += includes_gfield tgs p f; + P.includes_trans q p (gfield tgs p f) + +val live_gfield + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h: HS.mem) +: Lemma + (P.live h (gfield tgs p f) <==> P.live h p) + [SMTPat (P.live h (gfield tgs p f))] + +val modifies_1_valid + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) + (#t': P.typ) + (p': P.pointer t') +: Lemma + (requires ( + valid h0 tgs p /\ + field_matches_tag tgs f (gread_tag h0 tgs p) /\ + P.modifies_1 (gfield tgs p f) h0 h1 /\ + P.includes (gfield tgs p f) p' /\ + P.readable h1 p' + )) + (ensures (valid h1 tgs p)) + [SMTPat (valid #l h0 tgs p); + SMTPat (P.readable h1 p'); + SMTPat (gfield #l tgs p f)] + +val modifies_1_field_tag + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) +: Lemma + (requires ( + valid h0 tgs p /\ + P.modifies_1 (gfield tgs p f) h0 h1 + )) + (ensures (gread_tag h1 tgs p == gread_tag h0 tgs p)) + [SMTPat (valid #l h0 tgs p); SMTPat (gfield tgs p f); SMTPat (gread_tag h1 tgs p)] + +val modifies_1_field + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) +: Lemma + (requires (valid h0 tgs p /\ P.modifies_1 (gfield tgs p f) h0 h1)) + (ensures (P.modifies_1 p h0 h1)) + [SMTPat (valid #l h0 tgs p); SMTPat (P.modifies_1 (gfield #l tgs p f) h0 h1)] + +val modifies_1_field_tag_equal + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h0 h1: HS.mem) +: Lemma + (requires (valid h0 tgs p /\ P.modifies_1 (gfield tgs p f) h0 h1)) + (ensures (gread_tag h0 tgs p == gread_tag h1 tgs p)) + [SMTPat (valid h0 tgs p); SMTPat (gread_tag h1 tgs p); SMTPat (gfield tgs p f)] + +val readable_intro + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h: HS.mem) +: Lemma + (requires ( + valid h tgs p /\ + P.readable h (gfield tgs p f) + )) + (ensures (P.readable h p)) + [SMTPat (valid #l h tgs p); SMTPat (P.readable h (gfield #l tgs p f))] + + +val readable_field + (#l: P.union_typ) + (tgs: tags l) + (p: P.pointer (typ l)) + (f: P.struct_field l) + (h: HS.mem) +: Lemma + (requires ( + valid h tgs p /\ P.readable h p /\ + field_matches_tag tgs f (gread_tag h tgs p) + )) + (ensures (P.readable h (gfield tgs p f))) + [SMTPat (P.readable h (gfield #l tgs p f))] + + +(******************************************************************************) + +(* Logical representation of a tagged union. 
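+
+   `raw l` is the underlying struct value: a physical tag together with a
+   union value. `matching_tags` states that this tag belongs to `tgs` and
+   designates the union's active field, and `t l tgs` is the type of raw
+   values satisfying it, so that `get_tag` below returns a tag known to be
+   in `tgs` and to match `get_field`.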
+*) + +let raw (l: P.union_typ) : Tot Type0 = P.type_of_typ (typ l) + +val raw_get_tag (#l: P.union_typ) (tu: raw l): Tot UInt32.t +val raw_get_field (#l: P.union_typ) (tu: raw l): GTot (P.struct_field l) + +val raw_get_value (#l: P.union_typ) (tu: raw l) (f: P.struct_field l) +: Pure (P.type_of_typ (P.typ_of_struct_field l f)) + (requires (raw_get_field tu == f)) + (ensures (fun _ -> True)) + +let matching_tags + (#l: P.union_typ) + (raw_tu: raw l) + (tgs: tags l) +: Tot Type += + let t = raw_get_tag raw_tu in + List.Tot.mem t tgs /\ + field_of_tag tgs t == raw_get_field raw_tu + + +let t (l: P.union_typ) (tgs: tags l) : Tot Type0 = + tu : raw l { matching_tags tu tgs } + +let get_field (#l: P.union_typ) (#tgs: tags l) (tu: t l tgs) +: GTot (P.struct_field l) += + raw_get_field tu + +let get_tag (#l: P.union_typ) (#tgs: tags l) (tu: t l tgs) +: Pure (UInt32.t) + (requires True) + (ensures (fun t -> + List.Tot.mem t tgs /\ + t == tag_of_field tgs (get_field tu))) += + raw_get_tag #l tu + +let get_value + (#l: P.union_typ) (#tgs: tags l) + (tu: t l tgs) + (f: P.struct_field l) +: Pure (P.type_of_typ (P.typ_of_struct_field l f)) + (requires (get_field tu == f)) + (ensures (fun _ -> True)) += + raw_get_value #l tu f + +(* Lemma: "valid p ==> matching_tags (gread p)" *) + +val valid_matching_tags + (#l: P.union_typ) + (h: HS.mem) + (tgs: tags l) + (p: P.pointer (typ l)) +: Lemma + (requires (valid h tgs p)) + (ensures (matching_tags (P.gread h p) tgs)) + [SMTPatOr [[SMTPat (valid h tgs p)]; [SMTPat (matching_tags (P.gread h p) tgs)]]] diff --git a/stage0/ulib/legacy/FStar.TwoLevelHeap.fst b/stage0/ulib/legacy/FStar.TwoLevelHeap.fst new file mode 100644 index 00000000000..0e9cd100361 --- /dev/null +++ b/stage0/ulib/legacy/FStar.TwoLevelHeap.fst @@ -0,0 +1,88 @@ +(* + Copyright 2008-2014 Nikhil Swamy and Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) +module FStar.TwoLevelHeap +open FStar.Map +open FStar.Heap +type rid = int //region id +type t = Map.t rid heap + +let st_pre = st_pre_h t +let st_post' (a:Type) (pre:Type) = st_post_h' t a pre +let st_post (a:Type) = st_post_h t a +let st_wp (a:Type) = st_wp_h t a +new_effect STATE = STATE_h t +effect State (a:Type) (wp:st_wp a) = + STATE a wp +effect ST (a:Type) (pre:st_pre) (post: (h0:t -> Tot (st_post' a (pre h0)))) = + STATE a + (fun (p:st_post a) (h:t) -> pre h /\ (forall a h1. 
pre h /\ post h a h1 ==> p a h1)) (* WP *) +effect St (a:Type) = + ST a (fun h -> True) (fun h0 r h1 -> True) +sub_effect + DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:t) -> wp (fun a -> p a h) + +type rref (id:rid) (a:Type) = ref a +val as_ref : #a:Type -> #id:rid -> r:rref id a -> Tot (ref a) +let as_ref #a #id r = r + +private val ref_as_rref : #a:Type -> i:rid -> r:ref a -> Tot (rref i a) +let ref_as_rref #a i r = r + +val lemma_as_ref_inj: #a:Type -> #i:rid -> r:rref i a + -> Lemma (requires (True)) + (ensures ((ref_as_rref i (as_ref r) == r))) + [SMTPat (as_ref r)] +let lemma_as_ref_inj #a #i r = () + +assume val new_region: unit -> ST rid + (requires (fun m -> True)) + (ensures (fun (m0:t) (id:rid) (m1:t) -> exists (h:heap). not(Map.contains m0 id) /\ m1==Map.upd m0 id h)) + +let sel (#a:Type) (#i:rid) (m:t) (r:rref i a) = Heap.sel (Map.sel m i) (as_ref r) +let upd (#a:Type) (#i:rid) (m:t) (r:rref i a) (v:a) = Map.upd m i (Heap.upd (Map.sel m i) (as_ref r) v) + +assume val ralloc: #a:Type -> i:rid -> init:a -> ST (rref i a) + (requires (fun m -> Map.contains m i)) + (ensures (fun m0 x m1 -> + let region_i = Map.sel m0 i in + (~ (Heap.contains region_i (as_ref x)) + /\ m1==Map.upd m0 i (Heap.upd region_i (as_ref x) init)))) + +assume val op_Colon_Equals: #a:Type -> #i:rid -> r:rref i a -> v:a -> ST unit + (requires (fun m -> True)) + (ensures (fun m0 _u m1 -> m1== Map.upd m0 i (Heap.upd (Map.sel m0 i) (as_ref r) v))) + +assume val op_Bang:#a:Type -> #i:rid -> r:rref i a -> ST a + (requires (fun m -> True)) + (ensures (fun m0 x m1 -> m1==m0 /\ x==Heap.sel (Map.sel m0 i) (as_ref r))) + +assume val get: unit -> ST t + (requires (fun m -> True)) + (ensures (fun m0 x m1 -> m0==x /\ m1==m0)) + +let modifies (s:Set.set rid) (m0:t) (m1:t) = + Map.equal m1 (Map.concat m1 (Map.restrict (Set.complement s) m0)) + +let fresh_region (i:rid) (m0:t) (m1:t) = + not (Map.contains m0 i) + /\ Map.contains m1 i + +let contains_ref (#a:Type) (#i:rid) (r:rref i a) (m:t) = + Map.contains m i /\ Heap.contains (Map.sel m i) (as_ref r) + +let fresh_rref (#a:Type) (#i:rid) (r:rref i a) (m0:t) (m1:t) = + ~ (Heap.contains (Map.sel m0 i) (as_ref r)) + /\ (Heap.contains (Map.sel m1 i) (as_ref r)) diff --git a/stage0/ulib/legacy/LowStar.BufferCompat.fst b/stage0/ulib/legacy/LowStar.BufferCompat.fst new file mode 100644 index 00000000000..6a037223b83 --- /dev/null +++ b/stage0/ulib/legacy/LowStar.BufferCompat.fst @@ -0,0 +1,104 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module LowStar.BufferCompat +include LowStar.Buffer + +module HS = FStar.HyperStack +module HST = FStar.HyperStack.ST +module U32 = FStar.UInt32 +module G = FStar.Ghost +module Seq = FStar.Seq + +unfold +let rcreate_post_mem_common + (#a: Type) + (r: HS.rid) + (len: nat) + (b: buffer a) + (h0 h1: HS.mem) + (s:Seq.seq a) += alloc_post_mem_common b h0 h1 s /\ frameOf b == r /\ length b == len + +inline_for_extraction +let rfree + (#a: Type) + (b: buffer a) +: HST.ST unit + (requires (fun h0 -> live h0 b /\ freeable b)) + (ensures (fun h0 _ h1 -> + (not (g_is_null b)) /\ + Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\ + (HS.get_tip h1) == (HS.get_tip h0) /\ + modifies (loc_addr_of_buffer b) h0 h1 /\ + HS.live_region h1 (frameOf b) + )) += free b + +inline_for_extraction +let rcreate + (#a: Type) + (r: HS.rid) + (init: a) + (len: U32.t) +: HST.ST (buffer a) + (requires (fun h -> HST.is_eternal_region r /\ U32.v len > 0)) + (ensures (fun h b h' -> + rcreate_post_mem_common r (U32.v len) b h h' (Seq.create (U32.v len) init) /\ + recallable b + )) += let b = gcmalloc r init len in + b + +inline_for_extraction +let rcreate_mm + (#a: Type) + (r: HS.rid) + (init: a) + (len: U32.t) +: HST.ST (buffer a) + (requires (fun h -> HST.is_eternal_region r /\ U32.v len > 0)) + (ensures (fun h b h' -> + rcreate_post_mem_common r (U32.v len) b h h' (Seq.create (U32.v len) init) /\ + freeable b + )) += malloc r init len + +inline_for_extraction +let create + (#a: Type) + (init: a) + (len: U32.t) +: HST.StackInline (buffer a) + (requires (fun h -> U32.v len > 0)) + (ensures (fun h b h' -> + rcreate_post_mem_common (HS.get_tip h) (U32.v len) b h h' (Seq.create (U32.v len) init) + )) += alloca init len + +unfold let createL_pre (#a: Type0) (init: list a) : GTot Type0 = + alloca_of_list_pre init + +let createL + (#a: Type0) + (init: list a) +: HST.StackInline (buffer a) + (requires (fun h -> createL_pre #a init)) + (ensures (fun h b h' -> + let len = FStar.List.Tot.length init in + rcreate_post_mem_common (HS.get_tip h) len b h h' (Seq.seq_of_list init) /\ + length b == normalize_term (List.Tot.length init) + )) += alloca_of_list init diff --git a/stage0/ulib/legacy/LowStar.ToFStarBuffer.fst b/stage0/ulib/legacy/LowStar.ToFStarBuffer.fst new file mode 100644 index 00000000000..43ed7628305 --- /dev/null +++ b/stage0/ulib/legacy/LowStar.ToFStarBuffer.fst @@ -0,0 +1,432 @@ +(* + Copyright 2008-2018 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*) +module LowStar.ToFStarBuffer + +(* WARNING: FOR TRANSITIONAL PURPOSES ONLY *) +#set-options "--ext 'context_pruning='" +module Old = FStar.Buffer +module OldM = FStar.Modifies +module New = LowStar.Buffer +module NewM = LowStar.Modifies +module HST = FStar.HyperStack.ST +module HS = FStar.HyperStack +module U32 = FStar.UInt32 + +assume val new_to_old_ghost (#t: Type0) (b: New.buffer t) : GTot (Old.buffer t) +assume val old_to_new_ghost (#t: Type0) (b: Old.buffer t) : GTot (New.buffer t) + +assume val new_to_old_st (#t: Type0) (b: New.buffer t) : HST.Stack (b' : Old.buffer t { b' == new_to_old_ghost b } ) + (requires (fun h -> New.live h b)) + (ensures (fun h b' h' -> h' == h)) + +assume val old_to_new_st (#t: Type0) (b: Old.buffer t) : HST.Stack (b' : New.buffer t { b' == old_to_new_ghost b }) + (requires (fun h -> Old.live h b)) + (ensures (fun h b' h' -> h' == h)) + +assume val new_to_old_to_new (#t: Type0) (b: New.buffer t) : Lemma + (old_to_new_ghost (new_to_old_ghost b) == b) + [SMTPat (old_to_new_ghost (new_to_old_ghost b))] + +assume val old_to_new_to_old (#t: Type0) (b: Old.buffer t) : Lemma + (new_to_old_ghost (old_to_new_ghost b) == b) + [SMTPat (new_to_old_ghost (old_to_new_ghost b))] + +assume val live_new_to_old (#t: Type0) (h: HS.mem) (b: New.buffer t) : Lemma + (Old.live h (new_to_old_ghost b) <==> New.live h b) + [SMTPat (Old.live h (new_to_old_ghost b))] + +assume val live_old_to_new (#t: Type0) (h: HS.mem) (b: Old.buffer t) : Lemma + (New.live h (old_to_new_ghost b) <==> Old.live h b) + [SMTPat (New.live h (old_to_new_ghost b))] + +assume val frameOf_new_to_old (#t: Type0) (b: New.buffer t) : Lemma + (Old.frameOf (new_to_old_ghost b) == New.frameOf b) + [SMTPat (Old.frameOf (new_to_old_ghost b))] + +assume val frameOf_old_to_new (#t: Type0) (b: Old.buffer t) : Lemma + (New.frameOf (old_to_new_ghost b) == Old.frameOf b) + [SMTPat (New.frameOf (old_to_new_ghost b))] + +assume val as_addr_new_to_old (#t: Type0) (b: New.buffer t) : Lemma + (Old.as_addr (new_to_old_ghost b) == New.as_addr b) + [SMTPat (Old.as_addr (new_to_old_ghost b))] + +assume val as_addr_old_to_new (#t: Type0) (b: Old.buffer t) : Lemma + (New.as_addr (old_to_new_ghost b) == Old.as_addr b) + [SMTPat (New.as_addr (old_to_new_ghost b))] + +assume val length_new_to_old (#t: Type0) (b: New.buffer t) : Lemma + (Old.length (new_to_old_ghost b) == New.length b) + [SMTPat (Old.length (new_to_old_ghost b))] + +assume val length_old_to_new (#t: Type0) (b: Old.buffer t) : Lemma + (New.length (old_to_new_ghost b) == Old.length b) + [SMTPat (New.length (old_to_new_ghost b))] + +assume val as_seq_new_to_old (#t: Type0) (h: HS.mem) (b: New.buffer t) : Lemma + (Old.as_seq h (new_to_old_ghost b) == New.as_seq h b) + [SMTPat (Old.as_seq h (new_to_old_ghost b))] + +assume val as_seq_old_to_new (#t: Type0) (h: HS.mem) (b: Old.buffer t) : Lemma + (New.as_seq h (old_to_new_ghost b) == Old.as_seq h b) + [SMTPat (New.as_seq h (old_to_new_ghost b))] + +assume val gsub_new_to_old (#t: Type0) (b: New.buffer t) (i: U32.t) (len: U32.t) : Lemma + (requires (U32.v i + U32.v len <= New.length b)) + (ensures (Old.sub (new_to_old_ghost b) i len == new_to_old_ghost (New.gsub b i len))) + [SMTPatOr [ + [SMTPat (Old.sub (new_to_old_ghost b) i len)]; + [SMTPat (new_to_old_ghost (New.gsub b i len))]; + ]] + +assume val gsub_old_to_new (#t: Type0) (b: Old.buffer t) (i: U32.t) (len: U32.t) : Lemma + (requires (U32.v i + U32.v len <= Old.length b)) + (ensures (New.gsub (old_to_new_ghost b) i len == old_to_new_ghost (Old.sub b i len))) + 
[SMTPatOr [ + [SMTPat (New.gsub (old_to_new_ghost b) i len)]; + [SMTPat (old_to_new_ghost (Old.sub b i len))]; + ]] + +assume val new_to_old_includes_left (#t: Type0) (b1: New.buffer t) (b2: Old.buffer t) : Lemma + ((new_to_old_ghost b1 `Old.includes` b2) <==> (b1 `New.includes` (old_to_new_ghost b2))) + [SMTPatOr [ + [SMTPat (new_to_old_ghost b1 `Old.includes` b2)]; + [SMTPat (b1 `New.includes` old_to_new_ghost b2)]; + ]] + +assume val new_to_old_includes_right (#t: Type0) (b1: Old.buffer t) (b2: New.buffer t) : Lemma + ((b1 `Old.includes` (new_to_old_ghost b2)) <==> (old_to_new_ghost b1 `New.includes` b2)) + [SMTPatOr [ + [SMTPat (b1 `Old.includes` new_to_old_ghost b2)]; + [SMTPat (old_to_new_ghost b1 `New.includes` b2)]; + ]] + +assume val new_to_old_disjoint (#t1 #t2: Type0) (b1: New.buffer t1) (b2: Old.buffer t2) : Lemma + ((new_to_old_ghost b1 `Old.disjoint` b2) <==> (b1 `New.disjoint` (old_to_new_ghost b2))) + [SMTPatOr [ + [SMTPat (new_to_old_ghost b1 `Old.disjoint` b2)]; + [SMTPat (b1 `New.disjoint` old_to_new_ghost b2)]; + ]] + +(* The modifies clause *) + +module M = FStar.ModifiesGen + +noextract +let old_and_new_aloc (is_new: bool) : Tot M.aloc_t = + if is_new then M.raise_aloc New.abuffer else OldM.cloc_aloc + +noextract +let old_and_new_cl (is_new: bool) : Tot (M.cls (old_and_new_aloc is_new)) = + if is_new then M.raise_cls NewM.cloc_cls else OldM.cloc_cls + +noextract +let old_and_new_cl_union : M.cls (M.aloc_union old_and_new_aloc) = M.cls_union old_and_new_cl + +let old_to_union_loc (l: OldM.loc) : GTot (M.loc old_and_new_cl_union) = + M.union_loc_of_loc old_and_new_cl false (OldM.cloc_of_loc l) + +let new_to_union_loc (l: NewM.loc) : GTot (M.loc old_and_new_cl_union) = + M.union_loc_of_loc old_and_new_cl true (M.raise_loc (NewM.cloc_of_loc l)) + +let old_to_new_modifies (old_l: OldM.loc) (new_l: NewM.loc) (h h' : HS.mem) : Lemma + (requires (OldM.modifies old_l h h' /\ old_to_union_loc old_l == new_to_union_loc new_l)) + (ensures (NewM.modifies new_l h h')) += OldM.modifies_to_cloc old_l h h'; + M.modifies_union_loc_of_loc old_and_new_cl false (OldM.cloc_of_loc old_l) h h'; + M.modifies_union_loc_of_loc old_and_new_cl true (M.raise_loc (NewM.cloc_of_loc new_l)) h h'; + M.modifies_raise_loc (NewM.cloc_of_loc new_l) h h'; + NewM.modifies_to_cloc new_l h h' + +let old_to_union_loc_none : squash (old_to_union_loc OldM.loc_none == M.loc_none) = + OldM.cloc_of_loc_none (); + M.union_loc_of_loc_none old_and_new_cl false + +let new_to_union_loc_none : squash (new_to_union_loc NewM.loc_none == M.loc_none) = + NewM.cloc_of_loc_none (); + M.raise_loc_none #_ #NewM.cloc_cls; + M.union_loc_of_loc_none old_and_new_cl true + +let old_to_union_loc_union (old1 old2: OldM.loc) : Lemma + (old_to_union_loc (old1 `OldM.loc_union` old2) == old_to_union_loc old1 `M.loc_union` old_to_union_loc old2) + [SMTPat (old_to_union_loc (old1 `OldM.loc_union` old2))] += OldM.cloc_of_loc_union old1 old2; + M.union_loc_of_loc_union old_and_new_cl false (OldM.cloc_of_loc old1) (OldM.cloc_of_loc old2) + +let new_to_union_loc_union (new1 new2: NewM.loc) : Lemma + (new_to_union_loc (new1 `NewM.loc_union` new2) == new_to_union_loc new1 `M.loc_union` new_to_union_loc new2) + [SMTPat (new_to_union_loc (new1 `NewM.loc_union` new2))] += NewM.cloc_of_loc_union new1 new2; + M.raise_loc_union (NewM.cloc_of_loc new1) (NewM.cloc_of_loc new2); + M.union_loc_of_loc_union old_and_new_cl true (M.raise_loc (NewM.cloc_of_loc new1)) (M.raise_loc (NewM.cloc_of_loc new2)) + +let old_to_union_loc_addresses (preserve_liveness: 
bool) (r: HS.rid) (n: Set.set nat) : Lemma + (old_to_union_loc (OldM.loc_addresses preserve_liveness r n) == M.loc_addresses preserve_liveness r n) + [SMTPat (old_to_union_loc (OldM.loc_addresses preserve_liveness r n))] += OldM.cloc_of_loc_addresses preserve_liveness r n; + M.union_loc_of_loc_addresses old_and_new_cl false preserve_liveness r n + +let new_to_union_loc_addresses (preserve_liveness: bool) (r: HS.rid) (n: Set.set nat) : Lemma + (new_to_union_loc (NewM.loc_addresses preserve_liveness r n) == M.loc_addresses preserve_liveness r n) + [SMTPat (new_to_union_loc (NewM.loc_addresses preserve_liveness r n))] += NewM.cloc_of_loc_addresses preserve_liveness r n; + M.raise_loc_addresses u#0 u#0 #_ #NewM.cloc_cls preserve_liveness r n; + M.union_loc_of_loc_addresses old_and_new_cl true preserve_liveness r n + +let old_to_union_loc_regions (preserve_liveness: bool) (r: Set.set HS.rid) : Lemma + (old_to_union_loc (OldM.loc_regions preserve_liveness r) == M.loc_regions preserve_liveness r) + [SMTPat (old_to_union_loc (OldM.loc_regions preserve_liveness r))] += OldM.cloc_of_loc_regions preserve_liveness r; + M.union_loc_of_loc_regions old_and_new_cl false preserve_liveness r + +let new_to_union_loc_regions (preserve_liveness: bool) (r: Set.set HS.rid) : Lemma + (new_to_union_loc (NewM.loc_regions preserve_liveness r) == M.loc_regions preserve_liveness r) + [SMTPat (new_to_union_loc (NewM.loc_regions preserve_liveness r))] += NewM.cloc_of_loc_regions preserve_liveness r; + M.raise_loc_regions u#0 u#0 #_ #NewM.cloc_cls preserve_liveness r; + M.union_loc_of_loc_regions old_and_new_cl true preserve_liveness r + +let union_loc_to_new (l: M.loc old_and_new_cl_union) : GTot NewM.loc = + NewM.loc_of_cloc (M.lower_loc (M.loc_of_union_loc true l)) + +let union_loc_to_new_new_to_union_loc (l: NewM.loc) : Lemma + (union_loc_to_new (new_to_union_loc l) == l) + [SMTPat (union_loc_to_new (new_to_union_loc l))] += M.loc_of_union_loc_union_loc_of_loc old_and_new_cl true (M.raise_loc (NewM.cloc_of_loc l)); + M.lower_loc_raise_loc u#0 u#0 (NewM.cloc_of_loc l); + NewM.loc_of_cloc_of_loc l + +let union_loc_to_new_none : squash (union_loc_to_new M.loc_none == NewM.loc_none) = + M.loc_of_union_loc_none old_and_new_cl true; + M.lower_loc_none u#0 u#0 #_ #NewM.cloc_cls; + NewM.cloc_of_loc_none (); + NewM.loc_of_cloc_of_loc NewM.loc_none + +let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun y -> x == y)) = x + +let union_loc_to_new_union (l1 l2: M.loc old_and_new_cl_union) : Lemma + (union_loc_to_new (l1 `M.loc_union` l2) == union_loc_to_new l1 `NewM.loc_union` union_loc_to_new l2) + [SMTPat (union_loc_to_new (l1 `M.loc_union` l2))] += M.loc_of_union_loc_union old_and_new_cl true l1 l2; + let t : Type u#1 = M.loc (old_and_new_cl true) in + let i1 : t = M.loc_of_union_loc true l1 in + let i2 : t = M.loc_of_union_loc true l2 in + let j1 : M.loc (M.raise_cls NewM.cloc_cls) = coerce (M.loc (M.raise_cls NewM.cloc_cls)) i1 in + let j2 : M.loc (M.raise_cls u#0 u#0 NewM.cloc_cls) = coerce (M.loc (M.raise_cls NewM.cloc_cls)) i2 in + M.lower_loc_union u#0 u#0 j1 j2; + NewM.cloc_of_loc_union (NewM.loc_of_cloc (M.lower_loc j1)) (NewM.loc_of_cloc (M.lower_loc j2)); + NewM.loc_of_cloc_of_loc (NewM.loc_of_cloc (M.lower_loc j1) `NewM.loc_union` NewM.loc_of_cloc (M.lower_loc j2)) + +let union_loc_to_new_addresses (preserve_liveness: bool) (r: HS.rid) (n: Set.set nat) : Lemma + (union_loc_to_new (M.loc_addresses preserve_liveness r n) == NewM.loc_addresses preserve_liveness r n) + [SMTPat 
(union_loc_to_new (M.loc_addresses preserve_liveness r n))] += M.loc_of_union_loc_addresses old_and_new_cl true preserve_liveness r n; + M.lower_loc_addresses u#0 u#0 #_ #NewM.cloc_cls preserve_liveness r n; + NewM.cloc_of_loc_addresses preserve_liveness r n; + NewM.cloc_of_loc_of_cloc (M.loc_addresses preserve_liveness r n) + +let union_loc_to_new_regions (preserve_liveness: bool) (r: Set.set HS.rid) : Lemma + (union_loc_to_new (M.loc_regions preserve_liveness r) == NewM.loc_regions preserve_liveness r) + [SMTPat (union_loc_to_new (M.loc_regions preserve_liveness r))] += M.loc_of_union_loc_regions old_and_new_cl true preserve_liveness r; + M.lower_loc_regions u#0 u#0 #_ #NewM.cloc_cls preserve_liveness r; + NewM.cloc_of_loc_regions preserve_liveness r; + NewM.cloc_of_loc_of_cloc (M.loc_regions preserve_liveness r) + +let old_to_new_modifies' (old_l: OldM.loc) (h h' : HS.mem) : Lemma + (requires (OldM.modifies old_l h h' /\ new_to_union_loc (union_loc_to_new (old_to_union_loc old_l)) == old_to_union_loc old_l)) + (ensures (NewM.modifies (union_loc_to_new (old_to_union_loc old_l)) h h')) + [SMTPat (OldM.modifies old_l h h')] += old_to_new_modifies old_l (union_loc_to_new (old_to_union_loc old_l)) h h' + +assume val loc_buffer_new_to_old (#t: Type0) (b: New.buffer t) : Lemma + (old_to_union_loc (OldM.loc_buffer (new_to_old_ghost b)) == new_to_union_loc (NewM.loc_buffer b)) + [SMTPat (old_to_union_loc (OldM.loc_buffer (new_to_old_ghost b)))] + +let modifies_0_modifies (h h' : HS.mem) : Lemma + (requires (Old.modifies_0 h h')) + (ensures (NewM.modifies NewM.loc_none h h')) += () + +let modifies_1_modifies + (#t: Type) + (b: New.buffer t) + (h h' : HS.mem) +: Lemma + (requires (Old.modifies_1 (new_to_old_ghost b) h h')) + (ensures (NewM.modifies (NewM.loc_buffer b) h h')) += () + +let modifies_2_modifies + (#t1 #t2: Type) + (b1: New.buffer t1) + (b2: New.buffer t2) + (h h' : HS.mem) +: Lemma + (requires (Old.modifies_2 (new_to_old_ghost b1) (new_to_old_ghost b2) h h')) + (ensures (NewM.modifies (NewM.loc_union (NewM.loc_buffer b1) (NewM.loc_buffer b2)) h h')) += () + +(* Examples *) + +/// Basic example of mutating a new buffer by converting it first to +/// an old buffer. 
+/// +/// The spec shows that all three flavors of modifies clauses can be +/// proven and the precise contents are reflected into the new buffer +let ex1 (#a:Type) (b:New.buffer nat{New.length b > 0}) (b1:New.buffer a) + : HST.ST unit + (requires (fun h -> New.live h b /\ New.disjoint b b1 /\ New.live h b1)) + (ensures (fun h0 _ h1 -> + New.get h1 b 0 == 0 /\ + Old.get h1 (new_to_old_ghost b) 0 == 0 /\ + New.as_seq h0 b1 == New.as_seq h1 b1 /\ + NewM.modifies (NewM.loc_buffer b) h0 h1 /\ + OldM.modifies (OldM.loc_buffer (new_to_old_ghost b)) h0 h1 /\ + Old.modifies_1 (new_to_old_ghost b) h0 h1)) = + let old = new_to_old_st b in + Old.upd old 0ul 0 + +let new_eqb + (#a: eqtype) + (b1 b2: New.buffer a) + (len: U32.t) +: HST.Stack bool + (requires (fun h -> New.live h b1 /\ New.live h b2 /\ U32.v len <= New.length b1 /\ U32.v len <= New.length b2)) + (ensures (fun h res h' -> + h' == h /\ + (res <==> Seq.equal (New.as_seq h (New.gsub b1 0ul len)) (New.as_seq h (New.gsub b2 0ul len))) + )) += let b1' = new_to_old_st b1 in + let b2' = new_to_old_st b2 in + Old.eqb b1' b2' len + +let new_blit + (#t: Type) + (src: New.buffer t) + (idx_src: U32.t) + (dst: New.buffer t) + (idx_dst: U32.t) + (len: U32.t) +: HST.Stack unit + (requires (fun h -> + New.live h src /\ New.live h dst /\ New.disjoint src dst /\ + U32.v idx_src + U32.v len <= New.length src /\ + U32.v idx_dst + U32.v len <= New.length dst + )) + (ensures (fun h _ h' -> + NewM.modifies (NewM.loc_buffer dst) h h' /\ + New.live h' dst /\ + Seq.slice (New.as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) == + Seq.slice (New.as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len) /\ + Seq.slice (New.as_seq h' dst) 0 (U32.v idx_dst) == + Seq.slice (New.as_seq h dst) 0 (U32.v idx_dst) /\ + Seq.slice (New.as_seq h' dst) (U32.v idx_dst + U32.v len) (New.length dst) == + Seq.slice (New.as_seq h dst) (U32.v idx_dst + U32.v len) (New.length dst) + )) += let src' = new_to_old_st src in + let dst' = new_to_old_st dst in + Old.blit src' idx_src dst' idx_dst len + +let new_fill + (#t: Type) + (b: New.buffer t) + (z: t) + (len: U32.t) +: HST.Stack unit + (requires (fun h -> New.live h b /\ U32.v len <= New.length b)) + (ensures (fun h _ h' -> + NewM.modifies (NewM.loc_buffer b) h h' /\ + Seq.slice (New.as_seq h' b) 0 (U32.v len) == Seq.create (U32.v len) z /\ + Seq.slice (New.as_seq h' b) (U32.v len) (New.length b) == Seq.slice (New.as_seq h b) (U32.v len) (New.length b) + )) += let b' = new_to_old_st b in + Old.fill b' z len + + +(* Conversions in the other way round, to have old buffer code call into new buffer code. Those are more aggressive. 
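+
+   (The lemmas below are assumed, and their SMT patterns fire automatically,
+   e.g. on `NewM.modifies` facts, so new-style modifies and disjointness
+   hypotheses become available as old-style `Old.modifies_*` / `Old.disjoint`
+   facts without any explicit call from the user.)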
*) + +assume +val loc_disjoint_new_disjoint_old + (#t1 #t2: Type) + (b1: New.buffer t1) + (b2: New.buffer t2) +: Lemma + (requires (NewM.loc_disjoint (NewM.loc_buffer b1) (NewM.loc_buffer b2))) + (ensures (Old.disjoint (new_to_old_ghost b1) (new_to_old_ghost b2))) + [SMTPat (Old.disjoint (new_to_old_ghost b1) (new_to_old_ghost b2))] + +assume +val modifies_modifies_0 + (h h' : HS.mem) +: Lemma + (requires (NewM.modifies NewM.loc_none h h')) + (ensures (Old.modifies_0 h h')) + [SMTPat (NewM.modifies NewM.loc_none h h')] + +assume +val modifies_modifies_1 + (#t: Type) + (b: Old.buffer t) + (h h' : HS.mem) +: Lemma + (requires (NewM.modifies (NewM.loc_buffer (old_to_new_ghost b)) h h')) + (ensures (Old.modifies_1 b h h')) + [SMTPat (NewM.modifies (NewM.loc_buffer (old_to_new_ghost b)) h h')] + +assume +val modifies_modifies_2 + (#t1 #t2: Type) + (b1: Old.buffer t1) + (b2: Old.buffer t2) + (h h' : HS.mem) +: Lemma + (requires (NewM.modifies (NewM.loc_buffer (old_to_new_ghost b1) `NewM.loc_union` NewM.loc_buffer (old_to_new_ghost b2)) h h')) + (ensures (Old.modifies_2 b1 b2 h h')) + [SMTPat (NewM.modifies (NewM.loc_buffer (old_to_new_ghost b1) `NewM.loc_union` NewM.loc_buffer (old_to_new_ghost b2)) h h')] + + +/// Basic example of mutating an old buffer by converting it first to +/// a new buffer. +/// +/// The spec shows that all two flavors of modifies clauses can be +/// proven and the precise contents are reflected into the new buffer + +let ex1' (#a:Type) (b:Old.buffer nat{Old.length b > 0}) (b1:Old.buffer a) + : HST.ST unit + (requires (fun h -> Old.live h b /\ Old.disjoint b b1 /\ Old.live h b1)) + (ensures (fun h0 _ h1 -> + Old.get h1 b 0 == 0 /\ + New.get h1 (old_to_new_ghost b) 0 == 0 /\ + Old.as_seq h0 b1 == Old.as_seq h1 b1 /\ + NewM.modifies (NewM.loc_buffer (old_to_new_ghost b)) h0 h1 /\ + Old.modifies_1 b h0 h1)) = + let ne = old_to_new_st b in + New.upd ne 0ul 0 + +let ex1'' (#a:Type) (b:New.buffer nat{New.length b > 0}) (b1:New.buffer a) + : HST.ST unit + (requires (fun h -> New.live h b /\ NewM.loc_disjoint (NewM.loc_buffer b) (NewM.loc_buffer b1) /\ New.live h b1)) + (ensures (fun h0 _ h1 -> + New.get h1 b 0 == 0 /\ + Old.get h1 (new_to_old_ghost b) 0 == 0 /\ + New.as_seq h0 b1 == New.as_seq h1 b1 /\ + NewM.modifies (NewM.loc_buffer b) h0 h1 /\ + OldM.modifies (OldM.loc_buffer (new_to_old_ghost b)) h0 h1 /\ + Old.modifies_1 (new_to_old_ghost b) h0 h1)) = + let old = new_to_old_st b in + let old1 = new_to_old_st b1 in + ex1' old old1 diff --git a/ulib/mk_int.sh b/stage0/ulib/mk_int.sh similarity index 100% rename from ulib/mk_int.sh rename to stage0/ulib/mk_int.sh diff --git a/ulib/ml/.gitignore b/stage0/ulib/ml/.gitignore similarity index 100% rename from ulib/ml/.gitignore rename to stage0/ulib/ml/.gitignore diff --git a/ulib/ml/FStar_Int16.ml.prefix b/stage0/ulib/ml/FStar_Int16.ml.prefix similarity index 100% rename from ulib/ml/FStar_Int16.ml.prefix rename to stage0/ulib/ml/FStar_Int16.ml.prefix diff --git a/ulib/ml/FStar_Int32.ml.prefix b/stage0/ulib/ml/FStar_Int32.ml.prefix similarity index 100% rename from ulib/ml/FStar_Int32.ml.prefix rename to stage0/ulib/ml/FStar_Int32.ml.prefix diff --git a/ulib/ml/FStar_Int64.ml.prefix b/stage0/ulib/ml/FStar_Int64.ml.prefix similarity index 100% rename from ulib/ml/FStar_Int64.ml.prefix rename to stage0/ulib/ml/FStar_Int64.ml.prefix diff --git a/ulib/ml/FStar_Int8.ml.prefix b/stage0/ulib/ml/FStar_Int8.ml.prefix similarity index 100% rename from ulib/ml/FStar_Int8.ml.prefix rename to stage0/ulib/ml/FStar_Int8.ml.prefix diff --git 
a/ulib/ml/FStar_Ints.ml.body b/stage0/ulib/ml/FStar_Ints.ml.body similarity index 100% rename from ulib/ml/FStar_Ints.ml.body rename to stage0/ulib/ml/FStar_Ints.ml.body diff --git a/ulib/ml/FStar_UInt16.ml.prefix b/stage0/ulib/ml/FStar_UInt16.ml.prefix similarity index 100% rename from ulib/ml/FStar_UInt16.ml.prefix rename to stage0/ulib/ml/FStar_UInt16.ml.prefix diff --git a/ulib/ml/FStar_UInt32.ml.prefix b/stage0/ulib/ml/FStar_UInt32.ml.prefix similarity index 100% rename from ulib/ml/FStar_UInt32.ml.prefix rename to stage0/ulib/ml/FStar_UInt32.ml.prefix diff --git a/ulib/ml/FStar_UInt64.ml.prefix b/stage0/ulib/ml/FStar_UInt64.ml.prefix similarity index 100% rename from ulib/ml/FStar_UInt64.ml.prefix rename to stage0/ulib/ml/FStar_UInt64.ml.prefix diff --git a/ulib/ml/Makefile b/stage0/ulib/ml/Makefile similarity index 100% rename from ulib/ml/Makefile rename to stage0/ulib/ml/Makefile diff --git a/ulib/ml/Makefile.include b/stage0/ulib/ml/Makefile.include similarity index 100% rename from ulib/ml/Makefile.include rename to stage0/ulib/ml/Makefile.include diff --git a/ulib/ml/Makefile.realized b/stage0/ulib/ml/Makefile.realized similarity index 100% rename from ulib/ml/Makefile.realized rename to stage0/ulib/ml/Makefile.realized diff --git a/ulib/reclaimable/FStar.ST.fst b/stage0/ulib/reclaimable/FStar.ST.fst similarity index 100% rename from ulib/reclaimable/FStar.ST.fst rename to stage0/ulib/reclaimable/FStar.ST.fst diff --git a/stage1/.gitignore b/stage1/.gitignore new file mode 100644 index 00000000000..2cebf99e862 --- /dev/null +++ b/stage1/.gitignore @@ -0,0 +1,7 @@ +/inst +/*.checked/** +/*.ml/** +/*.pluginml/** +.*_depend +.depend +*.install diff --git a/stage1/Makefile b/stage1/Makefile new file mode 120000 index 00000000000..954fcba3244 --- /dev/null +++ b/stage1/Makefile @@ -0,0 +1 @@ +../mk/stage.mk \ No newline at end of file diff --git a/stage1/dune/dune b/stage1/dune/dune new file mode 100644 index 00000000000..190d1fc7081 --- /dev/null +++ b/stage1/dune/dune @@ -0,0 +1,5 @@ +(env + (_ + (bin_annot false) + (flags (:standard -w -A))) +) diff --git a/stage1/dune/dune-project b/stage1/dune/dune-project new file mode 100644 index 00000000000..93297f1ccae --- /dev/null +++ b/stage1/dune/dune-project @@ -0,0 +1,9 @@ +(lang dune 3.8) +(name fstar) +(generate_opam_files false) +(using menhir 2.1) + +(package + (name fstar) + (synopsis "The F* programming language and proof assistant") +) diff --git a/stage1/dune/fstar-guts/FStarC_Parser_Parse.mly b/stage1/dune/fstar-guts/FStarC_Parser_Parse.mly new file mode 120000 index 00000000000..53c2cb3f29e --- /dev/null +++ b/stage1/dune/fstar-guts/FStarC_Parser_Parse.mly @@ -0,0 +1 @@ +../../../src/ml/bare/FStarC_Parser_Parse.mly \ No newline at end of file diff --git a/stage1/dune/fstar-guts/app b/stage1/dune/fstar-guts/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage1/dune/fstar-guts/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage1/dune/fstar-guts/bare b/stage1/dune/fstar-guts/bare new file mode 120000 index 00000000000..1f9e8d416f1 --- /dev/null +++ b/stage1/dune/fstar-guts/bare @@ -0,0 +1 @@ +../../../src/ml/bare/ \ No newline at end of file diff --git a/stage1/dune/fstar-guts/dune b/stage1/dune/fstar-guts/dune new file mode 100644 index 00000000000..5162865f29e --- /dev/null +++ b/stage1/dune/fstar-guts/dune @@ -0,0 +1,33 @@ +(include_subdirs unqualified) +(library + (name fstarcompiler) + (public_name fstar.compiler) + (libraries + batteries + zarith + stdint + 
yojson + ppxlib + dynlink + menhirLib + pprint + sedlex + mtime.clock.os + ) + (modes native) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)) +) + +(menhir + (modules FStarC_Parser_Parse)) + +(rule + (target FStarC_Version.ml) + (deps (:script make_fstar_version.sh) + (:version ../../version.txt)) + (action + (progn + (copy %{version} version.txt) + (with-stdout-to + FStarC_Version.ml + (run bash %{script}))))) diff --git a/stage1/dune/fstar-guts/fstarc.ml b/stage1/dune/fstar-guts/fstarc.ml new file mode 120000 index 00000000000..d00d5889cff --- /dev/null +++ b/stage1/dune/fstar-guts/fstarc.ml @@ -0,0 +1 @@ +../../fstarc.ml/ \ No newline at end of file diff --git a/stage1/dune/fstar-guts/make_fstar_version.sh b/stage1/dune/fstar-guts/make_fstar_version.sh new file mode 120000 index 00000000000..19236f209a6 --- /dev/null +++ b/stage1/dune/fstar-guts/make_fstar_version.sh @@ -0,0 +1 @@ +../../../.scripts/make_fstar_version.sh \ No newline at end of file diff --git a/stage1/dune/fstar-plugins/app b/stage1/dune/fstar-plugins/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage1/dune/fstar-plugins/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage1/dune/fstar-plugins/dune b/stage1/dune/fstar-plugins/dune new file mode 100644 index 00000000000..41cea55c81b --- /dev/null +++ b/stage1/dune/fstar-plugins/dune @@ -0,0 +1,11 @@ +(include_subdirs unqualified) +(library + (name fstar_plugins) + (libraries + fstarcompiler + ) + (modes native) + (wrapped false) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)) + ; ^ Needed for some of the base modules. +) diff --git a/stage1/dune/fstar-plugins/full b/stage1/dune/fstar-plugins/full new file mode 120000 index 00000000000..fbfc3b1c40c --- /dev/null +++ b/stage1/dune/fstar-plugins/full @@ -0,0 +1 @@ +../../../src/ml/full/ \ No newline at end of file diff --git a/stage1/dune/fstar-plugins/plugin b/stage1/dune/fstar-plugins/plugin new file mode 120000 index 00000000000..ce023fa39b3 --- /dev/null +++ b/stage1/dune/fstar-plugins/plugin @@ -0,0 +1 @@ +../../../ulib/ml/plugin/ \ No newline at end of file diff --git a/stage1/dune/fstar-plugins/plugins.ml b/stage1/dune/fstar-plugins/plugins.ml new file mode 120000 index 00000000000..00768efd8e8 --- /dev/null +++ b/stage1/dune/fstar-plugins/plugins.ml @@ -0,0 +1 @@ +../../plugins.ml/ \ No newline at end of file diff --git a/stage1/dune/fstarc-bare/dune b/stage1/dune/fstarc-bare/dune new file mode 100644 index 00000000000..990c87bd207 --- /dev/null +++ b/stage1/dune/fstarc-bare/dune @@ -0,0 +1,11 @@ +(include_subdirs unqualified) +(executable + (name main) + ; ^ can we avoid this? Users do not really need it + (libraries + fstarcompiler + memtrace + ) + (link_flags "-linkall") + (modes (native exe)) +) diff --git a/stage1/dune/fstarc-bare/main.ml b/stage1/dune/fstarc-bare/main.ml new file mode 100644 index 00000000000..43d25ac5234 --- /dev/null +++ b/stage1/dune/fstarc-bare/main.ml @@ -0,0 +1,19 @@ +let x = + (* On Unix, if we write to a pipe that tries to send something + to a process that died, we receive a SIGPIPE signal which + by default will terminate F*, and we won't get an exception + or anything. So, block them, and instead rely on OCaml exceptions + to detect this. *) + if Fstarcompiler.FStarC_Platform.system = Posix then + ignore (Unix.sigprocmask Unix.SIG_BLOCK [Sys.sigpipe]); + + (* Enable memtrace, only if the environment variable MEMTRACE is set. 
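+     (MEMTRACE should name the file the trace is written to.)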
*) + Memtrace.trace_if_requested (); + + (* Record a backtrace on exceptions, for --trace_error. *) + Printexc.record_backtrace true; + + (* Tweak garbage collector parameters. *) + Gc.set { (Gc.get()) with Gc.minor_heap_size = 1048576; Gc.major_heap_increment = 4194304; Gc.space_overhead = 150; }; + + Fstarcompiler.FStarC_Main.main () diff --git a/stage1/dune/fstarc-full/dune b/stage1/dune/fstarc-full/dune new file mode 100644 index 00000000000..c1241e93e9d --- /dev/null +++ b/stage1/dune/fstarc-full/dune @@ -0,0 +1,14 @@ +(include_subdirs unqualified) +(executable + (name main) + (public_name fstar.exe) + (libraries + ; Can we just say we extend fstar_bare instead of _guts, and duplicating main? + fstarcompiler + fstar_plugins + memtrace + ) + (link_flags "-linkall") + (modes (native exe)) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)) +) diff --git a/stage1/dune/fstarc-full/main.ml b/stage1/dune/fstarc-full/main.ml new file mode 120000 index 00000000000..f747791324d --- /dev/null +++ b/stage1/dune/fstarc-full/main.ml @@ -0,0 +1 @@ +../fstarc-bare/main.ml \ No newline at end of file diff --git a/stage1/dune/libapp/app b/stage1/dune/libapp/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage1/dune/libapp/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage1/dune/libapp/app-extra b/stage1/dune/libapp/app-extra new file mode 120000 index 00000000000..6a7c7ee2201 --- /dev/null +++ b/stage1/dune/libapp/app-extra @@ -0,0 +1 @@ +../../../ulib/ml/app-extra/ \ No newline at end of file diff --git a/stage1/dune/libapp/dune b/stage1/dune/libapp/dune new file mode 100644 index 00000000000..c9ab622f539 --- /dev/null +++ b/stage1/dune/libapp/dune @@ -0,0 +1,20 @@ +(include_subdirs unqualified) +(library + (name fstar) + (public_name fstar.lib) + (libraries + batteries + zarith + stdint + pprint + ) + (modes native byte) + ; ^ Note: we need to compile fstar-lib in bytecode since some + ; clients use it (e.g. MLS* when being compiled into javascript + ; via js_of_ocaml, in general anything that wants to use js_of_ocaml). + ; We should consider a toggle to selectively enable it, as most users + ; do not need it. 
+ (wrapped false) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson)) + ; ^FIXME we really should not have to expose nor depend on this +) diff --git a/stage1/dune/libapp/ulib.ml b/stage1/dune/libapp/ulib.ml new file mode 120000 index 00000000000..9fec5c68a72 --- /dev/null +++ b/stage1/dune/libapp/ulib.ml @@ -0,0 +1 @@ +../../ulib.ml/ \ No newline at end of file diff --git a/stage1/dune/libplugin/app b/stage1/dune/libplugin/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage1/dune/libplugin/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage1/dune/libplugin/app-extra b/stage1/dune/libplugin/app-extra new file mode 120000 index 00000000000..6a7c7ee2201 --- /dev/null +++ b/stage1/dune/libplugin/app-extra @@ -0,0 +1 @@ +../../../ulib/ml/app-extra/ \ No newline at end of file diff --git a/stage1/dune/libplugin/dune b/stage1/dune/libplugin/dune new file mode 100644 index 00000000000..3e9373aa035 --- /dev/null +++ b/stage1/dune/libplugin/dune @@ -0,0 +1,17 @@ +(include_subdirs unqualified) +(library + (name fstar_pluginlib) + (public_name fstar.pluginlib) + (libraries + fstarcompiler + ) + (modes native) + (wrapped true) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson)) +) + +(env + (_ + (bin_annot false) + (flags (:standard -w -A))) +) diff --git a/stage1/dune/libplugin/full b/stage1/dune/libplugin/full new file mode 120000 index 00000000000..fbfc3b1c40c --- /dev/null +++ b/stage1/dune/libplugin/full @@ -0,0 +1 @@ +../../../src/ml/full/ \ No newline at end of file diff --git a/stage1/dune/libplugin/plugin b/stage1/dune/libplugin/plugin new file mode 120000 index 00000000000..ce023fa39b3 --- /dev/null +++ b/stage1/dune/libplugin/plugin @@ -0,0 +1 @@ +../../../ulib/ml/plugin/ \ No newline at end of file diff --git a/stage1/dune/libplugin/ulib.pluginml b/stage1/dune/libplugin/ulib.pluginml new file mode 120000 index 00000000000..36cb2d97a27 --- /dev/null +++ b/stage1/dune/libplugin/ulib.pluginml @@ -0,0 +1 @@ +../../ulib.pluginml/ \ No newline at end of file diff --git a/stage1/ulib b/stage1/ulib new file mode 120000 index 00000000000..ca044776e6f --- /dev/null +++ b/stage1/ulib @@ -0,0 +1 @@ +../ulib/ \ No newline at end of file diff --git a/stage1/version.txt b/stage1/version.txt new file mode 120000 index 00000000000..aa4e5bece7e --- /dev/null +++ b/stage1/version.txt @@ -0,0 +1 @@ +../version.txt \ No newline at end of file diff --git a/stage2/.gitignore b/stage2/.gitignore new file mode 100644 index 00000000000..ab215efc3e6 --- /dev/null +++ b/stage2/.gitignore @@ -0,0 +1,8 @@ +/inst +/*.checked/** +/*.ml/** +/*.fs/** +/*.pluginml/** +.*_depend +.depend +*.install diff --git a/stage2/Makefile b/stage2/Makefile new file mode 120000 index 00000000000..954fcba3244 --- /dev/null +++ b/stage2/Makefile @@ -0,0 +1 @@ +../mk/stage.mk \ No newline at end of file diff --git a/stage2/dune/dune b/stage2/dune/dune new file mode 100644 index 00000000000..190d1fc7081 --- /dev/null +++ b/stage2/dune/dune @@ -0,0 +1,5 @@ +(env + (_ + (bin_annot false) + (flags (:standard -w -A))) +) diff --git a/stage2/dune/dune-project b/stage2/dune/dune-project new file mode 100644 index 00000000000..93297f1ccae --- /dev/null +++ b/stage2/dune/dune-project @@ -0,0 +1,9 @@ +(lang dune 3.8) +(name fstar) +(generate_opam_files false) +(using menhir 2.1) + +(package + (name fstar) + (synopsis "The F* programming language and proof assistant") +) diff --git a/stage2/dune/fstar-guts/FStarC_Parser_Parse.mly 
b/stage2/dune/fstar-guts/FStarC_Parser_Parse.mly new file mode 120000 index 00000000000..53c2cb3f29e --- /dev/null +++ b/stage2/dune/fstar-guts/FStarC_Parser_Parse.mly @@ -0,0 +1 @@ +../../../src/ml/bare/FStarC_Parser_Parse.mly \ No newline at end of file diff --git a/stage2/dune/fstar-guts/app b/stage2/dune/fstar-guts/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage2/dune/fstar-guts/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage2/dune/fstar-guts/bare b/stage2/dune/fstar-guts/bare new file mode 120000 index 00000000000..1f9e8d416f1 --- /dev/null +++ b/stage2/dune/fstar-guts/bare @@ -0,0 +1 @@ +../../../src/ml/bare/ \ No newline at end of file diff --git a/stage2/dune/fstar-guts/dune b/stage2/dune/fstar-guts/dune new file mode 100644 index 00000000000..5162865f29e --- /dev/null +++ b/stage2/dune/fstar-guts/dune @@ -0,0 +1,33 @@ +(include_subdirs unqualified) +(library + (name fstarcompiler) + (public_name fstar.compiler) + (libraries + batteries + zarith + stdint + yojson + ppxlib + dynlink + menhirLib + pprint + sedlex + mtime.clock.os + ) + (modes native) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)) +) + +(menhir + (modules FStarC_Parser_Parse)) + +(rule + (target FStarC_Version.ml) + (deps (:script make_fstar_version.sh) + (:version ../../version.txt)) + (action + (progn + (copy %{version} version.txt) + (with-stdout-to + FStarC_Version.ml + (run bash %{script}))))) diff --git a/stage2/dune/fstar-guts/fstarc.ml b/stage2/dune/fstar-guts/fstarc.ml new file mode 120000 index 00000000000..d00d5889cff --- /dev/null +++ b/stage2/dune/fstar-guts/fstarc.ml @@ -0,0 +1 @@ +../../fstarc.ml/ \ No newline at end of file diff --git a/stage2/dune/fstar-guts/make_fstar_version.sh b/stage2/dune/fstar-guts/make_fstar_version.sh new file mode 120000 index 00000000000..19236f209a6 --- /dev/null +++ b/stage2/dune/fstar-guts/make_fstar_version.sh @@ -0,0 +1 @@ +../../../.scripts/make_fstar_version.sh \ No newline at end of file diff --git a/stage2/dune/fstar-plugins/app b/stage2/dune/fstar-plugins/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage2/dune/fstar-plugins/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage2/dune/fstar-plugins/dune b/stage2/dune/fstar-plugins/dune new file mode 100644 index 00000000000..41cea55c81b --- /dev/null +++ b/stage2/dune/fstar-plugins/dune @@ -0,0 +1,11 @@ +(include_subdirs unqualified) +(library + (name fstar_plugins) + (libraries + fstarcompiler + ) + (modes native) + (wrapped false) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)) + ; ^ Needed for some of the base modules. 
+) diff --git a/stage2/dune/fstar-plugins/full b/stage2/dune/fstar-plugins/full new file mode 120000 index 00000000000..fbfc3b1c40c --- /dev/null +++ b/stage2/dune/fstar-plugins/full @@ -0,0 +1 @@ +../../../src/ml/full/ \ No newline at end of file diff --git a/stage2/dune/fstar-plugins/plugin b/stage2/dune/fstar-plugins/plugin new file mode 120000 index 00000000000..ce023fa39b3 --- /dev/null +++ b/stage2/dune/fstar-plugins/plugin @@ -0,0 +1 @@ +../../../ulib/ml/plugin/ \ No newline at end of file diff --git a/stage2/dune/fstar-plugins/plugins.ml b/stage2/dune/fstar-plugins/plugins.ml new file mode 120000 index 00000000000..00768efd8e8 --- /dev/null +++ b/stage2/dune/fstar-plugins/plugins.ml @@ -0,0 +1 @@ +../../plugins.ml/ \ No newline at end of file diff --git a/stage2/dune/fstarc-bare/dune b/stage2/dune/fstarc-bare/dune new file mode 100644 index 00000000000..990c87bd207 --- /dev/null +++ b/stage2/dune/fstarc-bare/dune @@ -0,0 +1,11 @@ +(include_subdirs unqualified) +(executable + (name main) + ; ^ can we avoid this? Users do not really need it + (libraries + fstarcompiler + memtrace + ) + (link_flags "-linkall") + (modes (native exe)) +) diff --git a/stage2/dune/fstarc-bare/main.ml b/stage2/dune/fstarc-bare/main.ml new file mode 100644 index 00000000000..43d25ac5234 --- /dev/null +++ b/stage2/dune/fstarc-bare/main.ml @@ -0,0 +1,19 @@ +let x = + (* On Unix, if we write to a pipe that tries to send something + to a process that died, we receive a SIGPIPE signal which + by default will terminate F*, and we won't get an exception + or anything. So, block them, and instead rely on OCaml exceptions + to detect this. *) + if Fstarcompiler.FStarC_Platform.system = Posix then + ignore (Unix.sigprocmask Unix.SIG_BLOCK [Sys.sigpipe]); + + (* Enable memtrace, only if the environment variable MEMTRACE is set. *) + Memtrace.trace_if_requested (); + + (* Record a backtrace on exceptions, for --trace_error. *) + Printexc.record_backtrace true; + + (* Tweak garbage collector parameters. *) + Gc.set { (Gc.get()) with Gc.minor_heap_size = 1048576; Gc.major_heap_increment = 4194304; Gc.space_overhead = 150; }; + + Fstarcompiler.FStarC_Main.main () diff --git a/stage2/dune/fstarc-full/dune b/stage2/dune/fstarc-full/dune new file mode 100644 index 00000000000..c1241e93e9d --- /dev/null +++ b/stage2/dune/fstarc-full/dune @@ -0,0 +1,14 @@ +(include_subdirs unqualified) +(executable + (name main) + (public_name fstar.exe) + (libraries + ; Can we just say we extend fstar_bare instead of _guts, and duplicating main? 
+ fstarcompiler + fstar_plugins + memtrace + ) + (link_flags "-linkall") + (modes (native exe)) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)) +) diff --git a/stage2/dune/fstarc-full/main.ml b/stage2/dune/fstarc-full/main.ml new file mode 120000 index 00000000000..f747791324d --- /dev/null +++ b/stage2/dune/fstarc-full/main.ml @@ -0,0 +1 @@ +../fstarc-bare/main.ml \ No newline at end of file diff --git a/stage2/dune/libapp/app b/stage2/dune/libapp/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage2/dune/libapp/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage2/dune/libapp/app-extra b/stage2/dune/libapp/app-extra new file mode 120000 index 00000000000..6a7c7ee2201 --- /dev/null +++ b/stage2/dune/libapp/app-extra @@ -0,0 +1 @@ +../../../ulib/ml/app-extra/ \ No newline at end of file diff --git a/stage2/dune/libapp/dune b/stage2/dune/libapp/dune new file mode 100644 index 00000000000..c9ab622f539 --- /dev/null +++ b/stage2/dune/libapp/dune @@ -0,0 +1,20 @@ +(include_subdirs unqualified) +(library + (name fstar) + (public_name fstar.lib) + (libraries + batteries + zarith + stdint + pprint + ) + (modes native byte) + ; ^ Note: we need to compile fstar-lib in bytecode since some + ; clients use it (e.g. MLS* when being compiled into javascript + ; via js_of_ocaml, in general anything that wants to use js_of_ocaml). + ; We should consider a toggle to selectively enable it, as most users + ; do not need it. + (wrapped false) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson)) + ; ^FIXME we really should not have to expose nor depend on this +) diff --git a/stage2/dune/libapp/ulib.ml b/stage2/dune/libapp/ulib.ml new file mode 120000 index 00000000000..9fec5c68a72 --- /dev/null +++ b/stage2/dune/libapp/ulib.ml @@ -0,0 +1 @@ +../../ulib.ml/ \ No newline at end of file diff --git a/stage2/dune/libplugin/app b/stage2/dune/libplugin/app new file mode 120000 index 00000000000..1cf9fa391d7 --- /dev/null +++ b/stage2/dune/libplugin/app @@ -0,0 +1 @@ +../../../ulib/ml/app/ \ No newline at end of file diff --git a/stage2/dune/libplugin/app-extra b/stage2/dune/libplugin/app-extra new file mode 120000 index 00000000000..6a7c7ee2201 --- /dev/null +++ b/stage2/dune/libplugin/app-extra @@ -0,0 +1 @@ +../../../ulib/ml/app-extra/ \ No newline at end of file diff --git a/stage2/dune/libplugin/dune b/stage2/dune/libplugin/dune new file mode 100644 index 00000000000..3e9373aa035 --- /dev/null +++ b/stage2/dune/libplugin/dune @@ -0,0 +1,17 @@ +(include_subdirs unqualified) +(library + (name fstar_pluginlib) + (public_name fstar.pluginlib) + (libraries + fstarcompiler + ) + (modes native) + (wrapped true) + (preprocess (pps ppx_deriving.show ppx_deriving_yojson)) +) + +(env + (_ + (bin_annot false) + (flags (:standard -w -A))) +) diff --git a/stage2/dune/libplugin/full b/stage2/dune/libplugin/full new file mode 120000 index 00000000000..fbfc3b1c40c --- /dev/null +++ b/stage2/dune/libplugin/full @@ -0,0 +1 @@ +../../../src/ml/full/ \ No newline at end of file diff --git a/stage2/dune/libplugin/plugin b/stage2/dune/libplugin/plugin new file mode 120000 index 00000000000..ce023fa39b3 --- /dev/null +++ b/stage2/dune/libplugin/plugin @@ -0,0 +1 @@ +../../../ulib/ml/plugin/ \ No newline at end of file diff --git a/stage2/dune/libplugin/ulib.pluginml b/stage2/dune/libplugin/ulib.pluginml new file mode 120000 index 00000000000..36cb2d97a27 --- /dev/null +++ b/stage2/dune/libplugin/ulib.pluginml @@ -0,0 +1 @@ +../../ulib.pluginml/ 
\ No newline at end of file diff --git a/stage2/ulib b/stage2/ulib new file mode 120000 index 00000000000..ca044776e6f --- /dev/null +++ b/stage2/ulib @@ -0,0 +1 @@ +../ulib/ \ No newline at end of file diff --git a/stage2/version.txt b/stage2/version.txt new file mode 120000 index 00000000000..aa4e5bece7e --- /dev/null +++ b/stage2/version.txt @@ -0,0 +1 @@ +../version.txt \ No newline at end of file diff --git a/tests/Cfg.fst.config.json b/tests/Cfg.fst.config.json index 449ae58473d..a406ce08ad0 100644 --- a/tests/Cfg.fst.config.json +++ b/tests/Cfg.fst.config.json @@ -1,5 +1,5 @@ { - "fstar_exe": "../bin/fstar.exe", + "fstar_exe": "../out/bin/fstar.exe", "options": [ "--ext", "context_pruning", "--z3version", "4.13.3" diff --git a/tests/dune_hello/Makefile b/tests/dune_hello/Makefile index 9814d8bcd8d..70bee56c7a0 100644 --- a/tests/dune_hello/Makefile +++ b/tests/dune_hello/Makefile @@ -1,4 +1,6 @@ -FSTAR_EXE ?= ../../bin/fstar.exe +# In this repository, we inherit this variable. Externally we +# just take fstar.exe from the PATH. +FSTAR_EXE ?= fstar.exe .PHONY: all all: run diff --git a/tests/hacl/HaclTests.fst.config.json b/tests/hacl/HaclTests.fst.config.json index 81a0d9b5da4..cc813ae64bd 100644 --- a/tests/hacl/HaclTests.fst.config.json +++ b/tests/hacl/HaclTests.fst.config.json @@ -2,7 +2,8 @@ "fstar_exe": "fstar.exe", "options": [ "--ext", "context_pruning", - "--z3limit_factor", "2" + "--z3limit_factor", "2", + "--z3version", "4.13.3" ], "include_dirs": [ ] diff --git a/tests/semiring/CanonCommSemiring.ml.fixup b/tests/semiring/CanonCommSemiring.ml.fixup index 02da1deab67..70f0156f0af 100644 --- a/tests/semiring/CanonCommSemiring.ml.fixup +++ b/tests/semiring/CanonCommSemiring.ml.fixup @@ -3,7 +3,7 @@ (* This is needed since we have no automatic embeddings for Tac functions, but we should add them *) let _ = - FStarC_Tactics_Native.register_tactic "CanonCommSemiring.canon_semiring_aux" + Fstarcompiler.FStarC_Tactics_Native.register_tactic "CanonCommSemiring.canon_semiring_aux" (Prims.parse_int "11") (fun psc -> fun ncb -> @@ -11,22 +11,22 @@ let _ = fun args -> match args with | (tv_0,_)::args_tail -> - (FStarC_Tactics_InterpFuns.mk_tactic_interpretation_9 + (Fstarcompiler.FStarC_Tactics_InterpFuns.mk_tactic_interpretation_9 "CanonCommSemiring.canon_semiring_aux (plugin)" - (FStarC_Tactics_Native.from_tactic_9 canon_semiring_aux) - FStarC_Reflection_V2_Embeddings.e_term - (FStarC_Tactics_Interpreter.e_tactic_1_alt - FStarC_Reflection_V2_Embeddings.e_term - (FStarC_Syntax_Embeddings.mk_any_emb tv_0)) - (FStarC_Tactics_Interpreter.e_tactic_1_alt - (FStarC_Syntax_Embeddings.mk_any_emb tv_0) - FStarC_Reflection_V2_Embeddings.e_term) - FStarC_Reflection_V2_Embeddings.e_term - FStarC_Reflection_V2_Embeddings.e_term - FStarC_Reflection_V2_Embeddings.e_term - FStarC_Reflection_V2_Embeddings.e_term - FStarC_Reflection_V2_Embeddings.e_term - FStarC_Syntax_Embeddings.e_any - FStarC_Syntax_Embeddings.e_unit + (Fstarcompiler.FStarC_Tactics_Native.from_tactic_9 canon_semiring_aux) + Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term + (Fstarcompiler.FStarC_Tactics_Interpreter.e_tactic_1_alt + Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term + (Fstarcompiler.FStarC_Syntax_Embeddings.mk_any_emb tv_0)) + (Fstarcompiler.FStarC_Tactics_Interpreter.e_tactic_1_alt + (Fstarcompiler.FStarC_Syntax_Embeddings.mk_any_emb tv_0) + Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term) + Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term + Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term + 
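(* Note: the Fstarcompiler prefix added throughout this fixup reflects the new packaging: the compiler now ships as the wrapped dune library fstar.compiler (stage2/dune/fstar-guts), so its modules are reached as Fstarcompiler.FStarC_* rather than as top-level FStarC_* modules. *)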
Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term + Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term + Fstarcompiler.FStarC_Reflection_V2_Embeddings.e_term + Fstarcompiler.FStarC_Syntax_Embeddings.e_any + Fstarcompiler.FStarC_Syntax_Embeddings.e_unit psc ncb us) args_tail | _ -> failwith "arity mismatch") diff --git a/tests/simple_hello/Makefile b/tests/simple_hello/Makefile index a4fc6e90bd5..cb82899064a 100644 --- a/tests/simple_hello/Makefile +++ b/tests/simple_hello/Makefile @@ -2,7 +2,9 @@ # We should not need to include any other internal makefiles. # Dune also works fine under the --ocamlenv. -FSTAR ?= ../../bin/fstar.exe +# In this repository, we inherit this variable. Externally we +# just take fstar.exe from the PATH. +FSTAR_EXE ?= fstar.exe all: Hello.test @@ -11,13 +13,13 @@ Hello.test: Hello.exe Hello.byte ./Hello.byte | grep "Hello F\*!" %.ml: %.fst - $(FSTAR) --codegen OCaml $< --extract $* --z3version 4.13.3 + $(FSTAR_EXE) --codegen OCaml $< --extract $* --z3version 4.13.3 %.exe: %.ml - $(FSTAR) --ocamlopt $< -o $@ + $(FSTAR_EXE) --ocamlopt $< -o $@ %.byte: %.ml - $(FSTAR) --ocamlc $< -o $@ + $(FSTAR_EXE) --ocamlc $< -o $@ clean: rm -f *.ml *.exe *.byte *.cm* *.o diff --git a/ulib/Cfg.fst.config.json b/ulib/Cfg.fst.config.json new file mode 100644 index 00000000000..1bb4e808b59 --- /dev/null +++ b/ulib/Cfg.fst.config.json @@ -0,0 +1,12 @@ +{ + "_comment": "Note: the path below must be the 'uninstalled' path (not in out/) so we can still open ulib interactively even if the library failed to build as a whole (which is usually a time when you want to open it!). You can switch to stage2 too, just also switch the include of ulib.checked below", + + "fstar_exe": "../stage1/dune/_build/default/fstarc-full/main.exe", + "options": [ + "--ext", "context_pruning", + "--z3version", "4.13.3" + ], + "include_dirs": [ + "../stage1/ulib.checked" + ] +} diff --git a/ulib/FStar.Math.Fermat.fst b/ulib/FStar.Math.Fermat.fst index 1c8140d0cff..a14631d3ffe 100644 --- a/ulib/FStar.Math.Fermat.fst +++ b/ulib/FStar.Math.Fermat.fst @@ -477,11 +477,14 @@ let fermat p a = val mod_mult_congr_aux (p:int{is_prime p}) (a b c:int) : Lemma (requires (a * c) % p = (b * c) % p /\ 0 <= b /\ b <= a /\ a < p /\ c % p <> 0) (ensures a = b) +#push-options "--retry 3" // proof below is brittle let mod_mult_congr_aux p a b c = let open FStar.Math.Lemmas in calc (==>) { (a * c) % p == (b * c) % p; ==> { mod_add_both (a * c) (b * c) (-b * c) p } + (a * c + (- b * c)) % p == (b * c + (- b * c)) % p; + ==> {} (a * c - b * c) % p == (b * c - b * c) % p; ==> { swap_mul a c; swap_mul b c; lemma_mul_sub_distr c a b } (c * (a - b)) % p == (b * c - b * c) % p; @@ -491,6 +494,7 @@ let mod_mult_congr_aux p a b c = let r, s = FStar.Math.Euclid.bezout_prime p (c % p) in FStar.Math.Euclid.euclid p (c % p) (a - b) r s; small_mod (a - b) p +#pop-options let mod_mult_congr p a b c = let open FStar.Math.Lemmas in diff --git a/ulib/FStar.Matrix.fst b/ulib/FStar.Matrix.fst index c44a524868f..bca2a15b210 100644 --- a/ulib/FStar.Matrix.fst +++ b/ulib/FStar.Matrix.fst @@ -732,6 +732,7 @@ let matrix_mul_unit_row_lemma #c #eq m (add mul: CE.cm c eq) (i: under m) (SB.create 1 mul.unit `SB.append` SB.create (m-i-1) add.unit)) (row (matrix_mul_unit add mul m) i) +#push-options "--z3rlimit 20" let matrix_mul_unit_col_lemma #c #eq m (add mul: CE.cm c eq) (i: under m) : Lemma ((col (matrix_mul_unit add mul m) i == (SB.create i add.unit) `SB.append` @@ -745,7 +746,8 @@ let matrix_mul_unit_col_lemma #c #eq m (add mul: CE.cm c eq) (i: under m) 
SB.lemma_eq_elim ((SB.create i add.unit) `SB.append` (SB.create 1 mul.unit `SB.append` SB.create (m-i-1) add.unit)) (col (matrix_mul_unit add mul m) i) - +#pop-options + let seq_of_products_zeroes_lemma #c #eq #m (mul: CE.cm c eq) (z: c{is_absorber z mul}) (s: SB.seq c{SB.length s == m}) diff --git a/ulib/LowStar.BufferView.fst.hints b/ulib/LowStar.BufferView.fst.hints deleted file mode 100644 index 1e097d8c1cc..00000000000 --- a/ulib/LowStar.BufferView.fst.hints +++ /dev/null @@ -1,1307 +0,0 @@ -[ - "w7B\u000bH\u0017V|\fM\u000be\t", - [ - [ - "LowStar.BufferView.view", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.pos", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5" - ], - 0, - "53a316c597d72adfd7f537c58f6c2678" - ], - [ - "LowStar.BufferView.__proj__View__item__get", - 1, - 2, - 1, - [ "@query" ], - 0, - "78950f6c610bc3d28726e84dc84ce4be" - ], - [ - "LowStar.BufferView.__proj__View__item__get", - 2, - 2, - 1, - [ "@query" ], - 0, - "91adefe994edcc8bfa41748da3712c24" - ], - [ - "LowStar.BufferView.__proj__View__item__put", - 1, - 2, - 1, - [ "@query" ], - 0, - "6c955b164c5862892c07e19ba0fead9c" - ], - [ - "LowStar.BufferView.__proj__View__item__put", - 2, - 2, - 1, - [ "@query" ], - 0, - "9223375504a56f72409489d50d169895" - ], - [ - "LowStar.BufferView.buffer_view", - 1, - 2, - 1, - [ "@query" ], - 0, - "eb6e0238fece8d74fff0ea0541958181" - ], - [ - "LowStar.BufferView.__proj__BufferView__item__v", - 1, - 2, - 1, - [ "@query" ], - 0, - "2688be96bc5af0f2d9fbb51a3fc5f542" - ], - [ - "LowStar.BufferView.__proj__BufferView__item__v", - 2, - 2, - 1, - [ "@query" ], - 0, - "45639ac1fbc65c220bebd2cd5417288d" - ], - [ - "LowStar.BufferView.mk_buffer_view", - 1, - 2, - 1, - [ "@query" ], - 0, - "cb1481902abdfcf45fa5b38f0128cbf3" - ], - [ - "LowStar.BufferView.mk_buffer_view", - 2, - 2, - 1, - [ "@query" ], - 0, - "edb05c09bb7a8cee283eb25bc4c33687" - ], - [ - "LowStar.BufferView.as_buffer_mk_buffer_view", - 1, - 2, - 1, - [ "@query" ], - 0, - "e8eb610d2fc139d31635a894df1b0b6e" - ], - [ - "LowStar.BufferView.as_buffer_mk_buffer_view", - 2, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.mk_buffer_view", - "fuel_guarded_inversion_LowStar.BufferView.view", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.BufferView_buf", - "projection_inverse_FStar.Pervasives.Mkdtuple4__1", - "projection_inverse_FStar.Pervasives.Mkdtuple4__2", - "projection_inverse_FStar.Pervasives.Mkdtuple4__3", - "projection_inverse_FStar.Pervasives.Mkdtuple4__4", - "projection_inverse_LowStar.BufferView.BufferView_buf", - "refinement_interpretation_Tm_refine_f2ee369a11791d606a8b2bbf30d7d30e" - ], - 0, - "66fe4093d184de40ba7618e29dbb2219" - ], - [ - "LowStar.BufferView.as_buffer_mk_buffer_view", - 3, - 2, - 1, - [ "@query" ], - 0, - "585092d6f4d6308696bc63e3b0365320" - ], - [ - "LowStar.BufferView.get_view_mk_buffer_view", - 1, - 2, - 1, - [ "@query" ], - 0, - "d9c975056cdd41b64de20325860de662" - ], - [ - "LowStar.BufferView.get_view_mk_buffer_view", - 2, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.mk_buffer_view", - "fuel_guarded_inversion_LowStar.BufferView.view", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - 
"proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.BufferView_v", - "projection_inverse_FStar.Pervasives.Mkdtuple4__1", - "projection_inverse_FStar.Pervasives.Mkdtuple4__4", - "projection_inverse_LowStar.BufferView.BufferView_v", - "refinement_interpretation_Tm_refine_f2ee369a11791d606a8b2bbf30d7d30e" - ], - 0, - "f32ff946de4fe7fcac9a73b45c113642" - ], - [ - "LowStar.BufferView.get_view_mk_buffer_view", - 3, - 2, - 1, - [ "@query" ], - 0, - "585092d6f4d6308696bc63e3b0365320" - ], - [ - "LowStar.BufferView.length", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_LowStar.BufferView.View", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", "equation_Prims.logical", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.buffer_view", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_Prims.logical", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "primitive_Prims.op_Division", "primitive_Prims.op_Modulus", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_n", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___1", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___2", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___3", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___4", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.Monotonic.Buffer.length" - ], - 0, - "0d6519454e6a918c5323276f7a147012" - ], - [ - "LowStar.BufferView.length_eq", - 1, - 2, - 1, - [ "@query" ], - 0, - "03a2cd4d15ee40b8ffe752b0d2a6ddaa" - ], - [ - "LowStar.BufferView.length_eq", - 2, - 2, - 1, - [ "@query", "equation_LowStar.BufferView.length" ], - 0, - "52d2c8c2496bf48f8661880326951bbf" - ], - [ - "LowStar.BufferView.view_indexing", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_LowStar.BufferView.View", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.Monotonic.Buffer.length", "equation_Prims.logical", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_Prims.logical", "int_inversion", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "primitive_Prims.op_Addition", "primitive_Prims.op_Minus", - "primitive_Prims.op_Modulus", "primitive_Prims.op_Multiply", - "primitive_Prims.op_Subtraction", - 
"proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_n", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___1", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___2", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___3", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___4", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.get_view" - ], - 0, - "20698754c043a0b76ec6ad3c62d27b35" - ], - [ - "LowStar.BufferView.split_at_i", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "bool_inversion", "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pervasives.Native.fst", - "equation_FStar.Pervasives.Native.snd", - "equation_FStar.Seq.Properties.split", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.length", - "equation_LowStar.Monotonic.Buffer.length", "equation_Prims.nat", - "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "primitive_Prims.op_Division", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Modulus", "primitive_Prims.op_Multiply", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_FStar.Pervasives.Native.Mktuple2__1", - "proj_equation_FStar.Pervasives.Native.Mktuple2__2", - "proj_equation_LowStar.BufferView.View_n", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - 
"typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.Monotonic.Buffer.length" - ], - 0, - "29be7c39390ee7fa0d4445123ce5b122" - ], - [ - "LowStar.BufferView.sel", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_LowStar.BufferView.View", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "proj_equation_LowStar.BufferView.View_n", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "typing_LowStar.BufferView.get_view" - ], - 0, - "7e3a3cf25925f272552ffd74c2fa7ff7" - ], - [ - "LowStar.BufferView.upd", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "LowStar.BufferView_interpretation_Tm_ghost_arrow_23c7f1237b286d1dbfd156da0bd156c4", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Seq.Properties.lseq", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_452dd5171388b32bcb531f5ed5ed479d", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "token_correspondence_LowStar.BufferView.__proj__View__item__put", - "typing_FStar.Seq.Base.append", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view" - ], - 0, - "25d9184513bbe4c2fbcef94ad80eae24" - ], - [ - "LowStar.BufferView.sel_upd1", - 1, - 0, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "bool_inversion", "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_FStar.Pervasives.Native.Mktuple3", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Seq.Properties.lseq", - "equation_FStar.Seq.Properties.split", "equation_FStar.UInt.max_int", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.inverses", - 
"equation_LowStar.BufferView.length", - "equation_LowStar.BufferView.sel", - "equation_LowStar.BufferView.split_at_i", - "equation_LowStar.BufferView.upd", "equation_Prims.logical", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_LowStar.BufferView.__proj__View__item__get", - "function_token_typing_Prims.logical", "int_inversion", "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_eq_elim", - "lemma_FStar.Seq.Base.lemma_eq_refl", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_FStar.UInt.pow2_values", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "primitive_Prims.op_Division", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Modulus", "primitive_Prims.op_Multiply", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_get", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0dca70d6e10ecdb3fcef03a4d9930309", - "refinement_interpretation_Tm_refine_155f92e3c25ca20d3a5794c7425bd632", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "token_correspondence_LowStar.BufferView.__proj__View__item__put", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___4", - "typing_FStar.Seq.Base.append", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.BufferView.split_at_i" - ], - 0, - "41a6038b3a532e8b4f300e72c2b05aa0" - ], - [ - "LowStar.BufferView.lt_leq_mul", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.nat", - "int_inversion", "int_typing", "primitive_Prims.op_Addition", - "primitive_Prims.op_Multiply", "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_8233d76b57e95451540fc312b717fa79" - ], - 
0, - "f2338508d3635fa4fa68fb2120811b30" - ], - [ - "LowStar.BufferView.sel_upd2", - 1, - 0, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "LowStar.BufferView_interpretation_Tm_ghost_arrow_23c7f1237b286d1dbfd156da0bd156c4", - "b2t_def", "bool_inversion", "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_FStar.Pervasives.Native.Mktuple3", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Seq.Properties.lseq", - "equation_FStar.Seq.Properties.split", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.length", - "equation_LowStar.BufferView.sel", - "equation_LowStar.BufferView.split_at_i", - "equation_LowStar.BufferView.upd", - "equation_LowStar.Monotonic.Buffer.length", "equation_Prims.logical", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_Prims.logical", "int_inversion", "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_eq_elim", - "lemma_FStar.Seq.Base.lemma_eq_intro", - "lemma_FStar.Seq.Base.lemma_eq_refl", - "lemma_FStar.Seq.Base.lemma_index_app1", - "lemma_FStar.Seq.Base.lemma_index_app2", - "lemma_FStar.Seq.Base.lemma_index_slice", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_FStar.Seq.Properties.slice_slice", - "lemma_FStar.UInt.pow2_values", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Division", "primitive_Prims.op_LessThan", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Modulus", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_155f92e3c25ca20d3a5794c7425bd632", - "refinement_interpretation_Tm_refine_19269cc1321ab930659842dd89087ebb", - "refinement_interpretation_Tm_refine_1ba8fd8bb363097813064c67740b2de5", - 
"refinement_interpretation_Tm_refine_35a0739c434508f48d0bb1d5cd5df9e8", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_6a8bad24b42f72c3930cd7835eb2ac4c", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_889f01bf859912af1db7b4ca8ac1e6d3", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_ac201cf927190d39c033967b63cb957b", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d3d07693cd71377864ef84dc97d10ec1", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_LowStar.BufferView.__proj__View__item__put", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___4", - "typing_FStar.Seq.Base.append", "typing_FStar.Seq.Base.length", - "typing_FStar.UInt32.v", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.BufferView.split_at_i", - "typing_LowStar.Monotonic.Buffer.len" - ], - 0, - "a0ee7ec1dfd6a250e71f4bd29828150d" - ], - [ - "LowStar.BufferView.sel_upd2", - 2, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "81b76254c0b127528ed9b19372705b20" - ], - [ - "LowStar.BufferView.sel_upd", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_155f92e3c25ca20d3a5794c7425bd632", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "8c9799ad7e99ae075a560a8378c13364" - ], - [ - "LowStar.BufferView.sel_upd", - 2, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", "equation_Prims.eqtype", - "equation_Prims.nat", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Map.lemma_ContainsDom", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_155f92e3c25ca20d3a5794c7425bd632", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - 
"typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "ed5c028185ab74285bb2ca355da0a7ec" - ], - [ - "LowStar.BufferView.lemma_upd_with_sel", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_155f92e3c25ca20d3a5794c7425bd632" - ], - 0, - "b2f7e16050809a908538792677fc5473" - ], - [ - "LowStar.BufferView.lemma_upd_with_sel", - 2, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_LowStar.BufferView.View", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.inverses", - "equation_LowStar.BufferView.sel", "equation_LowStar.BufferView.upd", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", "int_inversion", - "lemma_FStar.Seq.Base.lemma_eq_refl", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_LowStar.BufferView.View_get", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_155f92e3c25ca20d3a5794c7425bd632", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "token_correspondence_LowStar.BufferView.__proj__View__item__get", - "token_correspondence_LowStar.BufferView.__proj__View__item__put", - "typing_LowStar.BufferView.get_view" - ], - 0, - "02c6f5aca77f2b65fb45de6766d9cc5f" - ], - [ - "LowStar.BufferView.upd_modifies", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "LowStar.BufferView_interpretation_Tm_ghost_arrow_23c7f1237b286d1dbfd156da0bd156c4", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Seq.Properties.lseq", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.upd", "equation_Prims.nat", - "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", "int_inversion", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_452dd5171388b32bcb531f5ed5ed479d", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - 
"refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "token_correspondence_LowStar.BufferView.__proj__View__item__put", - "typing_FStar.Seq.Base.append", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view" - ], - 0, - "f0b296cdae987578dcac5c2f0a7058c1" - ], - [ - "LowStar.BufferView.upd_equal_domains", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "LowStar.BufferView_interpretation_Tm_ghost_arrow_23c7f1237b286d1dbfd156da0bd156c4", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Seq.Properties.lseq", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.upd", "equation_Prims.nat", - "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", "int_inversion", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_452dd5171388b32bcb531f5ed5ed479d", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "token_correspondence_LowStar.BufferView.__proj__View__item__put", - "typing_FStar.Seq.Base.append", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view" - ], - 0, - "5af4d66af60114ba4127d786c80da32f" - ], - [ - "LowStar.BufferView.as_seq'", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "Prims_pretyping_ae567c2fb75be05905677af440075565", - "binder_x_709c51d42fb83ce2494a760a7ab175a8_4", - "binder_x_db88276a6748528fdd6e866d40cef350_3", - "binder_x_ed25b04ac1a3660bf4cdc8ae577888d8_2", - "binder_x_fe28d8bcde588226b4e538b35321de05_1", "bool_inversion", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Seq.Base.cons", "equation_FStar.Seq.Properties.lseq", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.length", "equation_Prims.eqtype", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", - 
"function_token_typing_Prims.__cache_version_number__", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", "int_inversion", - "int_typing", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_create_len", - "lemma_FStar.Seq.Base.lemma_len_append", - "primitive_Prims.op_Addition", "primitive_Prims.op_Equality", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_LowStar.BufferView.View_n", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_de40b7b01597bee6e1241dfaf5adf126", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Seq.Base.create", "typing_FStar.Seq.Base.length", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.BufferView.length", "well-founded-ordering-on-nat" - ], - 0, - "edf9083f48346b233bbfdb1c1b6e9bbc" - ], - [ - "LowStar.BufferView.as_seq'", - 2, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", "primitive_Prims.op_Subtraction", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_a671aac4724a1864e0199413b4ef9ef4" - ], - 0, - "5e79d1951b51fe9b7c0257923ab3c131" - ], - [ - "LowStar.BufferView.as_seq", - 1, - 0, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pervasives.Mkdtuple4", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.length", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.buffer_view", - "int_inversion", "int_typing", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "primitive_Prims.op_Division", "primitive_Prims.op_Subtraction", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_LowStar.BufferView.length" - ], - 0, - "8c089506c67d5e1089e2a12b7143a461" - ], - [ - "LowStar.BufferView.as_seq_sel", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Seq.Properties.lseq", - "equation_LowStar.BufferView.length", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "typing_LowStar.BufferView.as_seq" - ], - 0, - "e992c397c96ac39d96ad827d603bf556" - ], - [ - "LowStar.BufferView.as_seq_sel", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_LowStar.BufferView.as_seq_.fuel_instrumented", - "@fuel_irrelevance_LowStar.BufferView.as_seq_.fuel_instrumented", - "@query", - "LowStar.BufferView_interpretation_Tm_ghost_arrow_3f464e6d64e344ed4c0f6759a36927ff", - "Prims_interpretation_Tm_ghost_arrow_0283b8a2a36bbec52abac4e3d837674a", - "Prims_pretyping_ae567c2fb75be05905677af440075565", "bool_inversion", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_FStar.Pervasives.Native.Mktuple3", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - 
"equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Seq.Base.cons", "equation_FStar.Seq.Properties.lseq", - "equation_FStar.Seq.Properties.split", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.as_seq", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.length", - "equation_LowStar.BufferView.sel", - "equation_LowStar.BufferView.split_at_i", "equation_Prims.nat", - "equation_Prims.pos", - "equation_with_fuel_LowStar.BufferView.as_seq_.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_Prims.__cache_version_number__", - "int_inversion", "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_create_len", - "lemma_FStar.Seq.Base.lemma_index_app1", - "lemma_FStar.Seq.Base.lemma_index_app2", - "lemma_FStar.Seq.Base.lemma_index_create", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "lemma_LowStar.Monotonic.Buffer.length_null_1", - "primitive_Prims.op_Addition", "primitive_Prims.op_Equality", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_LowStar.BufferView.View_get", - "proj_equation_LowStar.BufferView.View_n", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_06aa1b6fbc206f92bd33509e825cf7ce", - "refinement_interpretation_Tm_refine_53de2fffa6adf37dbdb63ca6e1d1a732", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ac201cf927190d39c033967b63cb957b", - "refinement_interpretation_Tm_refine_b482e4205debc4fb578095aa76c5597e", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "token_correspondence_LowStar.BufferView.__proj__View__item__get", - "token_correspondence_LowStar.BufferView.as_seq_.fuel_instrumented", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Seq.Base.append", "typing_FStar.Seq.Base.create", - "typing_FStar.Seq.Base.length", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.as_seq", - "typing_LowStar.BufferView.as_seq_", - "typing_LowStar.BufferView.get_view", - 
"typing_LowStar.BufferView.split_at_i", - "well-founded-ordering-on-nat" - ], - 0, - "a102bd099fd276eb06ac9a4a77498802" - ], - [ - "LowStar.BufferView.get_sel", - 1, - 0, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "bool_inversion", "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_FStar.Pervasives.Native.Mktuple3", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Seq.Properties.lseq", - "equation_FStar.Seq.Properties.split", "equation_FStar.UInt.max_int", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.split_at_i", "equation_Prims.logical", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_Prims.logical", "int_inversion", "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_FStar.UInt.pow2_values", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_Division", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Modulus", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_get", - "proj_equation_LowStar.BufferView.View_n", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e2e87089d09d3a765e6639034e237550", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___1", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___2", - 
"typing_FStar.Pervasives.__proj__Mkdtuple4__item___3", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___4", - "typing_FStar.Seq.Base.append", "typing_FStar.Seq.Base.length", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.BufferView.split_at_i" - ], - 0, - "0fa18e267ea67939a3539ca08abdf9a0" - ], - [ - "LowStar.BufferView.get_sel", - 2, - 0, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "LowStar.BufferView_interpretation_Tm_ghost_arrow_23c7f1237b286d1dbfd156da0bd156c4", - "bool_inversion", "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_FStar.Pervasives.Native.Mktuple3", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Seq.Properties.lseq", - "equation_FStar.Seq.Properties.split", "equation_FStar.UInt.max_int", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.length", - "equation_LowStar.BufferView.sel", - "equation_LowStar.BufferView.split_at_i", - "equation_LowStar.Monotonic.Buffer.length", "equation_Prims.nat", - "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_FStar.Seq.Properties.slice_slice", - "lemma_FStar.UInt.pow2_values", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_Division", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Modulus", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_get", - "proj_equation_LowStar.BufferView.View_n", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_1ba8fd8bb363097813064c67740b2de5", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - 
"refinement_interpretation_Tm_refine_d3d07693cd71377864ef84dc97d10ec1", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Seq.Base.append", "typing_FStar.Seq.Base.length", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.BufferView.length", "typing_LowStar.BufferView.sel", - "typing_LowStar.BufferView.split_at_i", - "typing_LowStar.Monotonic.Buffer.as_seq", - "typing_LowStar.Monotonic.Buffer.length" - ], - 0, - "c1929c9cc0a8cc29a0cadf3cf2178e0e" - ], - [ - "LowStar.BufferView.put_sel", - 1, - 0, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_FStar.Pervasives.Native.Mktuple3", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Seq.Properties.lseq", - "equation_FStar.Seq.Properties.split", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.split_at_i", "equation_Prims.logical", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_Prims.logical", "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_FStar.UInt.pow2_values", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_Division", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Modulus", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_get", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___1", - 
"typing_FStar.Pervasives.__proj__Mkdtuple4__item___2", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___3", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___4", - "typing_FStar.Seq.Base.append", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.BufferView.split_at_i" - ], - 0, - "fb8a82263c18fe49670ec5cf542684b5" - ], - [ - "LowStar.BufferView.put_sel", - 2, - 0, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "LowStar.BufferView_interpretation_Tm_ghost_arrow_23c7f1237b286d1dbfd156da0bd156c4", - "bool_inversion", "data_elim_FStar.Pervasives.Mkdtuple4", - "data_elim_FStar.Pervasives.Native.Mktuple3", - "data_elim_LowStar.BufferView.View", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Seq.Properties.lseq", - "equation_FStar.Seq.Properties.split", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", - "equation_LowStar.BufferView.as_buffer", - "equation_LowStar.BufferView.as_buffer_t", - "equation_LowStar.BufferView.buffer", - "equation_LowStar.BufferView.get_view", - "equation_LowStar.BufferView.inverses", - "equation_LowStar.BufferView.length", - "equation_LowStar.BufferView.sel", - "equation_LowStar.BufferView.split_at_i", - "equation_LowStar.Monotonic.Buffer.length", "equation_Prims.logical", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Pervasives.dtuple4", - "fuel_guarded_inversion_LowStar.BufferView.view", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_Prims.logical", "int_inversion", "int_typing", - "interpretation_Tm_abs_2f52ee49857d060e683109f5fabb70fa", - "interpretation_Tm_abs_5883f925e987abdd8f9f3acf72c807b8", - "interpretation_Tm_abs_a8f4e254db91d3480d350b4b0b5a6525", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_len_append", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_FStar.Seq.Properties.slice_slice", - "lemma_FStar.UInt.pow2_values", - "lemma_LowStar.Monotonic.Buffer.length_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_Division", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Modulus", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pervasives.Mkdtuple4__1", - "proj_equation_FStar.Pervasives.Mkdtuple4__2", - "proj_equation_FStar.Pervasives.Mkdtuple4__3", - "proj_equation_FStar.Pervasives.Mkdtuple4__4", - "proj_equation_LowStar.BufferView.View_get", - "proj_equation_LowStar.BufferView.View_n", - "proj_equation_LowStar.BufferView.View_put", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__2", - "projection_inverse_FStar.Pervasives.Native.Mktuple3__3", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_1ba8fd8bb363097813064c67740b2de5", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - 
"refinement_interpretation_Tm_refine_a0cd7d06c5da6444b6b51b319febde8e", - "refinement_interpretation_Tm_refine_ab9eafcc4927425d2111559b51b7181f", - "refinement_interpretation_Tm_refine_d1e860561cb285683e38dc83e83caadd", - "refinement_interpretation_Tm_refine_d3d07693cd71377864ef84dc97d10ec1", - "refinement_interpretation_Tm_refine_d88f55d519e24baa891585a690bcfcfc", - "refinement_interpretation_Tm_refine_e868304d22002dcb6abc5eb4206665d3", - "token_correspondence_LowStar.BufferView.__proj__View__item__get", - "token_correspondence_LowStar.BufferView.__proj__View__item__put", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pervasives.__proj__Mkdtuple4__item___4", - "typing_FStar.Seq.Base.append", "typing_FStar.Seq.Base.length", - "typing_LowStar.BufferView.__proj__BufferView__item__v", - "typing_LowStar.BufferView.as_buffer", - "typing_LowStar.BufferView.get_view", - "typing_LowStar.BufferView.sel", - "typing_LowStar.BufferView.split_at_i", - "typing_LowStar.Monotonic.Buffer.as_seq", - "typing_LowStar.Monotonic.Buffer.length" - ], - 0, - "5989246c5428e47278441bebea89a0e7" - ] - ] -] \ No newline at end of file diff --git a/ulib/fstar.include b/ulib/fstar.include index 05c2d2a67a2..1972d88b22e 100644 --- a/ulib/fstar.include +++ b/ulib/fstar.include @@ -1,3 +1,2 @@ legacy experimental -.cache diff --git a/ulib/legacy/FStar.Buffer.fst b/ulib/legacy/FStar.Buffer.fst index 1911536b625..1bf7092d5db 100644 --- a/ulib/legacy/FStar.Buffer.fst +++ b/ulib/legacy/FStar.Buffer.fst @@ -1152,7 +1152,6 @@ let lemma_modifies_one_trans_1 (#a:Type) (b:buffer a) (h0:mem) (h1:mem) (h2:mem) [SMTPat (modifies_one (frameOf b) h0 h1); SMTPat (modifies_one (frameOf b) h1 h2)] = () -#reset-options "--z3rlimit 100 --max_fuel 0 --max_ifuel 0 --initial_fuel 0 --initial_ifuel 0" (** Corresponds to memcpy *) val blit: #t:Type @@ -1170,6 +1169,8 @@ val blit: #t:Type Seq.slice (as_seq h0 b) 0 (v idx_b) /\ Seq.slice (as_seq h1 b) (v idx_b+v len) (length b) == Seq.slice (as_seq h0 b) (v idx_b+v len) (length b) )) + +#push-options "--z3rlimit 150 --max_fuel 0 --max_ifuel 0 --initial_fuel 0 --initial_ifuel 0" #restart-solver let rec blit #t a idx_a b idx_b len = let h0 = HST.get () in @@ -1185,6 +1186,7 @@ let rec blit #t a idx_a b idx_b len = Seq.cons_head_tail (Seq.slice (as_seq h0 b) (v idx_b + v len') (length b)); Seq.cons_head_tail (Seq.slice (as_seq h1 b) (v idx_b + v len') (length b)) end +#pop-options (** Corresponds to memset *) val fill: #t:Type diff --git a/ulib/legacy/FStar.Pointer.Base.fst.hints b/ulib/legacy/FStar.Pointer.Base.fst.hints deleted file mode 100644 index 03afddc2a9f..00000000000 --- a/ulib/legacy/FStar.Pointer.Base.fst.hints +++ /dev/null @@ -1,14359 +0,0 @@ -[ - "W|\u0011pA\u0011T", - [ - [ - "FStar.Pointer.Base.typ", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "assumption_FStar.Pervasives.Native.tuple2__uu___haseq", - "assumption_FStar.Pointer.Base.base_typ__uu___haseq", - "assumption_Prims.list__uu___haseq", - "equation_FStar.Pointer.Base.array_length_t", - "equation_Prims.eqtype", "function_token_typing_Prims.string", - "haseqTm_refine_9e7f68c38e43484e77069094f4fd88d3", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "typing_FStar.UInt32.t" - ], - 0, - "a89bb0405a833a126d7cb9aac4ceb36a" - ], - [ - 
"FStar.Pointer.Base.__proj__TBase__item__b", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_ceeec9534506f8e48ea73c2ad07d25c7" - ], - 0, - "a4a0699f4f7adb29d9dbf679b1620681" - ], - [ - "FStar.Pointer.Base.__proj__TStruct__item__l", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_8d4b0fe883393e3eac6a07f0ac5a4b2f" - ], - 0, - "6c39c686bad84ee3d05cfc276bc7d333" - ], - [ - "FStar.Pointer.Base.__proj__TUnion__item__l", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_5f786badef06e062d5b9e79ef882a955" - ], - 0, - "26373693c1923d11dfca59af43ed7246" - ], - [ - "FStar.Pointer.Base.__proj__TArray__item__length", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_e1353961fd147e6e478e4b8ac623b245" - ], - 0, - "76e19f1bd0c6b4d1f2dab0ba4b5343b0" - ], - [ - "FStar.Pointer.Base.__proj__TArray__item__t", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_e1353961fd147e6e478e4b8ac623b245" - ], - 0, - "89852ff2dc522d37b44936f3aa5c22bb" - ], - [ - "FStar.Pointer.Base.__proj__TPointer__item__t", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_a6915920d178273a967ebb7c9bb3e606" - ], - 0, - "cd075efde3e4ac028755e9131ff0b12e" - ], - [ - "FStar.Pointer.Base.__proj__TNPointer__item__t", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_31a28047608506db806a8e40b64a987a" - ], - 0, - "c562ee736a247dffddca0936073e0951" - ], - [ - "FStar.Pointer.Base.__proj__TBuffer__item__t", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_351ed0bf36be35253b8142000c255418" - ], - 0, - "a443aa9a467e6811e287b450a0608179" - ], - [ - "FStar.Pointer.Base.typ_of_struct_field'", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "proj_equation_FStar.Pervasives.Native.Some_v", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462" - ], - 0, - "00af266fd93897e9b9a52c57374c3dee" - ], - [ - "FStar.Pointer.Base.typ_of_struct_field", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "subterm_ordering_FStar.Pointer.Base.Mkstruct_typ" - ], - 0, - "7f84b7b35a201c46ede2390b3506ba50" - ], - [ - "FStar.Pointer.Base.typ_of_union_field", - 1, - 2, - 1, - [ "@query", "equation_FStar.Pointer.Base.union_typ" ], - 0, - "fc87bb364ac468de6d31ccaabc3dd6c2" - ], - [ - "FStar.Pointer.Base.typ_depth", - 1, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", - "disc_equation_Prims.Cons", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - 
"primitive_Prims.op_Addition", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "subterm_ordering_FStar.Pointer.Base.Mkstruct_typ", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion" - ], - 0, - "ec7a2fe9d1c08f583f2c1a0dfa306494" - ], - [ - "FStar.Pointer.Base.typ_depth", - 2, - 2, - 1, - [ - "@MaxIFuel_assumption", "@query", - "binder_x_9faba42ab88b43fa963967c6d9fe0a56_0", - "disc_equation_Prims.Cons", "disc_equation_Prims.Nil", - "fuel_guarded_inversion_Prims.list", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pervasives.Native.Mktuple2", - "subterm_ordering_Prims.Cons" - ], - 0, - "e1e43c213d7f5e2bdc6fce78796b6dee" - ], - [ - "FStar.Pointer.Base.typ_depth_typ_of_struct_field", - 1, - 2, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.assoc.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.struct_typ_depth.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.typ_depth.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.assoc.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct_typ_depth.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.typ_depth.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "binder_x_126cca1e1e8f0a1c80ceb096c6921769_1", - "binder_x_900c0c0bdaba6baf3cf61f417292d80e_0", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_Prims.Cons", "constructor_distinct_Tm_unit", - "disc_equation_Prims.Cons", "equation_FStar.Pervasives.Native.fst", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.assoc.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.mem.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.struct_typ_depth.fuel_instrumented", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_GreaterThan", - "proj_equation_FStar.Pervasives.Native.Mktuple2__1", - "proj_equation_FStar.Pervasives.Native.Some_v", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", 
- "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "subterm_ordering_Prims.Cons", - "token_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "token_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "token_correspondence_FStar.Pervasives.Native.fst", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.List.Tot.Base.noRepeats" - ], - 0, - "51907aedcfaa132324796ba450cfbd37" - ], - [ - "FStar.Pointer.Base.step", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "int_inversion", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.UInt32.v" - ], - 0, - "860c272b48df39a63549b5a165046236" - ], - [ - "FStar.Pointer.Base.step", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "assumption_FStar.Pointer.Base.struct_typ__uu___haseq", - "assumption_FStar.Pointer.Base.typ__uu___haseq", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "function_token_typing_Prims.string", - "haseqTm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "typing_FStar.Pointer.Base.struct_field", "typing_FStar.UInt32.t" - ], - 0, - "c4f87c95ba7003c7a657a39246cb13aa" - ], - [ - "FStar.Pointer.Base.__proj__StepField__item__l", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_2dbe21b033c495ad2b2877ddd3e5ea79" - ], - 0, - "650a6ec9482d5edb248a670bf92f610d" - ], - [ - "FStar.Pointer.Base.__proj__StepField__item__fd", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.StepField", - "refinement_interpretation_Tm_refine_2dbe21b033c495ad2b2877ddd3e5ea79" - ], - 0, - "30413adf50bce7e8fa9fbabd1aefb684" - ], - [ - "FStar.Pointer.Base.__proj__StepUField__item__l", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_18db3388ad3501cc905ccdbbe4d4bb12" - ], - 0, - "7bea98c114e03fb106e5d248f37de89f" - ], - [ - "FStar.Pointer.Base.__proj__StepUField__item__fd", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.StepUField", - "refinement_interpretation_Tm_refine_18db3388ad3501cc905ccdbbe4d4bb12" - ], - 0, - "4fa19a0f29f48fd8249d31b35c2d372e" - ], - [ - "FStar.Pointer.Base.__proj__StepCell__item__length", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_1ab1884987751eda87f4e41402dd7908" - ], - 0, - "3b24c387953a4b39680ecdf1d5028906" - ], - [ - "FStar.Pointer.Base.__proj__StepCell__item__value", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_1ab1884987751eda87f4e41402dd7908" - ], - 0, - 
"da430231e3ae300f846ea8d5d8d9bf01" - ], - [ - "FStar.Pointer.Base.__proj__StepCell__item__index", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.StepCell", - "refinement_interpretation_Tm_refine_1ab1884987751eda87f4e41402dd7908" - ], - 0, - "0e022219feae7e6e58ecca768bec0ed2" - ], - [ - "FStar.Pointer.Base.path", - 1, - 1, - 1, - [ - "@query", "assumption_FStar.Pointer.Base.step__uu___haseq", - "assumption_FStar.Pointer.Base.typ__uu___haseq" - ], - 0, - "c8fa00f0fdc2695b0192d63d09052d5b" - ], - [ - "FStar.Pointer.Base.__proj__PathStep__item__through", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_fbf57e97003df92db58cab134885be65" - ], - 0, - "312f302174cd50b7c1cd5aa71f15aea9" - ], - [ - "FStar.Pointer.Base.__proj__PathStep__item__to", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_fbf57e97003df92db58cab134885be65" - ], - 0, - "cd390085cdfeaf31fa4b492b8cc49b24" - ], - [ - "FStar.Pointer.Base.__proj__PathStep__item__p", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathStep", - "refinement_interpretation_Tm_refine_fbf57e97003df92db58cab134885be65" - ], - 0, - "90d10fc2ae998d449c02ed99bdbb6fcd" - ], - [ - "FStar.Pointer.Base.__proj__PathStep__item__s", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathStep", - "refinement_interpretation_Tm_refine_fbf57e97003df92db58cab134885be65" - ], - 0, - "ba3fc728958e6e5c2ec3f2392d4d7b49" - ], - [ - "FStar.Pointer.Base.step_typ_depth", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.struct_typ_depth.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.typ_depth.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct_typ_depth.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.typ_depth.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "disc_equation_FStar.Pointer.Base.StepField", - "disc_equation_FStar.Pointer.Base.StepUField", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_with_fuel_FStar.Pointer.Base.typ_depth.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "primitive_Prims.op_Addition", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l" - ], - 0, - "e85f24478330420d06258769f93c52a5" - ], - [ - "FStar.Pointer.Base.path_typ_depth", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_2", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", "equation_Prims.eqtype", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "projection_inverse_BoxBool_proj_0", - 
"projection_inverse_FStar.Pointer.Base.PathBase_from", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "5534fe6080f84220c028321622ae8e95" - ], - [ - "FStar.Pointer.Base.path_typ_depth", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "42c410be49a80086a3251fa62a6c74b9" - ], - [ - "FStar.Pointer.Base.__proj__Pointer__item__from", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6" - ], - 0, - "c692b1ee249c474c8043dfd50ced19f9" - ], - [ - "FStar.Pointer.Base.__proj__Pointer__item__contents", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6" - ], - 0, - "e1eae7eae1ec6a3a7649bbbdb37fa13b" - ], - [ - "FStar.Pointer.Base.__proj__Pointer__item__p", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6" - ], - 0, - "b150040e19338e3903b6492f4eb05498" - ], - [ - "FStar.Pointer.Base.g_is_null_intro", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.NullPtr", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.nullptr", - "projection_inverse_FStar.Pointer.Base.NullPtr_to" - ], - 0, - "6e411af6852e71b7afb97861db45e3bd" - ], - [ - "FStar.Pointer.Base.not_an_array_cell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0" - ], - 0, - "22211b9fd07f8bbd2c91ef3121f343f5" - ], - [ - "FStar.Pointer.Base.__proj__BufferRootSingleton__item__p", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_422fa8e43652f7856994c0adb062c739" - ], - 0, - "bf35d2759429c7fccdf3667755744a54" - ], - [ - "FStar.Pointer.Base.__proj__BufferRootArray__item__max_length", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_56cada75f3b65fbca41629ee43be6d8c" - ], - 0, - "8e193007561cc6dfc0ce020477ea1dcd" - ], - [ - "FStar.Pointer.Base.__proj__BufferRootArray__item__p", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_56cada75f3b65fbca41629ee43be6d8c" - ], - 0, - "409990c3a7db5e3b1e49ba326b882e85" - ], - [ - "FStar.Pointer.Base.buffer_root_length", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "1f62946bec04e98e0f24c5cb17fb18e5" - ], - [ - "FStar.Pointer.Base.type_of_base_typ", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.TBool", - "disc_equation_FStar.Pointer.Base.TChar", - "disc_equation_FStar.Pointer.Base.TInt", - "disc_equation_FStar.Pointer.Base.TInt16", - "disc_equation_FStar.Pointer.Base.TInt32", - 
"disc_equation_FStar.Pointer.Base.TInt64", - "disc_equation_FStar.Pointer.Base.TInt8", - "disc_equation_FStar.Pointer.Base.TUInt", - "disc_equation_FStar.Pointer.Base.TUInt16", - "disc_equation_FStar.Pointer.Base.TUInt32", - "disc_equation_FStar.Pointer.Base.TUInt64", - "disc_equation_FStar.Pointer.Base.TUInt8", - "disc_equation_FStar.Pointer.Base.TUnit", - "fuel_guarded_inversion_FStar.Pointer.Base.base_typ", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "c57a4e123a7d18992f062ceb96fc1129" - ], - [ - "FStar.Pointer.Base.array", - 1, - 1, - 1, - [ "@query" ], - 0, - "1e0fc8a84b1e4a92e37278c392af2745" - ], - [ - "FStar.Pointer.Base.type_of_struct_field''", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.assoc.fuel_instrumented", - "@query", "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "proj_equation_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29" - ], - 0, - "72ffc617fdd38d664ffd8c401aa1c7d5" - ], - [ - "FStar.Pointer.Base.type_of_struct_field'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.struct_typ_", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "subterm_ordering_FStar.Pointer.Base.Mkstruct_typ" - ], - 0, - "5d2e82622c4d71b0053185b93147b55d" - ], - [ - "FStar.Pointer.Base.gtdata_get_value", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base.gtdata_get_key", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2" - ], - 0, - "b480176738725e51c03c0040ccd39504" - ], - [ - "FStar.Pointer.Base.gtdata_create", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.gtdata_get_value", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2" - ], - 0, - "2e9c52ff2415abfe9eba049c1e6599ae" - ], - [ - "FStar.Pointer.Base.gtdata_extensionality", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.gtdata_get_value", - "fuel_guarded_inversion_Prims.dtuple2", - "proj_equation_Prims.Mkdtuple2__1" - ], - 0, - "2ed6d5f6dd4f95c1745f92328503658e" - ], - [ - "FStar.Pointer.Base.type_of_typ'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TBase", - "disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "equation_FStar.Pointer.Base.union_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - 
"subterm_ordering_FStar.Pointer.Base.TUnion" - ], - 0, - "2626f3f742cca105cb9da0d9e333ec82" - ], - [ - "FStar.Pointer.Base.type_of_typ'", - 2, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.union_typ" ], - 0, - "d032b31ded66e628486cc810b1a00bb5" - ], - [ - "FStar.Pointer.Base.type_of_typ", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TBase", - "disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.TArray" - ], - 0, - "3f09ced943c95c81d3074a6c3fe2f096" - ], - [ - "FStar.Pointer.Base.type_of_typ_array", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.TArray", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t" - ], - 0, - "0b65236c77dcbf2f351f059b381a9bad" - ], - [ - "FStar.Pointer.Base.type_of_typ_type_of_struct_field", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "interpretation_Tm_abs_2bc9591a7ae56c98cef4b0180a729165", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_" - ], - 0, - "348e4d97e55e90522dd5986c1d783d25" - ], - [ - "FStar.Pointer.Base.type_of_typ'_eq", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.union.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.union.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TBuffer", - "constructor_distinct_FStar.Pointer.Base.TNPointer", - "constructor_distinct_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TArray", - 
"disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TBuffer", - "subterm_ordering_FStar.Pointer.Base.TNPointer", - "subterm_ordering_FStar.Pointer.Base.TPointer" - ], - 0, - "6ce874f4a10af19407d0aa962fe24c40" - ], - [ - "FStar.Pointer.Base.struct_sel", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.type_of_typ__eq", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8" - ], - 0, - "a33d8e57eb8611510061a7801d1291bd" - ], - [ - "FStar.Pointer.Base.struct_literal_wf", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "assumption_Prims.list__uu___haseq", "equation_Prims.eqtype", - "function_token_typing_Prims.string", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "8fbaa4a55d574600e76c18b9129b8070" - ], - [ - "FStar.Pointer.Base.fun_of_list", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.count.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.sortWith.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.sortWith.fuel_instrumented", - "@query", - "FStar.List.Tot.Base_interpretation_Tm_arrow_9877f854fbaabbcfda94f6c19b32ae3f", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_interpretation_Tm_arrow_28e00c6cddbebec863cfb4ba46afe386", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.String_interpretation_Tm_arrow_77650534d172f9f4bbf4f147268736a3", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "Prims_pretyping_3862c4e8ff39bfc3871b6a47e7ff5b2e", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "bool_inversion", - "constructor_distinct_Prims.Cons", "constructor_distinct_Prims.Nil", - "constructor_distinct_Prims.list", "constructor_distinct_Prims.unit", - "data_elim_Prims.Cons", "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.dfst_struct_field", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_literal", - 
"equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_Prims.eqtype", "equation_Prims.nat", - "equation_with_fuel_FStar.List.Tot.Base.count.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.sortWith.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.dfst_struct_field", - "function_token_typing_FStar.String.compare", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "kinding_Prims.dtuple2@tok", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_Negation", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Nil_a", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_b0baa351b8dc6384c3594e50be109582", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "string_typing", - "token_correspondence_FStar.List.Tot.Base.count.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.dfst_struct_field", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.List.Tot.Base.noRepeats", - "typing_FStar.List.Tot.Base.sortWith", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field_", - "typing_Tm_abs_2373aec163852d9339944ec67b8394ad", "unit_typing" - ], - 0, - "d2c0511b19344853705edd916bf8fe65" - ], - [ - "FStar.Pointer.Base.struct_upd", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.type_of_typ__eq", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8" - ], - 0, - "5b11d137003361d9c757a1006a44e33e" - ], - [ - "FStar.Pointer.Base.struct_create_fun", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.type_of_typ__eq", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8" - ], - 
0, - "f2f0f18446e2b53fe3b2e4d876996ba4" - ], - [ - "FStar.Pointer.Base.struct_create", - 1, - 1, - 1, - [ "@query" ], - 0, - "50c937404d5e0d1dd862ef1a27b6534e" - ], - [ - "FStar.Pointer.Base.struct_sel_struct_create_fun", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@query", - "FStar.DependentMap_interpretation_Tm_arrow_a7d5cc170be69663c495e8582d2bc62a", - "FStar.Pointer.Base_interpretation_Tm_arrow_3039342fd2e0851a3664dcc7db386d3a", - "FStar.Pointer.Base_interpretation_Tm_arrow_488e6478d9a2851a48ceef25ab226f9c", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", - "data_elim_Prims.Cons", "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.struct_create_fun", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_sel", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pointer.Base.type_of_struct_field_", - "interpretation_Tm_abs_2bc9591a7ae56c98cef4b0180a729165", - "interpretation_Tm_abs_51105ee2e615de5655b7293d0759168b", - "lemma_FStar.DependentMap.sel_create", - "lemma_FStar.Pointer.Base.type_of_typ__eq", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field_", - "typing_Tm_abs_51105ee2e615de5655b7293d0759168b" - ], - 0, - "ed2870f993d7231be3de49b1cb7dbb80" - ], - [ - "FStar.Pointer.Base.union_get_value", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@query", "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.Pointer.Base.union_get_key", - "equation_FStar.Pointer.Base.union_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "interpretation_Tm_abs_2bc9591a7ae56c98cef4b0180a729165", - 
"interpretation_Tm_abs_51105ee2e615de5655b7293d0759168b", - "lemma_FStar.Pointer.Base.type_of_typ__eq", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.typ_of_struct_field_" - ], - 0, - "567e5db40a448534f7d5fa8b9e36ed02" - ], - [ - "FStar.Pointer.Base.union_create", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.type_of_typ__eq", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8" - ], - 0, - "dbcb4364ffda9433db5734d38799889b" - ], - [ - "FStar.Pointer.Base.dummy_val", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.union.fuel_instrumented", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.union.fuel_instrumented", - "@fuel_irrelevance_Prims.pow2.fuel_instrumented", "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "b2t_def", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", "bool_inversion", - "bool_typing", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.TBase", - "constructor_distinct_FStar.Pointer.Base.TBool", - "constructor_distinct_FStar.Pointer.Base.TBuffer", - "constructor_distinct_FStar.Pointer.Base.TChar", - "constructor_distinct_FStar.Pointer.Base.TInt", - "constructor_distinct_FStar.Pointer.Base.TInt16", - "constructor_distinct_FStar.Pointer.Base.TInt32", - "constructor_distinct_FStar.Pointer.Base.TInt64", - "constructor_distinct_FStar.Pointer.Base.TInt8", - "constructor_distinct_FStar.Pointer.Base.TNPointer", - "constructor_distinct_FStar.Pointer.Base.TPointer", - "constructor_distinct_FStar.Pointer.Base.TUInt", - "constructor_distinct_FStar.Pointer.Base.TUInt16", - "constructor_distinct_FStar.Pointer.Base.TUInt32", - "constructor_distinct_FStar.Pointer.Base.TUInt64", - "constructor_distinct_FStar.Pointer.Base.TUInt8", - "constructor_distinct_FStar.Pointer.Base.TUnit", - "constructor_distinct_Prims.Cons", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", - "data_elim_FStar.Pointer.Base.TPointer", - 
"data_typing_intro_FStar.Pointer.Base.NullPtr@tok", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "data_typing_intro_FStar.Pointer.Base.Pointer@tok", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TBase", - "disc_equation_FStar.Pointer.Base.TBool", - "disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TChar", - "disc_equation_FStar.Pointer.Base.TInt", - "disc_equation_FStar.Pointer.Base.TInt16", - "disc_equation_FStar.Pointer.Base.TInt32", - "disc_equation_FStar.Pointer.Base.TInt64", - "disc_equation_FStar.Pointer.Base.TInt8", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUInt", - "disc_equation_FStar.Pointer.Base.TUInt16", - "disc_equation_FStar.Pointer.Base.TUInt32", - "disc_equation_FStar.Pointer.Base.TUInt64", - "disc_equation_FStar.Pointer.Base.TUInt8", - "disc_equation_FStar.Pointer.Base.TUnion", - "disc_equation_FStar.Pointer.Base.TUnit", "disc_equation_Prims.Cons", - "equality_tok_FStar.Pointer.Base.TBool@tok", - "equality_tok_FStar.Pointer.Base.TChar@tok", - "equality_tok_FStar.Pointer.Base.TInt16@tok", - "equality_tok_FStar.Pointer.Base.TInt32@tok", - "equality_tok_FStar.Pointer.Base.TInt64@tok", - "equality_tok_FStar.Pointer.Base.TInt8@tok", - "equality_tok_FStar.Pointer.Base.TInt@tok", - "equality_tok_FStar.Pointer.Base.TUInt16@tok", - "equality_tok_FStar.Pointer.Base.TUInt32@tok", - "equality_tok_FStar.Pointer.Base.TUInt64@tok", - "equality_tok_FStar.Pointer.Base.TUInt8@tok", - "equality_tok_FStar.Pointer.Base.TUInt@tok", - "equality_tok_FStar.Pointer.Base.TUnit@tok", - "equation_FStar.Int.fits", "equation_FStar.Int.max_int", - "equation_FStar.Int.min_int", "equation_FStar.Int.size", - "equation_FStar.List.Tot.Base.hd", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.not_an_array_cell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_base_typ", - "equation_FStar.Pointer.Base.type_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.Pointer.Base.union_typ", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.eqtype", "equation_Prims.nat", "equation_Prims.pos", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.mem.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "equation_with_fuel_Prims.pow2.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.base_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.list", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_Prims.string", "int_typing", - "interpretation_Tm_abs_2bc9591a7ae56c98cef4b0180a729165", - "kinding_FStar.Pervasives.Native.tuple2@tok", - 
"kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Pointer.Base.type_of_typ_struct", - "lemma_FStar.Pointer.Base.type_of_typ_type_of_struct_field", - "lemma_FStar.Pointer.Base.type_of_typ_union", - "lemma_FStar.Seq.Base.lemma_create_len", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Minus", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_t", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TBase_b", - "projection_inverse_FStar.Pointer.Base.TBuffer_t", - "projection_inverse_FStar.Pointer.Base.TNPointer_t", - "projection_inverse_FStar.Pointer.Base.TPointer_t", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_a64ce837bc75d5399d5767b07092e72e", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "string_inversion", "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.Char.__char_of_int", "typing_FStar.List.Tot.Base.map", - "typing_FStar.List.Tot.Base.mem", - "typing_FStar.Monotonic.HyperStack.dummy_aref", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Pointer.Base.uu___is_TBase", - "typing_FStar.Pointer.Base.uu___is_TUInt", "typing_FStar.UInt.fits", - "typing_Prims.pow2", "unit_typing" - ], - 0, - "7e5bc155dc89721f3fabcd32579860d8" - ], - [ - "FStar.Pointer.Base.otype_of_typ", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TBase", - "disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion" - 
], - 0, - "5727ed9525750248a0260394c1160599" - ], - [ - "FStar.Pointer.Base.otype_of_typ_otype_of_struct_field", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.otype_of_struct_field", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_" - ], - 0, - "574df13cb2c41282ccacbaa2b9970bce" - ], - [ - "FStar.Pointer.Base.otype_of_typ_base", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.TBase", - "data_typing_intro_FStar.Pointer.Base.TBase@tok", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "projection_inverse_FStar.Pointer.Base.TBase_b" - ], - 0, - "72d2f89cbf5ddc6d15f07440b148f9ca" - ], - [ - "FStar.Pointer.Base.otype_of_typ_array", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.TArray", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t" - ], - 0, - "7045e49463ab893555608d72393ca2c7" - ], - [ - "FStar.Pointer.Base.ostruct_upd", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pervasives.Native.Some", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v" - ], - 0, - "fc8659be85c4cf95f0d9f3bc0df895fb" - ], - [ - "FStar.Pointer.Base.ostruct_create", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pervasives.Native.Some", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v" - ], - 0, - "1984b85aeb45a9bb78df97d05ba06e09" - ], - [ - "FStar.Pointer.Base.ounion_get_value", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.ounion_get_key", - "equation_FStar.Pointer.Base.struct_field", - "proj_equation_FStar.Pervasives.Native.Some_v" - ], - 0, - "db75e90b45002223bbbe15a3325c3353" - ], - [ - "FStar.Pointer.Base.struct_field_is_readable", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - 
"function_token_typing_FStar.Pointer.Base.otype_of_typ", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_" - ], - 0, - "14cf88092421d97871a8f712d42eebad" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_50ee5a23f17759eafa03d1be4a28778c_1", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "constructor_distinct_FStar.Pointer.Base.TBuffer", - "constructor_distinct_FStar.Pointer.Base.TNPointer", - "constructor_distinct_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TBase", - "disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.union_typ", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_base", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TBuffer_t", - "projection_inverse_FStar.Pointer.Base.TNPointer_t", - "projection_inverse_FStar.Pointer.Base.TPointer_t", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion", - "typing_FStar.Pointer.Base.typ_of_struct_field" - ], - 0, - "6416613933e819454416fbd8ac376962" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_struct_intro'", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.for_all.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.for_all.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_interpretation_Tm_arrow_2b6196d53ffacd624cfbed706c26d1dc", - "FStar.Pointer.Base_interpretation_Tm_arrow_bdc5e8c71a6f259f30c4b3e4051b956c", - "FStar.Set_interpretation_Tm_arrow_84543425b818e2d10a976186b8e8c250", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "bool_inversion", - "constructor_distinct_Prims.Cons", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", "data_elim_Prims.Cons", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.struct_field_is_readable", - "equation_Prims.eqtype", - 
"equation_with_fuel_FStar.List.Tot.Base.for_all.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.struct_field_is_readable", - "function_token_typing_Prims.string", - "interpretation_Tm_abs_a95ce6085183f3c79c9cc46b789489e0", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "primitive_Prims.op_AmpAmp", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_1588d9cc9cb691265bea0ce2c66abf3c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_b86319999df90ce930c473bc87611673", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "token_correspondence_FStar.Pervasives.Native.fst", - "tot_fun_Tm_abs_a95ce6085183f3c79c9cc46b789489e0", - "typing_FStar.List.Tot.Base.for_all", - "typing_FStar.List.Tot.Base.map", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.ovalue_is_readable" - ], - 0, - "3cc7d791a4e35e956e5dc3665031dc99" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_struct_intro", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.memP.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.memP.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "bool_inversion", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", "data_elim_Prims.Cons", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_field_is_readable", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_Prims.eqtype", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "function_token_typing_Prims.string", - "interpretation_Tm_abs_a95ce6085183f3c79c9cc46b789489e0", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.List.Tot.Properties.mem_memP", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - 
"refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields" - ], - 0, - "cdbfc6c453abf94d2bb2fddf32d10910" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_struct_elim", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.for_all.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.memP.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.for_all.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.memP.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_interpretation_Tm_arrow_7e92cc1c04700553193b6f7c7895fa62", - "FStar.Set_interpretation_Tm_arrow_84543425b818e2d10a976186b8e8c250", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "bool_inversion", - "bool_typing", "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_Prims.Cons", "data_elim_Prims.Cons", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_field_is_readable", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.for_all.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.mem.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "function_token_typing_Prims.string", - "interpretation_Tm_abs_a95ce6085183f3c79c9cc46b789489e0", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.List.Tot.Properties.mem_memP", - "primitive_Prims.op_AmpAmp", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_b86319999df90ce930c473bc87611673", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - 
"string_inversion", - "token_correspondence_FStar.Pervasives.Native.fst", - "token_correspondence_FStar.Pointer.Base.otype_of_typ", - "token_correspondence_FStar.Pointer.Base.struct_field_is_readable", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.struct_field_is_readable" - ], - 0, - "582dc0be0021368a228ca2c0ca29dd03" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_array_elim", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@query", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "FStar.Set_interpretation_Tm_arrow_84543425b818e2d10a976186b8e8c250", - "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.TArray", - "data_elim_FStar.Pervasives.Native.Some", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Seq.Properties.for_all", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "fuel_token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented_token", - "int_inversion", "lemma_FStar.Pointer.Base.otype_of_typ_array", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pervasives.Native.Some_v", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_307fd373d8b3749096cf164b41cf1984", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Seq.Properties.for_all", "typing_FStar.UInt32.v" - ], - 0, - "9b5005d8a38fda5902e69f4b9a533969" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_array_intro", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.TArray", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pervasives.Native.None", - 
"disc_equation_FStar.Pervasives.Native.Some", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Seq.Properties.for_all", - "equation_FStar.Seq.Properties.seq_find", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "fuel_token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented_token", - "int_inversion", - "interpretation_Tm_abs_e818836335067047224d0c19c4cabb2d", - "lemma_FStar.Pervasives.invertOption", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Negation", - "proj_equation_FStar.Pervasives.Native.Some_v", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_1dff1edeebf3475c72da4da95fdfe2d7", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5e8ab89510578a938a38bd5dfb813b93", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "typing_FStar.Pervasives.Native.uu___is_Some", - "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Seq.Base.index", - "typing_FStar.Seq.Properties.seq_find", "typing_FStar.UInt32.v", - "typing_Tm_abs_e818836335067047224d0c19c4cabb2d" - ], - 0, - "0c63285d60c1ecb247fe391d333dbcb4" - ], - [ - "FStar.Pointer.Base.ostruct_field_of_struct_field", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "lemma_FStar.Pointer.Base.otype_of_typ_otype_of_struct_field", - "lemma_FStar.Pointer.Base.type_of_typ_type_of_struct_field", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", "typing_FStar.Pointer.Base.struct_sel", - "typing_FStar.Pointer.Base.typ_of_struct_field" - ], - 0, - "7dcc5a564791ea6c941e105d5b6e589e" - ], - [ - "FStar.Pointer.Base.seq_init_index", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.nat", - "int_inversion", "lemma_FStar.Seq.Base.init_index_", - "lemma_FStar.Seq.Base.lemma_init_len", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2" - ], - 0, - "e26eafb765eb2e06b8e642b69d6c3463" - ], - [ - 
"FStar.Pointer.Base.ovalue_of_value", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.union.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.union.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_a54ccb37fa50163db51a6d983062cade", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_daa2134f2832cd40b961ec5232d07ee3_1", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "data_typing_intro_FStar.Pervasives.Native.Some@tok", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "equation_FStar.Pointer.Base._union_get_key", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.union_get_key", - "equation_FStar.Pointer.Base.union_typ", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_otype_of_struct_field", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Pointer.Base.type_of_typ_struct", - "lemma_FStar.Pointer.Base.type_of_typ_union", - "lemma_FStar.Seq.Base.lemma_init_len", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "refinement_interpretation_Tm_refine_1dff1edeebf3475c72da4da95fdfe2d7", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_b86319999df90ce930c473bc87611673", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "subterm_ordering_FStar.Pointer.Base.Mkstruct_typ", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion", - "token_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base._union_get_key", - "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.typ_of_struct_field_", - 
"typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Seq.Base.length" - ], - 0, - "b8a98fe543f77cd95086dd6202f4275f" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_ostruct_field_of_struct_field", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", - "equation_FStar.Pointer.Base.ostruct_field_of_struct_field", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "function_token_typing_FStar.Pointer.Base.ovalue_of_value", - "lemma_FStar.Pointer.Base.type_of_typ_type_of_struct_field", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.otype_of_struct_field", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.Pointer.Base.struct_sel", - "typing_FStar.Pointer.Base.typ_of_struct_field" - ], - 0, - "c617bd5706dbd9cf7f06fba7b5633828" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_ovalue_of_value", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.union.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.union.fuel_instrumented", - "@query", - "FStar.DependentMap_interpretation_Tm_arrow_a7d5cc170be69663c495e8582d2bc62a", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_interpretation_Tm_arrow_3148b61c0b10b09d9fa8c898186315b9", - "FStar.Pointer.Base_interpretation_Tm_arrow_488e6478d9a2851a48ceef25ab226f9c", - "FStar.Pointer.Base_interpretation_Tm_arrow_4922c0d39a14074f92b751ffa781c0f9", - "FStar.Pointer.Base_interpretation_Tm_arrow_790b9eca5f7946b6b483f2755867c0f6", - "FStar.Pointer.Base_interpretation_Tm_arrow_9973fa91578be0bb3fb4c2414d28840c", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - 
"FStar.Pointer.Base_interpretation_Tm_arrow_b84a9cfad13319928b2ca9ddcae894f8", - "FStar.Pointer.Base_interpretation_Tm_arrow_d80a4731200c46869c60012a694e5ece", - "FStar.Pointer.Base_interpretation_Tm_arrow_ff2c816033059072d4e58d0e2307b681", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "b2t_def", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_daa2134f2832cd40b961ec5232d07ee3_1", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Prims.Cons", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", "data_elim_Prims.Cons", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base._union_get_key", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.ostruct_create", - "equation_FStar.Pointer.Base.ostruct_field_of_struct_field", - "equation_FStar.Pointer.Base.ostruct_sel", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.ounion_create", - "equation_FStar.Pointer.Base.ounion_get_key", - "equation_FStar.Pointer.Base.ounion_get_value", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_sel", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.Pointer.Base.union_get_key", - "equation_FStar.Pointer.Base.union_typ", - "equation_FStar.Seq.Properties.for_all", - "equation_FStar.Seq.Properties.seq_find", "equation_FStar.UInt.fits", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.eqtype", "equation_Prims.nat", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.list", - "fuel_token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented_token", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.ostruct_field_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "function_token_typing_FStar.Pointer.Base.ovalue_of_value", - "function_token_typing_FStar.Pointer.Base.type_of_struct_field_", - 
"function_token_typing_Prims.string", "int_inversion", - "interpretation_Tm_abs_06fa7cc7c07a065ab8c0a8e3140994af", - "interpretation_Tm_abs_2bc9591a7ae56c98cef4b0180a729165", - "interpretation_Tm_abs_e818836335067047224d0c19c4cabb2d", - "interpretation_Tm_abs_f9f87dbe487b01446642c952246e3fc1", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.DependentMap.sel_create", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.otype_of_typ_otype_of_struct_field", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Pointer.Base.type_of_typ_struct", - "lemma_FStar.Pointer.Base.type_of_typ_union", - "lemma_FStar.Seq.Base.init_index_", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Negation", - "proj_equation_FStar.Pervasives.Native.Some_v", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_2527f4cbcbf4f88512fcbb65718a9b76", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5e8ab89510578a938a38bd5dfb813b93", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_a5b419dde0606bbe5389a1b0a41725a2", - "refinement_interpretation_Tm_refine_b86319999df90ce930c473bc87611673", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_bd52d171a74d80903c7842fefd75f8a1", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_dce369254040b9bd3ac1454cc66ab5ae", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "string_inversion", "string_typing", - "subterm_ordering_FStar.Pointer.Base.Mkstruct_typ", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion", - "token_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "token_correspondence_FStar.Pervasives.Native.fst", - "token_correspondence_FStar.Pointer.Base.ostruct_field_of_struct_field", - "token_correspondence_FStar.Pointer.Base.otype_of_struct_field", - "token_correspondence_FStar.Pointer.Base.otype_of_typ", - "token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.ovalue_of_value", - "token_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field", - 
"token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "token_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "tot_fun_Tm_abs_f9f87dbe487b01446642c952246e3fc1", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.List.Tot.Base.noRepeats", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base._union_get_key", - "typing_FStar.Pointer.Base.gtdata_create", - "typing_FStar.Pointer.Base.ostruct_field_of_struct_field", - "typing_FStar.Pointer.Base.ostruct_sel", - "typing_FStar.Pointer.Base.otype_of_struct_field", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.ovalue_of_value", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field_", - "typing_FStar.Pointer.Base.uu___is_TStruct", - "typing_FStar.Seq.Base.index", "typing_FStar.Seq.Base.length", - "typing_FStar.Seq.Properties.seq_find", "typing_FStar.UInt32.v", - "typing_Tm_abs_06fa7cc7c07a065ab8c0a8e3140994af", - "typing_Tm_abs_e818836335067047224d0c19c4cabb2d" - ], - 0, - "3536f5b6ab557073370234e405f3ff33" - ], - [ - "FStar.Pointer.Base.value_of_ovalue", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.union.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.union.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_714c243ce1cda15b90190082618ab455", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "binder_x_50ee5a23f17759eafa03d1be4a28778c_1", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TBase", - "constructor_distinct_FStar.Pointer.Base.TNPointer", - "constructor_distinct_FStar.Pointer.Base.TPointer", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "data_elim_FStar.Pointer.Base.TUnion", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TBase", - "disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - 
"equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "kinding_Prims.dtuple2@tok", "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_base", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Pointer.Base.type_of_typ_struct", - "lemma_FStar.Pointer.Base.type_of_typ_type_of_struct_field", - "lemma_FStar.Pointer.Base.type_of_typ_union", - "lemma_FStar.Seq.Base.lemma_init_len", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TBase_b", - "projection_inverse_FStar.Pointer.Base.TBuffer_t", - "projection_inverse_FStar.Pointer.Base.TNPointer_t", - "projection_inverse_FStar.Pointer.Base.TPointer_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "refinement_interpretation_Tm_refine_1dff1edeebf3475c72da4da95fdfe2d7", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "subterm_ordering_FStar.Pointer.Base.Mkstruct_typ", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion", - "token_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.buffer", - "typing_FStar.Pointer.Base.npointer", - "typing_FStar.Pointer.Base.pointer", - "typing_FStar.Pointer.Base.struct_create_fun", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field_", - "typing_FStar.Pointer.Base.type_of_base_typ", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Pointer.Base.uu___is_TStruct", - "typing_FStar.Seq.Base.length", - "typing_Tm_abs_6ba36691ee58dee85cd144324b083848" - ], - 0, - "72369a083d7f4fb6c35b30b7b643200a" - ], - [ - "FStar.Pointer.Base.ovalue_of_value_array_index", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_790b9eca5f7946b6b483f2755867c0f6", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "constructor_distinct_FStar.Pointer.Base.TArray", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_FStar.Pointer.Base.array", 
"equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "int_inversion", - "interpretation_Tm_abs_06fa7cc7c07a065ab8c0a8e3140994af", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Seq.Base.init_index_", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_dce369254040b9bd3ac1454cc66ab5ae", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Seq.Base.length", - "typing_Tm_abs_06fa7cc7c07a065ab8c0a8e3140994af" - ], - 0, - "5ea8878dbf69df0143385153140b81a5" - ], - [ - "FStar.Pointer.Base.value_of_ovalue_array_index", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_cbaf63ee0f7cd0118b8161198aafa7ec", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.TArray", - "data_typing_intro_FStar.Pervasives.Native.Some@tok", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_FStar.Pointer.Base.array", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "int_inversion", - "interpretation_Tm_abs_cfbb5b4925290e52ac8f62a4db2a2e50", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Seq.Base.init_index_", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_dce369254040b9bd3ac1454cc66ab5ae", - "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Pointer.Base.value_of_ovalue", - "typing_FStar.Seq.Base.length", - "typing_Tm_abs_cfbb5b4925290e52ac8f62a4db2a2e50" - ], - 0, - "60784da20bb036c78c78daff8af650a6" - ], - [ - "FStar.Pointer.Base.value_of_ovalue_of_value", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.union.fuel_instrumented", - 
"@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.struct.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.union.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - "FStar.DependentMap_interpretation_Tm_arrow_a7d5cc170be69663c495e8582d2bc62a", - "FStar.Pointer.Base_interpretation_Tm_arrow_3039342fd2e0851a3664dcc7db386d3a", - "FStar.Pointer.Base_interpretation_Tm_arrow_3148b61c0b10b09d9fa8c898186315b9", - "FStar.Pointer.Base_interpretation_Tm_arrow_488e6478d9a2851a48ceef25ab226f9c", - "FStar.Pointer.Base_interpretation_Tm_arrow_4922c0d39a14074f92b751ffa781c0f9", - "FStar.Pointer.Base_interpretation_Tm_arrow_9973fa91578be0bb3fb4c2414d28840c", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - "FStar.Pointer.Base_interpretation_Tm_arrow_bdc5e911949bed2f1a418f3bbfc31253", - "FStar.Pointer.Base_interpretation_Tm_arrow_ff2c816033059072d4e58d0e2307b681", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_daa2134f2832cd40b961ec5232d07ee3_1", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base._union_get_key", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.gtdata_create", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.gtdata_get_value", - "equation_FStar.Pointer.Base.ostruct", - "equation_FStar.Pointer.Base.ostruct_create", - "equation_FStar.Pointer.Base.ostruct_field_of_struct_field", - "equation_FStar.Pointer.Base.ostruct_sel", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.ounion_create", - "equation_FStar.Pointer.Base.ounion_get_key", - "equation_FStar.Pointer.Base.ounion_get_value", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_sel", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.Pointer.Base.union_create", - "equation_FStar.Pointer.Base.union_get_key", - "equation_FStar.Pointer.Base.union_get_value", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - 
"equation_with_fuel_FStar.Pointer.Base.union.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.ostruct_field_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "function_token_typing_FStar.Pointer.Base.type_of_struct_field_", - "int_inversion", - "interpretation_Tm_abs_104b9206a0f6c45cba53cc44776fb1f1", - "interpretation_Tm_abs_2bc9591a7ae56c98cef4b0180a729165", - "interpretation_Tm_abs_f9f87dbe487b01446642c952246e3fc1", - "lemma_FStar.DependentMap.sel_create", - "lemma_FStar.Pointer.Base.otype_of_typ_otype_of_struct_field", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.struct_sel_struct_create_fun", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Pointer.Base.type_of_typ_struct", - "lemma_FStar.Pointer.Base.type_of_typ_union", - "proj_equation_FStar.Pervasives.Native.Some_v", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "projection_inverse_Prims.Mkdtuple2_b", - "refinement_interpretation_Tm_refine_1dff1edeebf3475c72da4da95fdfe2d7", - "refinement_interpretation_Tm_refine_2527f4cbcbf4f88512fcbb65718a9b76", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_bd52d171a74d80903c7842fefd75f8a1", - "refinement_interpretation_Tm_refine_bdb60dfe130b3ec462391ffc4dcbf838", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "subterm_ordering_FStar.Pointer.Base.Mkstruct_typ", - "subterm_ordering_FStar.Pointer.Base.TArray", - "subterm_ordering_FStar.Pointer.Base.TStruct", - "subterm_ordering_FStar.Pointer.Base.TUnion", - "token_correspondence_FStar.Pointer.Base.ostruct_field_of_struct_field", - "token_correspondence_FStar.Pointer.Base.otype_of_struct_field", - "token_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "tot_fun_Tm_abs_f9f87dbe487b01446642c952246e3fc1", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base._union_get_key", - "typing_FStar.Pointer.Base.gtdata_create", - "typing_FStar.Pointer.Base.otype_of_struct_field", - "typing_FStar.Pointer.Base.ovalue_of_value", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field_", - "typing_FStar.Pointer.Base.value_of_ovalue", - "typing_Tm_abs_104b9206a0f6c45cba53cc44776fb1f1" - ], - 0, - 
"b0cd6786458a19aa15bfeed4c21e4faa" - ], - [ - "FStar.Pointer.Base.none_ovalue", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "constructor_distinct_FStar.Pointer.Base.TBase", - "constructor_distinct_FStar.Pointer.Base.TBuffer", - "constructor_distinct_FStar.Pointer.Base.TNPointer", - "constructor_distinct_FStar.Pointer.Base.TPointer", - "data_typing_intro_FStar.Pervasives.Native.None@tok", - "disc_equation_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.TBase", - "disc_equation_FStar.Pointer.Base.TBuffer", - "disc_equation_FStar.Pointer.Base.TNPointer", - "disc_equation_FStar.Pointer.Base.TPointer", - "disc_equation_FStar.Pointer.Base.TStruct", - "disc_equation_FStar.Pointer.Base.TUnion", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.ostruct", - "equation_FStar.Pointer.Base.ounion", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "kinding_Prims.dtuple2@tok", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TBase_b", - "projection_inverse_FStar.Pointer.Base.TBuffer_t", - "projection_inverse_FStar.Pointer.Base.TNPointer_t", - "projection_inverse_FStar.Pointer.Base.TPointer_t", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "typing_FStar.DependentMap.t", "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.buffer", - "typing_FStar.Pointer.Base.npointer", - "typing_FStar.Pointer.Base.otype_of_struct_field", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.pointer", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.type_of_base_typ", - "typing_Tm_abs_6ba36691ee58dee85cd144324b083848" - ], - 0, - "ca444e0342dfc952d9a1c84350d51472" - ], - [ - "FStar.Pointer.Base.not_ovalue_is_readable_none_ovalue", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@query", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.type_of_base_typ", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "primitive_Prims.op_AmpAmp", "projection_inverse_BoxBool_proj_0", - "typing_FStar.Pointer.Base.none_ovalue", - "typing_FStar.Pointer.Base.ovalue_is_readable" - ], - 0, - "15a937fa7cf8f4a3a2ec7c02035f19b7" - ], - [ - "FStar.Pointer.Base.step_sel", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - 
"@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.StepUField", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "data_elim_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.StepCell", - "disc_equation_FStar.Pointer.Base.StepField", - "disc_equation_FStar.Pointer.Base.StepUField", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.Pointer.Base.union_typ", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.eqtype", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "int_inversion", "kinding_Prims.dtuple2@tok", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.StepUField_fd", - "projection_inverse_FStar.Pointer.Base.StepUField_l", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.uu___is_StepField", - "typing_FStar.UInt32.v", - "typing_Tm_abs_6ba36691ee58dee85cd144324b083848" - ], - 0, - "5ce619f3446c982b710744e3c417cbc1" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_step_sel_cell", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@query", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "FStar.Set_interpretation_Tm_arrow_84543425b818e2d10a976186b8e8c250", - "b2t_def", "bool_inversion", - 
"constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base.TArray", - "data_elim_FStar.Pervasives.Native.Some", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Seq.Properties.for_all", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "fuel_token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented_token", - "int_inversion", "lemma_FStar.Pointer.Base.otype_of_typ_array", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pervasives.Native.Some_v", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_307fd373d8b3749096cf164b41cf1984", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_b361ba8089a6e963921008d537e799a1", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Seq.Base.length", - "typing_FStar.Seq.Properties.for_all", "typing_FStar.UInt32.v" - ], - 0, - "88f238a2da4b9f72b8665520600bc978" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_step_sel_field", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.StepField", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_FStar.Pervasives.Native.Some", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.ovalue_is_readable_struct_elim", - "primitive_Prims.op_AmpAmp", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.StepField_l", - "projection_inverse_FStar.Pointer.Base.TStruct_l" - ], - 0, - "267637ac95080321124dfe5700af775c" - ], - [ - "FStar.Pointer.Base.ovalue_is_readable_step_sel_union_same", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", 
"@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.StepUField", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Prims.Cons", "data_elim_Prims.Cons", - "data_typing_intro_FStar.Pointer.Base.StepUField@tok", - "data_typing_intro_FStar.Pointer.Base.TUnion@tok", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "primitive_Prims.op_Negation", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.StepUField_fd", - "projection_inverse_FStar.Pointer.Base.StepUField_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "string_typing", - "token_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.List.Tot.Base.noRepeats", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.step_sel", - "typing_FStar.Pointer.Base.typ_of_struct_field" - ], - 0, - "77d057f1c71a4ec49e4b02eac827dd6c" - ], - [ - "FStar.Pointer.Base.step_sel_none_ovalue", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - 
"@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.step_sel", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l" - ], - 0, - "1508a9798ecd78669ecd6672b5f98f74" - ], - [ - "FStar.Pointer.Base.path_sel", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_3", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "df3faca82b67d4ecd71bee4575d57338" - ], - [ - "FStar.Pointer.Base.path_sel_none_ovalue", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_2", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.step_sel", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "typing_FStar.Pointer.Base.none_ovalue" - ], - 0, - "63014005c4d2dbcfe79719b514f545c4" - ], - [ - "FStar.Pointer.Base.step_upd", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - 
"@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_a54ccb37fa50163db51a6d983062cade", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "b2t_def", "constructor_distinct_FStar.Pervasives.Native.Some", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "data_elim_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.StepCell", - "disc_equation_FStar.Pointer.Base.StepField", - "disc_equation_FStar.Pointer.Base.StepUField", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.ostruct", - "equation_FStar.Pointer.Base.ostruct_create", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "int_inversion", "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "lemma_FStar.Seq.Base.lemma_init_len", - "lemma_FStar.Seq.Base.lemma_len_upd", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "refinement_interpretation_Tm_refine_1dff1edeebf3475c72da4da95fdfe2d7", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_2ca062977a42c36634b89c1c4f193f79", - "refinement_interpretation_Tm_refine_65dcf6dc23a3b63be750aeabd27678f4", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_b86319999df90ce930c473bc87611673", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_be42cbabfbefe51746a09e590ec29119", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.otype_of_typ", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.none_ovalue", - 
"typing_FStar.Pointer.Base.ostruct_create", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.typ_of_struct_field_", - "typing_FStar.UInt32.v" - ], - 0, - "b7de0afac6ba879efba6aa558caa775e" - ], - [ - "FStar.Pointer.Base.step_sel_upd_same", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", - "FStar.DependentMap_interpretation_Tm_arrow_a7d5cc170be69663c495e8582d2bc62a", - "FStar.Pointer.Base_interpretation_Tm_arrow_461c1a04a2297465934fe9913aab7926", - "FStar.Pointer.Base_interpretation_Tm_arrow_5d491a1b4d1a78b391a7c9212b49de34", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "b2t_def", "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "data_elim_FStar.Pervasives.Native.Some", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "data_elim_FStar.Pointer.Base.TArray", - "data_elim_FStar.Pointer.Base.TStruct", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.gtdata_create", - "equation_FStar.Pointer.Base.gtdata_get_value", - "equation_FStar.Pointer.Base.ostruct", - "equation_FStar.Pointer.Base.ostruct_create", - "equation_FStar.Pointer.Base.ostruct_sel", - "equation_FStar.Pointer.Base.ostruct_upd", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.ounion_create", - "equation_FStar.Pointer.Base.ounion_get_key", - "equation_FStar.Pointer.Base.ounion_get_value", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.step_upd", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "int_inversion", - "interpretation_Tm_abs_2db8b65edc142b3bb4b255ced46f0f32", - "interpretation_Tm_abs_75a59a4eb51a5d27cae7d8aba21bacf8", - "lemma_FStar.DependentMap.sel_create", - "lemma_FStar.DependentMap.sel_upd_same", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Seq.Base.init_index_", - "lemma_FStar.Seq.Base.lemma_index_upd1", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pervasives.Native.Some_v", - 
"proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_4482b8ea10b7c3e0f684ae7e9c82dd06", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "string_inversion", "string_typing", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.Pointer.Base.otype_of_struct_field", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Seq.Base.init", "typing_FStar.UInt32.v", - "typing_Tm_abs_2db8b65edc142b3bb4b255ced46f0f32", - "typing_Tm_abs_75a59a4eb51a5d27cae7d8aba21bacf8" - ], - 0, - "52cdc4794382de68237b75827aea3096" - ], - [ - "FStar.Pointer.Base.path_upd", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_3", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "22d8d10e6a22353708f6bc14d1323e46" - ], - [ - "FStar.Pointer.Base.path_sel_upd_same", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_3", - "binder_x_50ee5a23f17759eafa03d1be4a28778c_2", - "binder_x_7e43bafa8b7a13ff9357a27d8b711814_4", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_upd.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "subterm_ordering_FStar.Pointer.Base.PathStep", - 
"typing_FStar.Pointer.Base.path_upd" - ], - 0, - "18c90614f9744bb5316797d7b7d98156" - ], - [ - "FStar.Pointer.Base.path_concat", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "binder_x_83e1c84bf1d2bdd28607d065760d2282_4", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "3c0f5c8e6b484d372eb9fd52b5416ba4" - ], - [ - "FStar.Pointer.Base.path_concat_base_r", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.PathBase", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "projection_inverse_FStar.Pointer.Base.PathBase_from" - ], - 0, - "bae4784488515baa9b07a269264a004c" - ], - [ - "FStar.Pointer.Base.path_concat_base_l", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_2", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "e41e6344ac4355c8dd70cc95a4ec5674" - ], - [ - "FStar.Pointer.Base.path_concat_assoc", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_4", - "binder_x_204eb02adadc5901687bfc1939c2389a_6", - "binder_x_83e1c84bf1d2bdd28607d065760d2282_5", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "binder_x_c187978e0b47d492be4f7ef67953e027_2", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - 
"projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_422c1bb9c4648e056a5d7170c942c5e1", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "typing_FStar.Pointer.Base.path_concat" - ], - 0, - "0c60f0fbde742ca6a6e0f85dbb11d221" - ], - [ - "FStar.Pointer.Base.path_sel_concat", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_4", - "binder_x_50ee5a23f17759eafa03d1be4a28778c_3", - "binder_x_83e1c84bf1d2bdd28607d065760d2282_5", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_422c1bb9c4648e056a5d7170c942c5e1", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "typing_FStar.Pointer.Base.path_sel" - ], - 0, - "d065409ca7d5baa0caeaae2f1bed1a03" - ], - [ - "FStar.Pointer.Base.path_upd_concat", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_4", - "binder_x_50ee5a23f17759eafa03d1be4a28778c_3", - "binder_x_5beba193057cd8d8483bf03292ee87be_6", - "binder_x_83e1c84bf1d2bdd28607d065760d2282_5", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - 
"disc_equation_FStar.Pointer.Base.PathStep", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_upd.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "lemma_FStar.Pointer.Base.path_sel_concat", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_422c1bb9c4648e056a5d7170c942c5e1", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "typing_FStar.Pointer.Base.path_sel" - ], - 0, - "8bd5955920fe354c572e8cdfd1901c0c" - ], - [ - "FStar.Pointer.Base.path_includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "assumption_FStar.Pointer.Base.path__uu___haseq", - "assumption_FStar.Pointer.Base.typ__uu___haseq", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_2", - "binder_x_d8c87ff041e51a1dd1e128b2f010108c_4", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "5b19bff5c0a94d6de8bd6708ad688c62" - ], - [ - "FStar.Pointer.Base.path_includes_base", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_2", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_BarBar", - "primitive_Prims.op_Equality", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "6aabcacdbbea5741d1206f90a02ccd88" - ], - [ - "FStar.Pointer.Base.path_includes_refl", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_BarBar", - "primitive_Prims.op_Equality", 
"projection_inverse_BoxBool_proj_0" - ], - 0, - "1ec3bd08ad19d389c3e006587ed4c718" - ], - [ - "FStar.Pointer.Base.path_includes_step_r", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "lemma_FStar.Pointer.Base.path_includes_refl", - "primitive_Prims.op_BarBar", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to" - ], - 0, - "341dd3f9742ae84e1f85787ed131a473" - ], - [ - "FStar.Pointer.Base.path_includes_trans", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "binder_x_0d5ecd039c65d95077f45e317c0079e9_4", - "binder_x_515b6cc5dab22ef3b4646bf3c5fbc75e_6", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "binder_x_c187978e0b47d492be4f7ef67953e027_2", - "binder_x_c187978e0b47d492be4f7ef67953e027_3", - "binder_x_d8c87ff041e51a1dd1e128b2f010108c_5", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_Prims.l_False", "equation_Prims.squash", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "false_interp", "fuel_guarded_inversion_FStar.Pointer.Base.path", - "lemma_FStar.Pointer.Base.path_includes_base", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_BarBar", - "primitive_Prims.op_Equality", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_1588d9cc9cb691265bea0ce2c66abf3c", - "refinement_interpretation_Tm_refine_2de20c066034c13bf76e9c0b94f4806c", - "refinement_interpretation_Tm_refine_6dc6390730040f67a21fe10c9d9d78e6", - "refinement_interpretation_Tm_refine_8ef196a44e8c1ef9d6ff692d3a3d9458", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "typing_FStar.Pointer.Base.path_includes" - ], - 0, - "b16e04e8cadafc8b8023cf6fe619b7b5" - ], - [ - "FStar.Pointer.Base.path_includes_ind", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", 
"binder_x_2f08af3dcd557cc55ee8503a3b614889_8", - "binder_x_b85f5386c8ec8b88fc0b7ceff79f784a_7", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_5", - "binder_x_c187978e0b47d492be4f7ef67953e027_6", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_Prims.l_False", "equation_Prims.squash", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "false_interp", "fuel_guarded_inversion_FStar.Pointer.Base.path", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_BarBar", - "primitive_Prims.op_Equality", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_2de20c066034c13bf76e9c0b94f4806c", - "refinement_interpretation_Tm_refine_4e1d800d2c4913431885413c72cd5aac", - "refinement_interpretation_Tm_refine_7ff07ff8c64e9fab3c0c26ef4caea393", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "ae2b45a19cf2984a4b727c2049445d19" - ], - [ - "FStar.Pointer.Base.path_includes_ind", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_ee1123b778ea4e559d0b51cc1e52228a" - ], - 0, - "12b4daa674ecd5e8322214e92448a992" - ], - [ - "FStar.Pointer.Base.path_length", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_2", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "primitive_Prims.op_Addition", "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "subterm_ordering_FStar.Pointer.Base.PathStep" - ], - 0, - "cce301bf1579d757dea2f8eea26201d9" - ], - [ - "FStar.Pointer.Base.path_includes_length", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_length.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_length.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_BarBar", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - 
"projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_2778e38235d85783ab53f0fc09c57540", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d", - "refinement_interpretation_Tm_refine_ee1123b778ea4e559d0b51cc1e52228a", - "typing_FStar.Pointer.Base.path_length" - ], - 0, - "e95678e3583fa75a7c8f1bb9ae56262f" - ], - [ - "FStar.Pointer.Base.path_includes_step_l", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_length.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_with_fuel_FStar.Pointer.Base.path_length.fuel_instrumented", - "primitive_Prims.op_Addition", "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_f858bfffcdc33b4edac687bbc0ab10f8" - ], - 0, - "b64e14ed1dc250557ad7b8e73f279268" - ], - [ - "FStar.Pointer.Base.path_includes_concat", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_3", - "binder_x_83e1c84bf1d2bdd28607d065760d2282_4", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "lemma_FStar.Pointer.Base.path_includes_refl", - "primitive_Prims.op_BarBar", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_422c1bb9c4648e056a5d7170c942c5e1", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented" - ], - 0, - "52c1331a7b5488763a0fa4c5ca590917" - ], - [ - "FStar.Pointer.Base.path_includes_exists_concat", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.PathBase", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - 
"data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "refinement_interpretation_Tm_refine_2294e26464ce127dc19115c7d2830fb1", - "refinement_interpretation_Tm_refine_60ddcdffd5ba8adfd24b319ec76cd182", - "refinement_interpretation_Tm_refine_b9f77fe2b0319c7073314bc8ff8b7aa2", - "refinement_interpretation_Tm_refine_ee1123b778ea4e559d0b51cc1e52228a" - ], - 0, - "129daf8440172a2996024649b8a80c95" - ], - [ - "FStar.Pointer.Base.path_concat_includes", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.path_includes_concat" - ], - 0, - "53d2b51634b85dbc6b3397e1e097f700" - ], - [ - "FStar.Pointer.Base.path_concat_includes", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.path_includes_concat" - ], - 0, - "d49073259fe1649cbfe4afeb44dca937" - ], - [ - "FStar.Pointer.Base.step_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "disc_equation_FStar.Pointer.Base.StepCell", - "disc_equation_FStar.Pointer.Base.StepField", - "disc_equation_FStar.Pointer.Base.StepUField", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.UInt.uint_t", "equation_Prims.eqtype", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "function_token_typing_Prims.int", - "haseqTm_refine_f13070840248fced9d9d60d77bdae3ec", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29" - ], - 0, - "21c68b817dbb198914088730d5b8af81" - ], - [ - "FStar.Pointer.Base.step_eq", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "disc_equation_FStar.Pointer.Base.StepCell", - "disc_equation_FStar.Pointer.Base.StepField", - "disc_equation_FStar.Pointer.Base.StepUField", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - 
"equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.StepUField_fd", - "projection_inverse_FStar.Pointer.Base.StepUField_l", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.UInt32.t" - ], - 0, - "495213fab4491f3567e7411552167761" - ], - [ - "FStar.Pointer.Base.step_disjoint_not_eq", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Prims.Cons", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "data_elim_FStar.Pointer.Base.TUnion", "data_elim_Prims.Cons", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_FStar.Pointer.Base.step_eq", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_Negation", - "primitive_Prims.op_disEquality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", 
"string_typing", - "token_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.List.Tot.Base.noRepeats", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.step_eq" - ], - 0, - "72e8f4a0225105e2436e52b0e7d2e91c" - ], - [ - "FStar.Pointer.Base.step_disjoint_sym", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Prims.Cons", "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "data_elim_FStar.Pointer.Base.TUnion", "data_elim_Prims.Cons", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.UInt.uint_t", "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Negation", "primitive_Prims.op_disEquality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "string_typing", - "token_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.List.Tot.Base.noRepeats", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.step_disjoint" - ], - 0, - "8ca4f378d208c37730149b863fbb680f" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointStep__item__through", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_e2bd2f820ea5fbaed59a6a315ed9cc96" - ], - 0, - "b2101c133d85935fc6245a938d439b0f" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointStep__item__to1", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_e2bd2f820ea5fbaed59a6a315ed9cc96" - ], - 0, - "8d32d0f5b1088ebdea56171018d6d9c4" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointStep__item__to2", - 1, - 1, - 1, - [ 
- "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_e2bd2f820ea5fbaed59a6a315ed9cc96" - ], - 0, - "199d40547c568acc7f57c9ec373bc2d5" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointStep__item__p", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointStep", - "refinement_interpretation_Tm_refine_e2bd2f820ea5fbaed59a6a315ed9cc96" - ], - 0, - "26bb11e9188d89549a88f39f44030b7f" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointStep__item__s1", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointStep", - "refinement_interpretation_Tm_refine_e2bd2f820ea5fbaed59a6a315ed9cc96" - ], - 0, - "dbf130bca9f4563663b33221487c8744" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointStep__item__s2", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointStep", - "refinement_interpretation_Tm_refine_e2bd2f820ea5fbaed59a6a315ed9cc96" - ], - 0, - "c8086a1cd4f3f4fd71e52e52c6384bc6" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__to1", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "016591f7503d9b355e9d39d55b3fda1e" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__to2", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "6eebd6f885bae082887fa84a22f9009f" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__p1", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointIncludes", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "43f45feb0a582d26de0ac10d8e6ee8b0" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__p2", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointIncludes", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "48c48cd324233a51039e0339c10a609e" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__to1'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "7a5ac37c36acdcdc65f0c5e954a7697e" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__to2'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "9c081dd6eb534ded711a39f7b7fef199" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__p1'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointIncludes", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "2fd038ed13f567b4568c0ba02e07fe2e" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item__p2'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointIncludes", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "0e112117959a47d1dd05f4819104ab36" - ], - [ - "FStar.Pointer.Base.__proj__PathDisjointIncludes__item___8", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_elim_FStar.Pointer.Base.PathDisjointIncludes", - "refinement_interpretation_Tm_refine_5147b1db68e5e61b5c28506bfe607655" - ], - 0, - "514b16a6ab3e829d19645eff60f87169" - ], - [ - 
"FStar.Pointer.Base.path_disjoint_t_rect", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "binder_x_31bb00d38761e7ccde7ceb9be4978dcc_9", - "data_elim_FStar.Pointer.Base.PathDisjointIncludes", - "data_elim_FStar.Pointer.Base.PathDisjointStep", - "disc_equation_FStar.Pointer.Base.PathDisjointIncludes", - "disc_equation_FStar.Pointer.Base.PathDisjointStep", - "fuel_guarded_inversion_FStar.Pointer.Base.path_disjoint_t", - "projection_inverse_BoxBool_proj_0", - "subterm_ordering_FStar.Pointer.Base.PathDisjointIncludes" - ], - 0, - "5c819cb96366da332e7a91fbf41c2bb1" - ], - [ - "FStar.Pointer.Base.path_disjoint_ind", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.path_disjoint", "equation_Prims.squash", - "refinement_interpretation_Tm_refine_2de20c066034c13bf76e9c0b94f4806c", - "refinement_interpretation_Tm_refine_a02d9d7bf230ab25f66365b7b22b819e", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d", - "unit_inversion" - ], - 0, - "03990745bfaf6f80ea4e93781715220f" - ], - [ - "FStar.Pointer.Base.path_disjoint_ind", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_035771f6b75c6418e7b567530874ccfb" - ], - 0, - "dd953f1835b9d9dd7b8de0227862e51e" - ], - [ - "FStar.Pointer.Base.path_disjoint_step", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.path_disjoint", - "equation_Prims.squash" - ], - 0, - "a06ea541ce4f3256bd1d329cb24a7982" - ], - [ - "FStar.Pointer.Base.path_disjoint_includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.path_disjoint", "equation_Prims.squash", - "refinement_interpretation_Tm_refine_2de20c066034c13bf76e9c0b94f4806c", - "unit_inversion" - ], - 0, - "3bbfa99b97edd740eb4405d622e5272d" - ], - [ - "FStar.Pointer.Base.path_disjoint_includes_l", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.path_includes_refl" - ], - 0, - "fa9c054540b248e7c08a50f55cd7d66b" - ], - [ - "FStar.Pointer.Base.path_disjoint_sym", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.StepField", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "primitive_Prims.op_disEquality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_035771f6b75c6418e7b567530874ccfb", - "refinement_interpretation_Tm_refine_a9e26cbb885a87a3108ab70a0a0e0be5", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d", - "string_inversion", "typing_FStar.Pointer.Base.step_disjoint" - ], - 0, - "c4e145b25b38591e1191d7758c2d9bc0" - ], - [ - "FStar.Pointer.Base.path_equal", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_3", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - 
"binder_x_c187978e0b47d492be4f7ef67953e027_2", - "binder_x_d8c87ff041e51a1dd1e128b2f010108c_4", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_FStar.Pointer.Base.step_eq", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "primitive_Prims.op_AmpAmp", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_64931eeed1bbcf763c4eea2d0c933f26", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "typing_FStar.Pointer.Base.path_equal", - "typing_FStar.Pointer.Base.step_eq" - ], - 0, - "a0b048f6ced4a36a73b5a73bd2bb0614" - ], - [ - "FStar.Pointer.Base.path_length_concat", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_length.fuel_instrumented", - "@query", "binder_x_0d5ecd039c65d95077f45e317c0079e9_3", - "binder_x_83e1c84bf1d2bdd28607d065760d2282_4", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_length.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", "int_inversion", - "primitive_Prims.op_Addition", "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_422c1bb9c4648e056a5d7170c942c5e1", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "typing_FStar.Pointer.Base.path_length" - ], - 0, - "66a584facfd9cfdbc832a1be4b28fefb" - ], - [ - "FStar.Pointer.Base.path_concat_inj_l", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_equal.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - 
"@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_2", - "binder_x_198ccbfb8c62cfbcee6e3d63acd41eeb_8", - "binder_x_3f0f019f33fdd3df121c66cf611c3526_4", - "binder_x_b85f5386c8ec8b88fc0b7ceff79f784a_7", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "binder_x_c187978e0b47d492be4f7ef67953e027_3", - "binder_x_c187978e0b47d492be4f7ef67953e027_5", - "binder_x_c187978e0b47d492be4f7ef67953e027_6", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.PathBase", - "data_elim_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_equal.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_length.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", "int_inversion", - "lemma_FStar.Pointer.Base.path_concat_base_l", - "primitive_Prims.op_Addition", "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_422c1bb9c4648e056a5d7170c942c5e1", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "typing_FStar.Pointer.Base.path_concat", - "typing_FStar.Pointer.Base.path_equal", - "typing_FStar.Pointer.Base.path_length" - ], - 0, - "84b00824a7fd9440c5fce520166eb707" - ], - [ - "FStar.Pointer.Base.path_concat_inj_l", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "typing_FStar.Pointer.Base.path_equal" - ], - 0, - "a149ab62d5490cbabd1ffcd21a5feaaf" - ], - [ - "FStar.Pointer.Base.path_disjoint_decomp_t", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "assumption_FStar.Pointer.Base.path__uu___haseq", - "assumption_FStar.Pointer.Base.step__uu___haseq", - "assumption_FStar.Pointer.Base.typ__uu___haseq", - "equation_Prims.eqtype", "function_token_typing_Prims.unit", - "haseqTm_refine_6a13e3414a2014eb8720dd6788230581", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_6a13e3414a2014eb8720dd6788230581" - ], - 0, - "7ee43224c8d61fb0a4537d1977e2a76b" - ], - [ - "FStar.Pointer.Base.path_disjoint_decomp_includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "fuel_guarded_inversion_FStar.Pointer.Base.path_disjoint_decomp_t", - "refinement_interpretation_Tm_refine_6a13e3414a2014eb8720dd6788230581" - ], - 0, - "ead448d1b16d107d06b7b3c6f94c6a9c" - ], - [ - "FStar.Pointer.Base.path_disjoint_decomp", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", 
"@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@query", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path_disjoint_decomp_t", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "refinement_interpretation_Tm_refine_035771f6b75c6418e7b567530874ccfb", - "refinement_interpretation_Tm_refine_68524d6f37c308befc08173d6bb78275", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d", - "typing_FStar.Pointer.Base.step_disjoint" - ], - 0, - "9651cf5e04d4c0b74e4068ebcabecd64" - ], - [ - "FStar.Pointer.Base.path_disjoint_not_path_equal", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_equal.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "@query", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.PathDisjointDecomp", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_equal.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_length.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "fuel_guarded_inversion_FStar.Pointer.Base.path_disjoint_decomp_t", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_disEquality", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_p", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_p1_", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_p2_", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_s1", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_s2", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_through", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_v1", - "projection_inverse_FStar.Pointer.Base.PathDisjointDecomp_d_v2", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_6a13e3414a2014eb8720dd6788230581", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "typing_FStar.Pointer.Base.path_equal" - ], - 0, - "08d706731cc43f189147b3899e1c36b5" - ], - [ - "FStar.Pointer.Base.path_destruct_l", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_length.fuel_instrumented", - "@query", - 
"FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_2", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pointer.Base.PathStep", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_length.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "primitive_Prims.op_Addition", "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_02fc160983cb6b95bc8024dda2fb489b", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5b666a2eab9854f4862b747ca3df0e57", - "refinement_interpretation_Tm_refine_cc25ff37b88364dacaea8b60814ec8fc", - "refinement_interpretation_Tm_refine_dbd32fc1f8071b12a138403222ec975e", - "subterm_ordering_FStar.Pointer.Base.PathStep", - "token_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "typing_FStar.Pointer.Base.path_destruct_l" - ], - 0, - "5d5bf8f544c417600a31ce26a402b57f" - ], - [ - "FStar.Pointer.Base.path_equal'", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_destruct_l.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_9973fa91578be0bb3fb4c2414d28840c", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "binder_x_0d5ecd039c65d95077f45e317c0079e9_3", - "binder_x_c187978e0b47d492be4f7ef67953e027_0", - "binder_x_c187978e0b47d492be4f7ef67953e027_1", - "binder_x_c187978e0b47d492be4f7ef67953e027_2", - "binder_x_d8c87ff041e51a1dd1e128b2f010108c_4", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pervasives.Native.Some", - "data_elim_FStar.Pervasives.Native.Some", - "data_elim_FStar.Pointer.Base.PathBase", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.PathBase", - "equation_FStar.Pointer.Base.step_eq", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_destruct_l.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_length.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - 
"fuel_guarded_inversion_Prims.dtuple2", - "kinding_FStar.Pointer.Base.typ@tok", "kinding_Prims.dtuple2@tok", - "lemma_FStar.Pervasives.invertOption", "primitive_Prims.op_AmpAmp", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "projection_inverse_Prims.Mkdtuple2_b", - "refinement_interpretation_Tm_refine_02fc160983cb6b95bc8024dda2fb489b", - "refinement_interpretation_Tm_refine_2b3edafa058d04c5c7e8a35d7a04bacd", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "refinement_interpretation_Tm_refine_942eb390dc27abd74312617cdca63061", - "refinement_interpretation_Tm_refine_c065a07c7e82cfd5d3c150edd43fe009", - "token_correspondence_FStar.Pointer.Base.path_length.fuel_instrumented", - "typing_FStar.Pointer.Base.path_destruct_l", - "typing_FStar.Pointer.Base.path_equal_", - "typing_FStar.Pointer.Base.path_length", - "typing_FStar.Pointer.Base.step_eq", - "typing_FStar.Pointer.Base.uu___is_PathBase", - "typing_Tm_abs_43e86c10475a85d4cc75f6a9b60d3d85", - "well-founded-ordering-on-nat" - ], - 0, - "d481dff1576d5af2fec95031dab50df4" - ], - [ - "FStar.Pointer.Base.path_includes_concat_l", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "lemma_FStar.Pointer.Base.path_concat_base_l", - "lemma_FStar.Pointer.Base.path_includes_concat", - "lemma_FStar.Pointer.Base.path_includes_step_r", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_422c1bb9c4648e056a5d7170c942c5e1", - "refinement_interpretation_Tm_refine_745c45a6c660b2794a1601e82614b9cd", - "refinement_interpretation_Tm_refine_ee1123b778ea4e559d0b51cc1e52228a", - "typing_FStar.Pointer.Base.path_concat" - ], - 0, - "76abe3e93fd42d084800ed77c9a92cd0" - ], - [ - "FStar.Pointer.Base.path_disjoint_concat", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_with_fuel_FStar.Pointer.Base.path_concat.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - 
"projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_035771f6b75c6418e7b567530874ccfb", - "refinement_interpretation_Tm_refine_c9202a9834ab181aaf32a45a956916fb", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d" - ], - 0, - "cb917c0b7f55908c5482bbe6456e416d" - ], - [ - "FStar.Pointer.Base.step_sel_upd_other", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@query", - "FStar.DependentMap_interpretation_Tm_arrow_a7d5cc170be69663c495e8582d2bc62a", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_interpretation_Tm_arrow_461c1a04a2297465934fe9913aab7926", - "FStar.Pointer.Base_interpretation_Tm_arrow_5d491a1b4d1a78b391a7c9212b49de34", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "b2t_def", - "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base.StepField", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Prims.Cons", "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.TArray", "data_elim_Prims.Cons", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.StepCell", - "disc_equation_FStar.Pointer.Base.StepField", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.ostruct", - "equation_FStar.Pointer.Base.ostruct_create", - "equation_FStar.Pointer.Base.ostruct_sel", - "equation_FStar.Pointer.Base.ostruct_upd", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.step_upd", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.eqtype", "equation_Prims.nat", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.noRepeats.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - 
"fuel_guarded_inversion_FStar.Pointer.Base.step", - "fuel_guarded_inversion_Prims.list", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.otype_of_typ", - "function_token_typing_Prims.string", "int_inversion", - "interpretation_Tm_abs_2db8b65edc142b3bb4b255ced46f0f32", - "interpretation_Tm_abs_75a59a4eb51a5d27cae7d8aba21bacf8", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.DependentMap.sel_create", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Seq.Base.init_index_", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Negation", "primitive_Prims.op_disEquality", - "proj_equation_FStar.Pervasives.Native.Some_v", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.StepField_l", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_041a24a8c3715e0f4960d28f20ee920b", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_4482b8ea10b7c3e0f684ae7e9c82dd06", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "string_inversion", "string_typing", - "token_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.List.Tot.Base.noRepeats", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.otype_of_struct_field", - "typing_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Pointer.Base.step_disjoint", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.uu___is_StepCell", - "typing_FStar.UInt32.v", - "typing_Tm_abs_2db8b65edc142b3bb4b255ced46f0f32", - "typing_Tm_abs_75a59a4eb51a5d27cae7d8aba21bacf8", "unit_inversion", - "unit_typing" - ], - 0, - "cf938173057d064705f4a0214a72b109" - ], - [ - "FStar.Pointer.Base.path_sel_upd_other", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", 
"@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@query", "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_typing_intro_FStar.Pointer.Base.PathStep@tok", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_upd.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "lemma_FStar.Pointer.Base.path_sel_concat", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "refinement_interpretation_Tm_refine_035771f6b75c6418e7b567530874ccfb", - "refinement_interpretation_Tm_refine_60ddcdffd5ba8adfd24b319ec76cd182", - "refinement_interpretation_Tm_refine_94ef2680d91d6577ede5074a8fbe39bf", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.path_upd" - ], - 0, - "b58bac3bff0da64b329cedf95225345e" - ], - [ - "FStar.Pointer.Base.path_sel_upd_other'", - 1, - 1, - 1, - [ "@query" ], - 0, - "58a308a8a3d8fa6a65ac375a07153344" - ], - [ - "FStar.Pointer.Base.equal", - 1, - 1, - 1, - [ "@query" ], - 0, - "7f556a05e2ccb69e8ca0936b2924bfcd" - ], - [ - "FStar.Pointer.Base.equal", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_equal.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "assumption_FStar.Pointer.Base.typ__uu___haseq", "bool_inversion", - "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.path_equal" - ], - 0, - "89a28b6e51fc4b3a3addeab5e2b52cad" - ], - [ - "FStar.Pointer.Base.as_addr", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "projection_inverse_BoxBool_proj_0", - 
"refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0" - ], - 0, - "1f3234cedd89d1042088f4eefa51d239" - ], - [ - "FStar.Pointer.Base._field", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields" - ], - 0, - "15309cf2a7f1f2b4329abc8208856db0" - ], - [ - "FStar.Pointer.Base._cell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "int_inversion", "primitive_Prims.op_AmpAmp", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.UInt32.v" - ], - 0, - "bc8963c0f9165c13dd6fa5478735c7ab" - ], - [ - "FStar.Pointer.Base._ufield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.union_typ", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields" - ], - 0, - "ab0fecd632927cd8e4cc86fd36a107a6" - ], - [ - "FStar.Pointer.Base.unused_in", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.Map.lemma_ContainsDom", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - 
"typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "5966edffe07321538e9cce15e5257a7d" - ], - [ - "FStar.Pointer.Base.live", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.Map.lemma_ContainsDom", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "f0ae2f25bffe5d5d073ff163ddea721a" - ], - [ - "FStar.Pointer.Base.nlive", - 1, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.g_is_null" ], - 0, - "dcb0828fba2eb40f8bb7f5d819df3015" - ], - [ - "FStar.Pointer.Base.live_nlive", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.nlive", - "equation_FStar.Pointer.Base.pointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.Map.lemma_ContainsDom", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "002260cb53bbdb16a16af2dc8ff15a5b" - ], - [ - "FStar.Pointer.Base.g_is_null_nlive", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.nlive", "true_interp", - "typing_FStar.Pointer.Base.g_is_null" - ], - 0, - "f8fe1ee6847cf064adf636c85b9870d3" - ], - [ - "FStar.Pointer.Base.greference_of", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Monotonic.HyperStack.aref_of_greference_of", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - 
"projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_4e2ab07ce8055f254b1a667fb0e845a8", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "4ede69264f8bffdbaae264e6ae063c3c" - ], - [ - "FStar.Pointer.Base.unused_in_greference_of", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Monotonic.HyperStack.unused_in_aref_of", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.greference_of" - ], - 0, - "f9d97e14db32780fb1d1a8d73c7aba17" - ], - [ - "FStar.Pointer.Base.live_not_unused_in", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.unused_in", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Monotonic.Heap.lemma_contains_implies_used", - "lemma_FStar.Monotonic.HyperStack.aref_live_at_aref_of", - "lemma_FStar.Monotonic.HyperStack.unused_in_aref_of", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", 
- "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Map.sel", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "f924ab37d53ea63ed5b0bb50ac03d134" - ], - [ - "FStar.Pointer.Base.gread", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "l_and-interp", "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Mkdtuple2__1", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_74d04f3d69c944bb6c1ca407ab636711" - ], - 0, - "ba727db465c48f16aeef7720c9849237" - ], - [ - "FStar.Pointer.Base.frameOf", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0" - ], - 0, - "c267739e71c98e39a8e214ff422ed344" - ], - [ - "FStar.Pointer.Base.live_region_frameOf", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Monotonic.HyperStack.aref_live_at_aref_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "f6c033dd7b52b535fc609ed29fba9e78" - ], - [ - "FStar.Pointer.Base.disjoint_roots_intro_pointer_vs_pointer", - 1, - 1, - 1, - [ "@query" ], - 0, - "26c6cf532f7dd673e006452b4be873cd" - ], - [ - "FStar.Pointer.Base.disjoint_roots_intro_pointer_vs_pointer", - 2, - 1, 
- 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_1d7f81705c35971e00b909e37163cd25", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.greference_of", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.frameOf" - ], - 0, - "d33a2ff01777ad0312e1425ce576f4b0" - ], - [ - "FStar.Pointer.Base.disjoint_roots_intro_pointer_vs_reference", - 1, - 1, - 1, - [ "@query" ], - 0, - "58c7aa2553ed38cff91efb48d0609c8a" - ], - [ - "FStar.Pointer.Base.disjoint_roots_intro_pointer_vs_reference", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.as_addr", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Monotonic.HyperStack.unused_in", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", - "lemma_FStar.Monotonic.Heap.lemma_contains_implies_used", - "lemma_FStar.Monotonic.Heap.lemma_distinct_addrs_unused", - "lemma_FStar.Monotonic.HyperStack.aref_as_addr_aref_of", - "lemma_FStar.Monotonic.HyperStack.aref_live_at_aref_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Heap.trivial_preorder", 
"typing_FStar.Map.sel", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.frameOf" - ], - 0, - "2c7e1260021856551cb92efe828c9917" - ], - [ - "FStar.Pointer.Base.disjoint_roots_intro_reference_vs_pointer", - 1, - 1, - 1, - [ "@query" ], - 0, - "5622441cf36a6138a09a5ce898285982" - ], - [ - "FStar.Pointer.Base.disjoint_roots_intro_reference_vs_pointer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "data_elim_FStar.Pointer.Base.Pointer", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.unused_in", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.frameOf" - ], - 0, - "4d1d2cbeae920eabe5c330380e11cf5b" - ], - [ - "FStar.Pointer.Base.is_mm", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0" - ], - 0, - "2b5d0e673d19de2ceeacd0b8f508ccb6" - ], - [ - "FStar.Pointer.Base.as_addr_gfield", - 1, - 1, - 1, - [ "@query" ], - 0, - "32a92776909369a67b578457643c8cd6" - ], - [ - "FStar.Pointer.Base.as_addr_gfield", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gfield", "unit_typing" - ], - 0, - "aaccfc92ab9b6f7ea128a4e36706ce3e" - ], - [ - "FStar.Pointer.Base.unused_in_gfield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._field", - 
"equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.unused_in", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gfield", "unit_typing" - ], - 0, - "e7d7be4d1973e5dbdf25b582c0ef0baa" - ], - [ - "FStar.Pointer.Base.live_gfield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gfield", "unit_typing" - ], - 0, - "f1055fbf033054934e3fc010b323be5d" - ], - [ - "FStar.Pointer.Base.gread_gfield", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "equation_FStar.Pointer.Base.typ_of_struct_field", - "lemma_FStar.Pointer.Base.type_of_typ_type_of_struct_field" - ], - 0, - "c116d013061a9b2c51d858242e2cdeca" - ], - [ - "FStar.Pointer.Base.gread_gfield", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_3039342fd2e0851a3664dcc7db386d3a", - "FStar.Pointer.Base_interpretation_Tm_arrow_a0191ab2e68770063b58fb6f1cd4a993", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - "FStar.Pointer.Base_interpretation_Tm_arrow_bdc5e911949bed2f1a418f3bbfc31253", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.StepField", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_Tm_unit", "data_elim_Prims.Mkdtuple2", - 
"data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.ostruct", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_Prims.eqtype", - "equation_with_fuel_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "function_token_typing_Prims.string", - "interpretation_Tm_abs_104b9206a0f6c45cba53cc44776fb1f1", - "interpretation_Tm_abs_9960ac31dfa7b5178b9dece32921ded2", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.live_gfield", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.struct_sel_struct_create_fun", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.StepField_l", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - 
"refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.otype_of_struct_field", - "typing_FStar.DependentMap.t", "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pervasives.dfst", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.gfield", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.live", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_Prims.uu___is_Cons", - "typing_Tm_abs_104b9206a0f6c45cba53cc44776fb1f1", - "typing_Tm_abs_9960ac31dfa7b5178b9dece32921ded2" - ], - 0, - "c475c89a08e7fd7b7d71d2fe7e83bfda" - ], - [ - "FStar.Pointer.Base.frameOf_gfield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gfield", "unit_typing" - ], - 0, - "038751a6294da1e507b9d5aa6276bf3e" - ], - [ - "FStar.Pointer.Base.is_mm_gfield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.is_mm", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gfield", "unit_typing" - ], - 0, - "32634f64b628db7d8512c8c7bbd0e7cb" - ], - [ - "FStar.Pointer.Base.as_addr_gufield", - 1, - 1, - 1, - [ "@query" ], - 0, - "79d305fcafc10194a4c6039b15fcb666" - ], - [ - "FStar.Pointer.Base.as_addr_gufield", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - 
"projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gufield", "unit_typing" - ], - 0, - "75d50ae9da7a246e69249c9a41752591" - ], - [ - "FStar.Pointer.Base.unused_in_gufield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.unused_in", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gufield", "unit_typing" - ], - 0, - "150a12d8928e48cb04553204f15bd089" - ], - [ - "FStar.Pointer.Base.live_gufield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gufield", "unit_typing" - ], - 0, - "22413e19bb26954511b8d17adfb14ba6" - ], - [ - "FStar.Pointer.Base.gread_gufield", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.union_typ", - "lemma_FStar.Pointer.Base.type_of_typ_type_of_struct_field" - ], - 0, - "9f1f38665c8fb4367a2dfdc77ee86d71" - ], - [ - "FStar.Pointer.Base.gread_gufield", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - 
"FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.StepUField", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.Cons", "constructor_distinct_Prims.unit", - "data_elim_Prims.Cons", "data_elim_Prims.Mkdtuple2", - "data_typing_intro_FStar.Pointer.Base.TUnion@tok", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "eq2-interp", - "equation_FStar.HyperStack.reference", - "equation_FStar.List.Tot.Base.hd", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.Native.fst", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gtdata_create", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.gtdata_get_value", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.union_create", - "equation_FStar.Pointer.Base.union_get_key", - "equation_FStar.Pointer.Base.union_get_value", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ_.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "function_token_typing_Prims.string", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Pointer.Base.live_gufield", - "lemma_FStar.Pointer.Base.type_of_typ__eq", - "primitive_Prims.op_Equality", - "proj_equation_FStar.Pervasives.Native.Mktuple2__1", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - 
"proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepUField_fd", - "projection_inverse_FStar.Pointer.Base.StepUField_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "token_correspondence_FStar.Pervasives.Native.fst", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pervasives.dfst", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.gufield", - "typing_FStar.Pointer.Base.live", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_Prims.uu___is_Cons", "unit_typing" - ], - 0, - "d819ed28a045d852a7e30ab8e08b65e1" - ], - [ - "FStar.Pointer.Base.frameOf_gufield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gufield", "unit_typing" - ], - 0, - "fe87274c7dfc06117cb78e11a8c112b2" - ], - [ - "FStar.Pointer.Base.is_mm_gufield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._ufield", - 
"equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.is_mm", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gufield", "unit_typing" - ], - 0, - "9d9795001ea8e9466307a5b88362c010" - ], - [ - "FStar.Pointer.Base.gcell", - 1, - 1, - 1, - [ "@query" ], - 0, - "e12630b4f6a4eac2852fa39a4bc8ded4" - ], - [ - "FStar.Pointer.Base.as_addr_gcell", - 1, - 1, - 1, - [ "@query" ], - 0, - "11d38455dd607b6bff8cb6f8e4b61557" - ], - [ - "FStar.Pointer.Base.as_addr_gcell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gcell", "unit_typing" - ], - 0, - "7649fb012f86f41f4a7c70678778952b" - ], - [ - "FStar.Pointer.Base.unused_in_gcell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.unused_in", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gcell", "unit_typing" - ], - 0, - "549881efb11c576624a41f26bead3ca3" - ], - [ - "FStar.Pointer.Base.live_gcell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - 
"refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gcell", "unit_typing" - ], - 0, - "6cafb0c58cc6e9056c1038a00e8cf06a" - ], - [ - "FStar.Pointer.Base.gread_gcell", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_FStar.Pointer.Base.array", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "typing_FStar.Pointer.Base.gread" - ], - 0, - "b513b383ae4604e178980b96a34f18f6" - ], - [ - "FStar.Pointer.Base.gread_gcell", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - "FStar.Pervasives.Native_pretyping_b53dbd183c526bc5d0f20d7b966ae125", - "FStar.Pointer.Base_interpretation_Tm_arrow_cbaf63ee0f7cd0118b8161198aafa7ec", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pervasives.Native.option", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_Prims.unit", "constructor_distinct_Tm_unit", - "data_elim_Prims.Mkdtuple2", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - 
"equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.step_sel", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "int_inversion", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "interpretation_Tm_abs_cfbb5b4925290e52ac8f62a4db2a2e50", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.live_gcell", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Seq.Base.init_index_", - "lemma_FStar.Seq.Base.lemma_index_create", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "refinement_interpretation_Tm_refine_dce369254040b9bd3ac1454cc66ab5ae", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.Pointer.Base.dummy_val.fuel_instrumented", - 
"typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pervasives.dfst", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.gcell", "typing_FStar.Pointer.Base.gread", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.live", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Seq.Base.length", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_FStar.UInt32.v", - "typing_Tm_abs_cfbb5b4925290e52ac8f62a4db2a2e50", "unit_typing" - ], - 0, - "4d32fe22bd64f7b28ef313e859f76770" - ], - [ - "FStar.Pointer.Base.frameOf_gcell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gcell", "unit_typing" - ], - 0, - "23107db70ca846bd8b3935ab6cc682c3" - ], - [ - "FStar.Pointer.Base.is_mm_gcell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.is_mm", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gcell", "unit_typing" - ], - 0, - "18907491a90ca128759a897d31092928" - ], - [ - "FStar.Pointer.Base.includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "assumption_FStar.Pointer.Base.typ__uu___haseq", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "primitive_Prims.op_Equality", "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0" - ], - 0, - "a15bd8adc4fd74ffa80e8727bb8e245e" - ], - [ - "FStar.Pointer.Base.includes_refl", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - 
"bool_inversion", "bool_typing", - "data_elim_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "lemma_FStar.Pointer.Base.path_includes_refl", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.includes" - ], - 0, - "21b514edbfb93ccae8519b98af2e6a0c" - ], - [ - "FStar.Pointer.Base.includes_trans", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", "bool_inversion", "bool_typing", - "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.includes" - ], - 0, - "26a2af1501083ab9464a913fdeba76f7" - ], - [ - "FStar.Pointer.Base.includes_gfield", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.StepField@tok", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_Prims.eqtype", "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.Pointer.Base.path_includes_step_r", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - 
"proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.gfield", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_Prims.uu___is_Cons", "unit_typing" - ], - 0, - "622754f7caafbc6c86e2637095cf2ee2" - ], - [ - "FStar.Pointer.Base.includes_gufield", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.StepUField@tok", - "data_typing_intro_FStar.Pointer.Base.TUnion@tok", - "disc_equation_Prims.Cons", "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.Pointer.Base.path_includes_step_r", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - 
"refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.gufield", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_Prims.uu___is_Cons", "unit_typing" - ], - 0, - "7610b638ce414737fe4e4fb49fdfa01b" - ], - [ - "FStar.Pointer.Base.includes_gcell", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_b27168cc0cd9c7f90ae88b602ccfc55c", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "int_inversion", "lemma_FStar.Pointer.Base.path_includes_step_r", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.gcell", "typing_FStar.UInt32.v", - "unit_typing" - ], - 0, - "d97d8424dae8d87bd124301fa09e05b6" - ], - [ - "FStar.Pointer.Base.includes_ind", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "data_elim_FStar.Pointer.Base.Pointer", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.StepUField", - "data_elim_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.StepCell", - "disc_equation_FStar.Pointer.Base.StepField", - "disc_equation_FStar.Pointer.Base.StepUField", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.g_is_null", - 
"equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_BarBar", - "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_1588d9cc9cb691265bea0ce2c66abf3c", - "refinement_interpretation_Tm_refine_2294e26464ce127dc19115c7d2830fb1", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7be63e85c12413195a9167f090ba15fa", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d", - "refinement_interpretation_Tm_refine_ee1123b778ea4e559d0b51cc1e52228a", - "refinement_interpretation_Tm_refine_f1bdf056430cac446ba97c183d132799", - "token_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.includes", - "typing_FStar.Pointer.Base.path_includes" - ], - 0, - "c68f0476eb9b1f950cefd5e0bd3cf5b4" - ], - [ - "FStar.Pointer.Base.includes_ind", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_063c2b36cf1249fbf6d053f452d0ad87", - "refinement_interpretation_Tm_refine_1d4ba028ed16c70ccba461732aababc0", - "refinement_interpretation_Tm_refine_bc644e363f3618c98f527a696e25fc11" - ], - 0, - "bbf91f7ede992327488316ba69f755c3" - ], - [ - "FStar.Pointer.Base.readable", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", "disc_equation_FStar.Pointer.Base.Pointer", - "eq2-interp", "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "l_and-interp", "lemma_FStar.Pointer.Base.live_region_frameOf", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Mkdtuple2__1", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.frameOf" - ], - 0, - "679d9f194028362ffb4c8de60ca32dbc" - ], - [ - "FStar.Pointer.Base.readable_live", - 1, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.readable", "l_and-interp" ], - 0, - "fe31546410cfb2493473ed0e14235d87" - ], - [ - 
"FStar.Pointer.Base.readable_gfield", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.Pointer", "data_elim_Prims.Mkdtuple2", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "eq2-interp", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_Prims.eqtype", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "function_token_typing_Prims.string", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Pointer.Base.live_gfield", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.ovalue_is_readable_step_sel_field", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - 
"refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.gfield", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_Prims.uu___is_Cons", "unit_typing" - ], - 0, - "edc06ab926f52a829e221b9c4c4e469a" - ], - [ - "FStar.Pointer.Base.readable_struct", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_interpretation_Tm_arrow_a562036d4086240af9c67a5348138fd7", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.StepField", - "constructor_distinct_FStar.Pointer.Base.TStruct", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.Cons", "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "eq2-interp", - "equation_FStar.List.Tot.Base.hd", - "equation_FStar.Pervasives.Native.fst", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.ostruct", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_base_typ", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - 
"equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "equation_with_fuel_FStar.List.Tot.Base.mem.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_FStar.Pointer.Base.otype_of_struct_field", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.live_gfield", - "lemma_FStar.Pointer.Base.otype_of_typ_struct", - "lemma_FStar.Pointer.Base.readable_live", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pervasives.Native.Mktuple2__1", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.StepField_l", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_298741368b36f9fce1347c8b3fa4f829", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_b86319999df90ce930c473bc87611673", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.otype_of_typ", - "token_correspondence_FStar.Pointer.Base.type_of_struct_field_", - "typing_FStar.DependentMap.t", "typing_FStar.List.Tot.Base.map", - "typing_FStar.List.Tot.Base.mem", "typing_FStar.Pointer.Base.gfield", - "typing_FStar.Pointer.Base.ostruct_sel", - "typing_FStar.Pointer.Base.otype_of_struct_field", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field", "unit_typing" - ], - 0, - "92cd9b1e1b61eb6e1e3481d2831d09b0" - ], - [ - "FStar.Pointer.Base.readable_struct_fields'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", - "binder_x_2ab32aa018f38d35baff7c272edc1456_0", - "binder_x_dce109b66917edb1ca83b9408101b740_3", - "binder_x_ed25b04ac1a3660bf4cdc8ae577888d8_1", "bool_inversion", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", - "disc_equation_Prims.Cons", "disc_equation_Prims.Nil", - 
"equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "fuel_guarded_inversion_Prims.list", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.Map.lemma_ContainsDom", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "subterm_ordering_Prims.Cons", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "d2b1c634b4a8f6eaa18f16732a757fdf" - ], - [ - "FStar.Pointer.Base.readable_struct_fields_nil", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "@query", "constructor_distinct_Prims.Nil", - "data_typing_intro_Prims.Nil@tok", - "equation_FStar.Pointer.Base.readable_struct_fields", - "equation_Prims.eqtype", - "equation_with_fuel_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "function_token_typing_Prims.string", - "projection_inverse_Prims.Nil_a", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "true_interp" - ], - 0, - "d2638fb4e367f959e0d981f1a56c3a4c" - ], - [ - "FStar.Pointer.Base.readable_struct_fields_cons", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "@query", "bool_inversion", "constructor_distinct_Prims.Cons", - "data_typing_intro_Prims.Cons@tok", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.readable_struct_fields", - "equation_Prims.eqtype", - "equation_with_fuel_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_Prims.string", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "true_interp", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "a7c507f0ae04fc8a9c0b56e3d029a782" - ], - [ - "FStar.Pointer.Base.readable_struct_fields_elim", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.mem.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - 
"Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", - "binder_x_2ab32aa018f38d35baff7c272edc1456_0", - "binder_x_8d6a14835e73bbd0cb8017424edf0a1b_2", - "binder_x_dce109b66917edb1ca83b9408101b740_3", - "binder_x_ed25b04ac1a3660bf4cdc8ae577888d8_1", "bool_inversion", - "constructor_distinct_Prims.Cons", "constructor_distinct_Prims.Nil", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_Prims.Cons", "disc_equation_Prims.Nil", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.readable_struct_fields", - "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.mem.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.readable_struct_fields_.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "fuel_guarded_inversion_Prims.list", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Cons_a", - "projection_inverse_Prims.Cons_hd", - "projection_inverse_Prims.Cons_tl", "projection_inverse_Prims.Nil_a", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_e35e9177b80909dbf230b3c6b66a11f5", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", "subterm_ordering_Prims.Cons", - "typing_FStar.List.Tot.Base.map", "typing_FStar.List.Tot.Base.mem", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields" - ], - 0, - "18c01df48b9962f5425f28e25dfb3739" - ], - [ - "FStar.Pointer.Base.readable_struct_fields_readable_struct", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.List.Tot.Base.map.fuel_instrumented", - "@fuel_irrelevance_FStar.List.Tot.Base.map.fuel_instrumented", - "@query", - "FStar.Map_interpretation_Tm_arrow_6980332764c4493a7b0df5c02f7aefbe", - "FStar.Pervasives.Native_interpretation_Tm_arrow_b8cce376a4a678a51298a0f3945f25ce", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", - "data_elim_FStar.Pointer.Base.Mkstruct_typ", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", "equation_Prims.eqtype", - "equation_with_fuel_FStar.List.Tot.Base.map.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.struct_typ", - "function_token_typing_FStar.Pervasives.Native.fst", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - 
"kinding_FStar.Pointer.Base.typ@tok", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_40af92d7ec70b9e69654b789401fdfd3", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields" - ], - 0, - "7d93abd1b3d95930b8b6fc9493faf6db" - ], - [ - "FStar.Pointer.Base.readable_gcell", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.Pointer", "data_elim_Prims.Mkdtuple2", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "l_and-interp", "lemma_FStar.Pointer.Base.live_gcell", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "lemma_FStar.Pointer.Base.ovalue_is_readable_step_sel_cell", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - 
"refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.frameOf", - "typing_FStar.Pointer.Base.gcell", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_sel", "unit_typing" - ], - 0, - "8c728bbde89a8099d1b1cd1c8cd7fdee" - ], - [ - "FStar.Pointer.Base.readable_array", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.type_of_base_typ", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "int_typing", "l_and-interp", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.live_gcell", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.readable_live", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pervasives.Native.Some_v", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - 
"proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base._cell", "typing_FStar.Pointer.Base.array", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v", - "unit_typing" - ], - 0, - "bf1cb80d7758e383f5581e019cc76a2a" - ], - [ - "FStar.Pointer.Base.readable_gufield", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.StepUField", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.Pointer", "data_elim_Prims.Mkdtuple2", - "data_typing_intro_FStar.Pointer.Base.TUnion@tok", - "disc_equation_FStar.Pervasives.Native.None", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "eq2-interp", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base._ufield", - 
"equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.gtdata_create", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.ounion", - "equation_FStar.Pointer.Base.ounion_get_key", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_base_typ", - "equation_FStar.Pointer.Base.union_create", - "equation_FStar.Pointer.Base.union_get_key", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.eqtype", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "function_token_typing_Prims.string", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", "kinding_Prims.dtuple2@tok", - "l_and-interp", "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.live_gufield", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "lemma_FStar.Pointer.Base.readable_live", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepUField_fd", - "projection_inverse_FStar.Pointer.Base.StepUField_l", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "projection_inverse_Prims.Mkdtuple2__1", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - 
"string_inversion", "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.__proj__Pointer__item__from", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.gufield", - "typing_FStar.Pointer.Base.live", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.struct_field", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_Prims.uu___is_Cons", - "typing_Tm_abs_6ba36691ee58dee85cd144324b083848" - ], - 0, - "4980ebf2e7843bd498e4d8d9f3963042" - ], - [ - "FStar.Pointer.Base.is_active_union_field", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "Prims_pretyping_ce036b6b736ef4e0bc3a9ff132a12aed", "bool_inversion", - "data_elim_Prims.Cons", "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_Prims.Cons", "eq2-interp", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.struct_typ_", - "equation_FStar.Pointer.Base.union_typ", - "fuel_guarded_inversion_FStar.Pervasives.Native.tuple2", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Mkdtuple2__1", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields" - ], - 0, - "bfbc3bdbf6c86602588b6ccfe66f8b17" - ], - [ - "FStar.Pointer.Base.is_active_union_live", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.is_active_union_field", - "l_and-interp" - ], - 0, - "d1ade5d6de3a49675b1b8eb2fb61c473" - ], - [ - "FStar.Pointer.Base.is_active_union_field_live", - 1, - 1, - 1, - [ - "@query", "lemma_FStar.Pointer.Base.is_active_union_live", - "lemma_FStar.Pointer.Base.live_gufield" - ], - 0, - "b8a8f3f1a3c25363d327acd1cd9a3ba7" - ], - [ - "FStar.Pointer.Base.is_active_union_field_eq", - 1, - 1, - 1, - [ - "@query", "eq2-interp", - "equation_FStar.Pointer.Base.is_active_union_field", - "equation_FStar.Pointer.Base.struct_field", "l_and-interp" - ], - 0, - "2bbb84b4fc7c451a68fd2fd27ea03f17" - ], - [ - "FStar.Pointer.Base.is_active_union_field_get_key", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", 
"@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.Pointer", "data_elim_Prims.Mkdtuple2", - "data_typing_intro_FStar.Pointer.Base.TUnion@tok", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._gtdata_get_key", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gtdata_create", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.is_active_union_field", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.ounion_get_key", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.union_create", - "equation_FStar.Pointer.Base.union_get_key", - "equation_FStar.Pointer.Base.union_typ", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "l_and-interp", - "lemma_FStar.Pointer.Base.is_active_union_field_live", - "lemma_FStar.Pointer.Base.is_active_union_live", - "proj_equation_FStar.Pervasives.Native.Some_v", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.TUnion_l", - "projection_inverse_Prims.Mkdtuple2__1", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.gufield", - "typing_FStar.Pointer.Base.live", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle" - ], - 0, - "9e765231f83d5256a7ff310bd86655c3" - ], - [ - "FStar.Pointer.Base.is_active_union_field_readable", - 1, - 1, - 1, - [ "@query", "lemma_FStar.Pointer.Base.readable_gufield" ], - 0, - "fe58d13ce032dccfbf18e54868cb6dcc" - 
], - [ - "FStar.Pointer.Base.is_active_union_field_includes_readable", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_concat.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_09d9235a8068627898e77b75945bdc5c", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.StepUField", - "constructor_distinct_FStar.Pointer.Base.TUnion", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pervasives.Native.Some", - "data_elim_FStar.Pointer.Base.Pointer", - "data_elim_FStar.Pointer.Base.TUnion", - "data_typing_intro_FStar.Pointer.Base.TUnion@tok", - "disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._ufield", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gtdata", - "equation_FStar.Pointer.Base.gtdata_get_key", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.is_active_union_field", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.otype_of_struct_field", - "equation_FStar.Pointer.Base.ounion", - "equation_FStar.Pointer.Base.ounion_get_key", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.step_sel", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.union_typ", "equation_Prims.logical", - "equation_with_fuel_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", "l_and-interp", - "lemma_FStar.Pointer.Base.otype_of_typ_union", - "lemma_FStar.Pointer.Base.path_includes_concat", - "lemma_FStar.Pointer.Base.readable_live", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_BarBar", - 
"primitive_Prims.op_Equality", - "proj_equation_FStar.Pervasives.Native.Some_v", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepUField_fd", - "projection_inverse_FStar.Pointer.Base.StepUField_l", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_1588d9cc9cb691265bea0ce2c66abf3c", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.gufield", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_includes", - "typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.typ_of_struct_field", "unit_typing" - ], - 0, - "5ebba8b6febcc5ecd1583cadf7aed48c" - ], - [ - "FStar.Pointer.Base._singleton_buffer_of_pointer", - 1, - 1, - 2, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "b2t_def", "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "data_elim_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.Pointer", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.StepCell", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.not_an_array_cell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", "equation_Prims.pos", - "equation_with_fuel_Prims.pow2.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "int_inversion", "int_typing", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Multiply", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.PathStep_s", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - 
"projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_t", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_Prims.pow2.fuel_instrumented", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.v" - ], - 0, - "27633def9bfd23ba228a449206ef489a" - ], - [ - "FStar.Pointer.Base.singleton_buffer_of_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_35927a15e9516018e6643fa65a3f830c", - "typing_FStar.Map.domain", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap" - ], - 0, - "7357188741744f406bba9dd511ebd716" - ], - [ - "FStar.Pointer.Base.gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Pointer.Base.buffer_root_length", - "primitive_Prims.op_Addition", "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t" - ], - 0, - "cedb50bc8889dda58e61a07547b0902e" - ], - [ - "FStar.Pointer.Base.buffer_of_array_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "equation_FStar.UInt.uint_t", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", "primitive_Prims.op_Addition", - "projection_inverse_BoxInt_proj_0", - 
"projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_cd8f0bca9917727345df4a84d95cb089", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.domain", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", "typing_FStar.UInt32.v" - ], - 0, - "95d32b5c9f543ac3f1421c638b5e6eec" - ], - [ - "FStar.Pointer.Base.buffer_length_gsingleton_buffer_of_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "constructor_distinct_Tm_unit", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer" - ], - 0, - "86e06633471c86c0c20e11560f3f0f30" - ], - [ - "FStar.Pointer.Base.buffer_length_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_blength" - ], - 0, - "2c584103ac3a835ac67c80ad506bd4f6" - ], - [ - "FStar.Pointer.Base.buffer_live", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Pointer.Base.buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "3a88ba324107eb7eca8f127be2542533" - ], - [ - "FStar.Pointer.Base.buffer_live_gsingleton_buffer_of_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_Tm_unit", "eq2-interp", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.live", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "l_and-interp", "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - 
"projection_inverse_FStar.Pointer.Base.Pointer_to", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer" - ], - 0, - "79a5ef83c9414423718a2bfd594308c5" - ], - [ - "FStar.Pointer.Base.buffer_live_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.Buffer_broot" - ], - 0, - "86a0547e3c4572bdc00e3fd25570bb80" - ], - [ - "FStar.Pointer.Base.buffer_unused_in", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Pointer.Base.buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "ea0eee7673abee09cd2aa9bdfeb5180b" - ], - [ - "FStar.Pointer.Base.buffer_live_not_unused_in", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.BufferRootSingleton", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.buffer_unused_in", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "lemma_FStar.Pointer.Base.live_not_unused_in", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "d00e06d9bb8e4966e275c868318ef676" - ], - [ - "FStar.Pointer.Base.buffer_unused_in_gsingleton_buffer_of_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_Tm_unit", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_unused_in", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.unused_in", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer" - ], 
- 0, - "7bf6868e3b086e79182d2c35df1668c2" - ], - [ - "FStar.Pointer.Base.buffer_unused_in_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Pointer.Base.buffer_unused_in", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.Buffer_broot" - ], - 0, - "2e7bd45e2f3d5e1a08590b0f38848a4b" - ], - [ - "FStar.Pointer.Base.frameOf_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Pointer.Base.buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "8a91e10e9a4b664ef603aecb67389caf" - ], - [ - "FStar.Pointer.Base.frameOf_buffer_gsingleton_buffer_of_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_Tm_unit", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer" - ], - 0, - "a4c542249a701ec9b94cea793a7c2ae3" - ], - [ - "FStar.Pointer.Base.frameOf_buffer_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.Buffer_broot" - ], - 0, - "6c30dac3d77abb5b838d1ed4b39b3665" - ], - [ - "FStar.Pointer.Base.live_region_frameOf_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.BufferRootSingleton", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - 
"equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.frameOf_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "4eae1cec98bdf064a473f7ce930ee6a2" - ], - [ - "FStar.Pointer.Base.buffer_as_addr", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Pointer.Base.buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "389a37cfb2e1b55ad17f1c6fd45d757d" - ], - [ - "FStar.Pointer.Base.buffer_as_addr_gsingleton_buffer_of_pointer", - 1, - 1, - 1, - [ "@query" ], - 0, - "a8992b94f5807d755e95be5a1a122cab" - ], - [ - "FStar.Pointer.Base.buffer_as_addr_gsingleton_buffer_of_pointer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_Tm_unit", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer" - ], - 0, - "db8b220c66c101475247a14453711b84" - ], - [ - "FStar.Pointer.Base.buffer_as_addr_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ "@query" ], - 0, - "231f17b82de569157e073157e895828d" - ], - [ - "FStar.Pointer.Base.buffer_as_addr_gbuffer_of_array_pointer", - 2, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.Buffer_broot" - ], - 0, - "089e8e8aad92d3c148ff73bf16df1d50" - ], - [ - "FStar.Pointer.Base.gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - 
"fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "d8d3936321d7cfddf16e326098ef7bb7" - ], - [ - "FStar.Pointer.Base.frameOf_buffer_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.gsub_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "ebe55371b4ee840390f39f7b6d25a7af" - ], - [ - "FStar.Pointer.Base.buffer_as_addr_gsub_buffer", - 1, - 1, - 1, - [ "@query" ], - 0, - "b2fd7aa7f1dd663fe97a39e25b1bd188" - ], - [ - "FStar.Pointer.Base.buffer_as_addr_gsub_buffer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.gsub_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "8f99e4ed31433adc0b2ceeb3bc31af18" - ], - [ - "FStar.Pointer.Base.sub_buffer", - 1, - 1, - 1, - [ "@query" ], - 0, - "bfd08bb6748be93620986c05dc3a2996" - ], - [ - "FStar.Pointer.Base.sub_buffer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", "lemma_FStar.UInt32.uv_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_745d664c6e159cd0e4e176fbcf302acd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", 
- "typing_FStar.Map.domain", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "b7b7ba57c9610ab91a6fd487f0ff50ca" - ], - [ - "FStar.Pointer.Base.offset_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "int_inversion", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.UInt32.v" - ], - 0, - "ed401fe6f561c34701675fc013fbed96" - ], - [ - "FStar.Pointer.Base.offset_buffer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_3c867da679bb8eb29a8fb4db4ca3b21f", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt32.v" - ], - 0, - "d35848d26e0c63fe7f4bf1d5cb3a8f91" - ], - [ - "FStar.Pointer.Base.buffer_length_gsub_buffer", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_blength" - ], - 0, - "41a47ce72167e5660aff0edc030059d5" - ], - [ - "FStar.Pointer.Base.buffer_live_gsub_buffer_equiv", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.gsub_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "257e8abbe8f00dc04a19eecdb0049cd3" - ], - [ - "FStar.Pointer.Base.buffer_live_gsub_buffer_intro", - 1, - 1, - 1, - [ "@query", "lemma_FStar.Pointer.Base.buffer_live_gsub_buffer_equiv" ], - 0, - "fe3419c6083f3c3aed026e54d5fd949a" - ], - [ - "FStar.Pointer.Base.buffer_unused_in_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_unused_in", - "equation_FStar.Pointer.Base.gsub_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - 
"typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "90c23761f8a1af53342f8892aec9141f" - ], - [ - "FStar.Pointer.Base.gsub_buffer_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt32.v" - ], - 0, - "0683e631e66645f60967e30d36feabd3" - ], - [ - "FStar.Pointer.Base.gsub_buffer_gsub_buffer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", "b2t_def", - "data_elim_FStar.Pointer.Base.Buffer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_bc3e4ef50c18a50a72b09b9950468e20", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gsub_buffer", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "5eec32a424ce6a1f3935f84743b83b7c" - ], - [ - "FStar.Pointer.Base.gsub_buffer_zero_buffer_length", - 1, - 1, - 1, - [ - "@query", "primitive_Prims.op_Addition", - "projection_inverse_BoxInt_proj_0" - ], - 0, - "c1cf671de1ae9a187cb4bdfa0731f0e0" - ], - [ - "FStar.Pointer.Base.gsub_buffer_zero_buffer_length", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - 
"equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "int_typing", "lemma_FStar.UInt32.uv_inv", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "cda73c32d8f7e2bf4b4b2c2e750a4c93" - ], - [ - "FStar.Pointer.Base.buffer_root_as_seq", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "24781f609ee79fd0fef4d3bd8ab6fb28" - ], - [ - "FStar.Pointer.Base.length_buffer_root_as_seq", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "bool_inversion", "bool_typing", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.BufferRootSingleton", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer_root_as_seq", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", "equation_Prims.pos", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "equation_with_fuel_Prims.pow2.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "int_typing", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.type_of_typ_array", - "lemma_FStar.Seq.Base.lemma_create_len", "lemma_FStar.UInt32.uv_inv", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", "primitive_Prims.op_Multiply", - "primitive_Prims.op_Subtraction", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - 
"refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_Prims.pow2.fuel_instrumented", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.buffer_root_as_seq", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gread", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Seq.Base.length", "typing_FStar.UInt32.uint_to_t", - "typing_FStar.UInt32.v" - ], - 0, - "d4aad7e2430e53a7a036f94e3be884c1" - ], - [ - "FStar.Pointer.Base.buffer_as_seq", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", - "data_elim_FStar.Pointer.Base.Buffer", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.buffer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.length_buffer_root_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_365abba901205a01d0ef28ebf2198c47", - "refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperHeap.rid_freeable", - "typing_FStar.Monotonic.HyperHeap.root", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.UInt32.v" - ], - 0, - "b0baf2b6f85323469057831923acfd41" - ], - [ - "FStar.Pointer.Base.buffer_length_buffer_as_seq", - 1, - 1, - 1, - [ "@query" ], - 0, - "2b76ef89a0637d9964e0ed28ecbd692e" - ], - [ - "FStar.Pointer.Base.buffer_length_buffer_as_seq", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "equation_FStar.Pointer.Base.buffer", - 
"equation_FStar.Pointer.Base.buffer_as_seq", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_as_seq", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "int_typing", "lemma_FStar.Pointer.Base.length_buffer_root_as_seq", - "lemma_FStar.Seq.Base.lemma_len_slice", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_as_seq", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_as_seq", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Seq.Base.length", "typing_FStar.UInt32.v" - ], - 0, - "bec7f6afe21916f35549df7dc240faaa" - ], - [ - "FStar.Pointer.Base.buffer_as_seq_gsingleton_buffer_of_pointer", - 1, - 1, - 2, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_Prims.pow2.fuel_instrumented", "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_cbaf63ee0f7cd0118b8161198aafa7ec", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Seq.Base_interpretation_Tm_arrow_44bb45ed5c2534b346e0f58ea5033251", - "FStar.Seq.Base_pretyping_7efa52b424e80c83ad68a652aa3561e4", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pervasives.Native.None", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base.TArray", - "constructor_distinct_FStar.Seq.Base.seq", - "constructor_distinct_FStar.UInt32.t", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.Pointer", - "data_elim_FStar.Pointer.Base.TArray", "data_elim_Prims.Mkdtuple2", - 
"disc_equation_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pointer.Base.PathStep", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.StepCell", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_seq", - "equation_FStar.Pointer.Base.buffer_root_as_seq", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.none_ovalue", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.step_sel", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.eq2", "equation_Prims.logical", "equation_Prims.nat", - "equation_Prims.pos", "equation_Prims.squash", - "equation_with_fuel_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.path", - "fuel_guarded_inversion_FStar.Pointer.Base.step", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "int_inversion", "int_typing", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "interpretation_Tm_abs_cfbb5b4925290e52ac8f62a4db2a2e50", - "l_and-interp", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.length_buffer_root_as_seq", - "lemma_FStar.Seq.Base.init_index_", - "lemma_FStar.Seq.Base.lemma_create_len", - "lemma_FStar.Seq.Base.lemma_eq_elim", - "lemma_FStar.Seq.Base.lemma_eq_intro", - "lemma_FStar.Seq.Base.lemma_index_create", - "lemma_FStar.Seq.Base.lemma_index_slice", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_FStar.Seq.Properties.slice_slice", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.PathStep_s", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pervasives.Native.None_a", - 
"projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_t", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_1ba8fd8bb363097813064c67740b2de5", - "refinement_interpretation_Tm_refine_1d7f81705c35971e00b909e37163cd25", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_35a0739c434508f48d0bb1d5cd5df9e8", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_6a1872e8dc484ff4b169a75dface8e09", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_d3d07693cd71377864ef84dc97d10ec1", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_dce369254040b9bd3ac1454cc66ab5ae", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "refinement_kinding_Tm_refine_2de20c066034c13bf76e9c0b94f4806c", - "token_correspondence_FStar.Pointer.Base.dummy_val.fuel_instrumented", - "token_correspondence_Prims.pow2.fuel_instrumented", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Map.sel", "typing_FStar.Monotonic.Heap.sel", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.aref_live_at", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Monotonic.HyperStack.greference_of", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gread", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - 
"typing_FStar.Pointer.Base.path_sel", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Seq.Base.create", "typing_FStar.Seq.Base.length", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_FStar.UInt.fits", "typing_FStar.UInt32.v", - "typing_Prims.l_and", - "typing_Tm_abs_cfbb5b4925290e52ac8f62a4db2a2e50", "unit_typing" - ], - 0, - "303b9560bfe5fa4e0aff36a7725b47b3" - ], - [ - "FStar.Pointer.Base.buffer_as_seq_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.array", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer_as_seq", - "equation_FStar.Pointer.Base.buffer_root_as_seq", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_typing", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Seq.Base.lemma_len_slice", - "lemma_FStar.Seq.Properties.slice_slice", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_1ba8fd8bb363097813064c67740b2de5", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_81407705a0828c2c1b1976675443f647", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_ba20691c598b7aba0d11d91ead0d6da1", - "refinement_interpretation_Tm_refine_d3d07693cd71377864ef84dc97d10ec1", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.Seq.Base.length", "typing_FStar.UInt32.v" - ], - 0, - "5136689c8c4e9de0429c25e509cb05c7" - ], - [ - "FStar.Pointer.Base.buffer_as_seq_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", 
"equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "lemma_FStar.Pointer.Base.buffer_length_buffer_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.UInt32.v" - ], - 0, - "e574f1a85da346600a8c784645104bfe" - ], - [ - "FStar.Pointer.Base.buffer_as_seq_gsub_buffer", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", "bool_typing", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_seq", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "int_typing", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.length_buffer_root_as_seq", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.v" - ], - 0, - "e6a0f2904c65568b4463ff9f38630ce4" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.UInt32.t", - "constructor_distinct_Prims.unit", - 
"disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v", - "unit_typing" - ], - 0, - "e91d03b7952353a598b0588972e496b6" - ], - [ - "FStar.Pointer.Base.pointer_of_buffer_cell", - 1, - 1, - 1, - [ "@query" ], - 0, - "5f6093fef5a155ed87b3468640caf99f" - ], - [ - "FStar.Pointer.Base.pointer_of_buffer_cell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.UInt32.t", - "constructor_distinct_Prims.unit", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", "lemma_FStar.UInt32.uv_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_t", - 
"refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_c8a149bfed84d9c7f91b56b75749a739", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.domain", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v", - "unit_typing" - ], - 0, - "0736863a04381fde6b2cc657dbbafc3e" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "int_inversion", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt32.v" - ], - 0, - "1171553367577b78b29814e7e88a0688" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell_gsub_buffer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "data_elim_FStar.Pointer.Base.Buffer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", "int_typing", "lemma_FStar.UInt32.uv_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - 
"refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_bc3e4ef50c18a50a72b09b9950468e20", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.gsub_buffer", "typing_FStar.UInt.fits", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.uint_to_t", - "typing_FStar.UInt32.v" - ], - 0, - "6f8660a1bc651406369d640a06b58ce3" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell_gsub_buffer'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "int_inversion", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt32.v" - ], - 0, - "2eb24d4127072e80a24d38e0b29d9383" - ], - [ - "FStar.Pointer.Base.live_gpointer_of_buffer_cell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "constructor_distinct_Tm_unit", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.Map.lemma_ContainsDom", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell" - ], - 0, - "5aa67f8ca1ab8c31c072b173656baf2f" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell_gsingleton_buffer_of_pointer", - 1, - 1, - 2, - [ - "@MaxFuel_assumption", 
"@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", "@query", - "b2t_def", "bool_inversion", "bool_typing", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "equation_Prims.pos", - "equation_with_fuel_Prims.pow2.fuel_instrumented", "int_inversion", - "int_typing", - "lemma_FStar.Pointer.Base.buffer_length_gsingleton_buffer_of_pointer", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThan", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Multiply", "primitive_Prims.op_Subtraction", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_Prims.pow2.fuel_instrumented", - "typing_FStar.UInt32.v" - ], - 0, - "8e8487ebd574b1f0b3992d69c22fa9fe" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell_gsingleton_buffer_of_pointer", - 2, - 1, - 2, - [ - "@MaxIFuel_assumption", "@query", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", "b2t_def", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.Buffer", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "fuel_guarded_inversion_FStar.Pointer.Base.path", "int_inversion", - "int_typing", "lemma_FStar.UInt32.uv_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Buffer_t", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - 
"typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.v" - ], - 0, - "527c13d50deadb3e3076ba80dbe622be" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@query", - "lemma_FStar.Pointer.Base.buffer_length_gbuffer_of_array_pointer" - ], - 0, - "175f45030c9a082bb6266c7e3f964e66" - ], - [ - "FStar.Pointer.Base.gpointer_of_buffer_cell_gbuffer_of_array_pointer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "bool_typing", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", "int_inversion", - "int_typing", "lemma_FStar.UInt32.uv_inv", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.gbuffer_of_array_pointer", - "typing_FStar.UInt.fits", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.v" - ], - 0, - "73d74873ac02c07192738c3d70f6f88f" - ], - [ - "FStar.Pointer.Base.frameOf_gpointer_of_buffer_cell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "constructor_distinct_Tm_unit", "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "int_inversion", "primitive_Prims.op_AmpAmp", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - 
"typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.UInt32.v" - ], - 0, - "d2fa18592ffa7a5b95567d117ef55efd" - ], - [ - "FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - 1, - 1, - 1, - [ "@query" ], - 0, - "582a1e0323ad83fd41f948c5694e897a" - ], - [ - "FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "constructor_distinct_Tm_unit", "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "int_inversion", "primitive_Prims.op_AmpAmp", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.UInt32.v" - ], - 0, - "03ddfde04698c0d3247e6cb31f196fb3" - ], - [ - "FStar.Pointer.Base.gread_gpointer_of_buffer_cell", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "lemma_FStar.Pointer.Base.buffer_length_buffer_as_seq", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "56620811a10f28620d33fb786cc483c7" - ], - [ - "FStar.Pointer.Base.gread_gpointer_of_buffer_cell", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", "bool_typing", - "data_elim_FStar.Pointer.Base.Buffer", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_seq", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_as_seq", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.gread", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - 
"equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "fuel_guarded_inversion_FStar.Pointer.Base.typ", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "int_typing", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.gread_gcell", - "lemma_FStar.Pointer.Base.length_buffer_root_as_seq", - "lemma_FStar.Seq.Base.lemma_index_create", - "lemma_FStar.Seq.Base.lemma_index_slice", - "lemma_FStar.UInt32.uv_inv", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_35a0739c434508f48d0bb1d5cd5df9e8", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_c1424615841f28cac7fc34e92b7ff33c", - "refinement_interpretation_Tm_refine_d3d07693cd71377864ef84dc97d10ec1", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_as_seq", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.gread", - "typing_FStar.Pointer.Base.type_of_typ", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "4594b52069ffac074aa15dffcf9c98d0" - ], - [ - "FStar.Pointer.Base.gread_gpointer_of_buffer_cell'", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "lemma_FStar.Pointer.Base.buffer_length_buffer_as_seq", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "d3501abcfa0dbd0347a6578ad1958622" - ], - [ - "FStar.Pointer.Base.gread_gpointer_of_buffer_cell'", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "lemma_FStar.Pointer.Base.gread_gpointer_of_buffer_cell" 
- ], - 0, - "17a188525ec272b8a9e6968dff2548d6" - ], - [ - "FStar.Pointer.Base.index_buffer_as_seq", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "lemma_FStar.Pointer.Base.buffer_length_buffer_as_seq", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt32.v" - ], - 0, - "dfbcfaca1e1cd40b15a63de8fbccc6a5" - ], - [ - "FStar.Pointer.Base.index_buffer_as_seq", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", "bool_typing", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "int_typing", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.gread_gpointer_of_buffer_cell", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_365abba901205a01d0ef28ebf2198c47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperHeap.rid_freeable", - "typing_FStar.Monotonic.HyperHeap.root", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt.fits", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "5d4589fe0b4f036813f103b024b3135f" - ], - [ - "FStar.Pointer.Base.gsingleton_buffer_of_pointer_gcell", - 1, - 1, - 1, - [ - "@query", - "lemma_FStar.Pointer.Base.buffer_length_gbuffer_of_array_pointer", - "primitive_Prims.op_Addition", "projection_inverse_BoxInt_proj_0" - ], - 0, - "5e5381e58d3d8158db6f7cf0f81983d6" - ], - [ - "FStar.Pointer.Base.gsingleton_buffer_of_pointer_gcell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "bool_typing", "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.StepCell", - 
"constructor_distinct_Tm_unit", "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "int_inversion", "int_typing", "lemma_FStar.UInt32.uv_inv", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.gbuffer_of_array_pointer", - "typing_FStar.Pointer.Base.gcell", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.v" - ], - 0, - "797a8ecf7e521210daadefeb99eb5fd3" - ], - [ - "FStar.Pointer.Base.gsingleton_buffer_of_pointer_gpointer_of_buffer_cell", - 1, - 1, - 1, - [ - "@query", "primitive_Prims.op_Addition", - "projection_inverse_BoxInt_proj_0" - ], - 0, - "39badc92106e69871952e4f6d3ccbbd6" - ], - [ - "FStar.Pointer.Base.gsingleton_buffer_of_pointer_gpointer_of_buffer_cell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", "b2t_def", - "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.Buffer", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.array_length_t", - 
"equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.not_an_array_cell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", "int_typing", - "lemma_FStar.Pointer.Base.gsingleton_buffer_of_pointer_gcell", - "lemma_FStar.UInt32.uv_inv", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - "refinement_interpretation_Tm_refine_bc3e4ef50c18a50a72b09b9950468e20", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "typing_FStar.Pointer.Base.gsub_buffer", - "typing_FStar.Pointer.Base.not_an_array_cell", - "typing_FStar.UInt.fits", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "868e43f64034ce662da5cadaab38d616" - ], - [ - "FStar.Pointer.Base.buffer_readable_buffer_live", - 1, - 1, - 1, - [ - "@query", 
"equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_" - ], - 0, - "61c106eeb28769fe83f93c6df35c10c7" - ], - [ - "FStar.Pointer.Base.buffer_readable_gsingleton_buffer_of_pointer", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", - "@fuel_irrelevance_Prims.pow2.fuel_instrumented", "@query", - "b2t_def", "bool_inversion", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "int_typing", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.buffer_length_gsingleton_buffer_of_pointer", - "lemma_FStar.Pointer.Base.buffer_live_gsingleton_buffer_of_pointer", - "lemma_FStar.Pointer.Base.gpointer_of_buffer_cell_gsingleton_buffer_of_pointer", - "lemma_FStar.Pointer.Base.readable_live", - "lemma_FStar.UInt.pow2_values", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "typing_FStar.UInt.fits", "typing_FStar.UInt32.uint_to_t", - "typing_FStar.UInt32.v" - ], - 0, - "434a323e9754cb612d353f536e4f07f4" - ], - [ - "FStar.Pointer.Base.buffer_readable_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_", - "equation_FStar.Pointer.Base.gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.buffer_length_gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.buffer_live_gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.gpointer_of_buffer_cell_gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.readable_gcell", - "lemma_FStar.Pointer.Base.readable_live", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c" - ], - 0, - "e22abb92dfa7fb9697fb7b3a49939bd9" - ], - [ - "FStar.Pointer.Base.buffer_readable_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", 
"b2t_def", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "lemma_FStar.Pointer.Base.buffer_length_gsub_buffer", - "lemma_FStar.Pointer.Base.buffer_live_gsub_buffer_equiv", - "lemma_FStar.Pointer.Base.buffer_readable_buffer_live", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.uint_to_t", - "typing_FStar.UInt32.v" - ], - 0, - "350ddfed32868cc96457493f7e666bf2" - ], - [ - "FStar.Pointer.Base.readable_gpointer_of_buffer_cell", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_" - ], - 0, - "9ba64665d90bb28d2889fd40653d3cf2" - ], - [ - "FStar.Pointer.Base.buffer_readable_intro", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_" - ], - 0, - "4779c263c10955d084e13727e39bbf9a" - ], - [ - "FStar.Pointer.Base.buffer_readable_elim", - 1, - 1, - 1, - [ - "@query", "lemma_FStar.Pointer.Base.buffer_readable_buffer_live", - "lemma_FStar.Pointer.Base.readable_gpointer_of_buffer_cell" - ], - 0, - "b83490aa69fbdbff49cd63473c1f902f" - ], - [ - "FStar.Pointer.Base.disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_Prims.eqtype", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0" - ], - 0, - "4f2a9494d6c68350d8ea9017e6faf010" - ], - [ - "FStar.Pointer.Base.disjoint_root", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.disjoint", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "true_interp" - ], - 0, - "18976bc90e08d891ef21260c3962df63" - ], - [ - "FStar.Pointer.Base.disjoint_gfield", - 1, - 1, - 1, 
- [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.StepField", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.StepField@tok", - "data_typing_intro_FStar.Pointer.Base.TStruct@tok", "eq2-interp", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.disjoint", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "l_and-interp", "lemma_FStar.Pointer.Base.as_addr_gfield", - "lemma_FStar.Pointer.Base.frameOf_gfield", - "lemma_FStar.Pointer.Base.path_disjoint_step", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "primitive_Prims.op_disEquality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.StepField_l", - "refinement_interpretation_Tm_refine_041a24a8c3715e0f4960d28f20ee920b", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", "typing_FStar.Pointer.Base.gfield", - "typing_FStar.Pointer.Base.typ_of_struct_field", "unit_typing" - ], - 0, - "4f460f8765ffd6650d731af6c66819c6" - ], - [ - "FStar.Pointer.Base.disjoint_gcell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_b27168cc0cd9c7f90ae88b602ccfc55c", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.StepCell@tok", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", "eq2-interp", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.disjoint", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_FStar.UInt.fits", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.eqtype", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_Prims.int", - "haseqTm_refine_f13070840248fced9d9d60d77bdae3ec", "int_inversion", - "l_and-interp", 
"lemma_FStar.Pointer.Base.as_addr_gcell", - "lemma_FStar.Pointer.Base.frameOf_gcell", - "lemma_FStar.Pointer.Base.path_disjoint_step", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_disEquality", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "refinement_interpretation_Tm_refine_041a24a8c3715e0f4960d28f20ee920b", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.gcell", - "typing_FStar.Pointer.Base.step_disjoint", "typing_FStar.UInt32.v" - ], - 0, - "d458e63e4d179e8ee94c6a251aa7a525" - ], - [ - "FStar.Pointer.Base.disjoint_includes", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.disjoint", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_Prims.eqtype", - "equation_Prims.l_and", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", "l_and-interp", - "lemma_FStar.Pointer.Base.path_disjoint_includes_l", - "lemma_FStar.Pointer.Base.path_disjoint_sym", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_1588d9cc9cb691265bea0ce2c66abf3c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "true_interp", "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.includes", - "typing_FStar.Pointer.Base.path_includes" - ], - 0, - "90d88d5302d9071a88db2d18e525f62a" - ], - [ - "FStar.Pointer.Base.disjoint_ind", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - 
"FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base.StepField", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.StepCell", - "data_elim_FStar.Pointer.Base.StepField", - "data_elim_FStar.Pointer.Base.TArray", - "disc_equation_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.StepCell", - "disc_equation_FStar.Pointer.Base.StepField", "eq2-interp", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.disjoint", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.path_disjoint", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.step_disjoint", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.UInt.uint_t", "equation_Prims.eq2", - "equation_Prims.eqtype", "equation_Prims.l_and", - "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", "l_and-interp", - "lemma_FStar.Pointer.Base.disjoint_gcell", - "lemma_FStar.Pointer.Base.disjoint_gfield", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "primitive_Prims.op_disEquality", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.StepField_fd", - "projection_inverse_FStar.Pointer.Base.StepField_l", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "projection_inverse_FStar.Pointer.Base.TStruct_l", - "refinement_interpretation_Tm_refine_035771f6b75c6418e7b567530874ccfb", - "refinement_interpretation_Tm_refine_1588d9cc9cb691265bea0ce2c66abf3c", - "refinement_interpretation_Tm_refine_15c45a18895db0eab7a092e57be84ca9", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_77cb76ece3ec1de98424d83e532afccd", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_a02d9d7bf230ab25f66365b7b22b819e", - 
"refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_e937e34ee8783c8b9b7d7a707f01634d", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.path_includes", - "typing_FStar.Pointer.Base.step_disjoint" - ], - 0, - "699a862d74f54579af88a2e2dc5d4cb9" - ], - [ - "FStar.Pointer.Base.disjoint_ind", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.disjoint", "equation_FStar.UInt.uint_t", - "equation_Prims.eqtype", "equation_Prims.nat", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "haseqTm_refine_f13070840248fced9d9d60d77bdae3ec", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_1057f9d2b591d45e0a692fa868a0c66f", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_30a755a87d7c91eaa6f98b54378d0d56", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "true_interp" - ], - 0, - "969a4084bb7444e80d144baf9b525a35" - ], - [ - "FStar.Pointer.Base.disjoint_sym", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_FStar.UInt.uint_t", - "equation_Prims.eqtype", "equation_Prims.nat", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "haseqTm_refine_f13070840248fced9d9d60d77bdae3ec", - "refinement_interpretation_Tm_refine_0600ed6fdc397ec1705174e441a78dc0", - "refinement_interpretation_Tm_refine_1057f9d2b591d45e0a692fa868a0c66f", - "refinement_interpretation_Tm_refine_28e1729ae23638276a10e717f1d0f59c", - "refinement_interpretation_Tm_refine_30a755a87d7c91eaa6f98b54378d0d56", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_9d2830d159b1e910dc3e47685b21dba0", - "refinement_interpretation_Tm_refine_f1bdf056430cac446ba97c183d132799" - ], - 0, - "1ca2b40d59824da8c1c43caf667ff5c9" - ], - [ - "FStar.Pointer.Base.disjoint_sym'", - 1, - 1, - 1, - [ "@query" ], - 0, - "1e9f6c2afcb4deba775a1eb2810aa4e7" - ], - [ - "FStar.Pointer.Base.disjoint_includes_l", - 1, - 1, - 1, - [ "@query", "lemma_FStar.Pointer.Base.includes_refl" ], - 0, - "b6862c56de1dcf6ed428fd994d05886b" - ], - [ - "FStar.Pointer.Base.disjoint_includes_l_swap", - 1, - 1, - 1, - [ "@query" ], - 0, - "1b19ac1685da8ac77b4803d0078834f6" - ], - [ - "FStar.Pointer.Base.disjoint_includes_r", - 1, - 1, - 1, - [ "@query" ], - 0, - "33774282f93c77a75049554f276b8476" - ], - [ - "FStar.Pointer.Base.__proj__LocBuffer__item__t", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_eaa9b5871ffb5dfb1e4b50ec94a00ce3" - ], - 0, - "43e26da83d6799f24558919e5928c168" - ], - [ - "FStar.Pointer.Base.__proj__LocBuffer__item__b", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_eaa9b5871ffb5dfb1e4b50ec94a00ce3" - ], - 0, - "0e9a8807dacacf221beaddaef33553c6" - ], - [ - "FStar.Pointer.Base.__proj__LocPointer__item__t", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "refinement_interpretation_Tm_refine_13ab046771d51233a400cd28aa47c12e" - ], - 0, - "ec9670b59eace89d3c6a70d843b2120a" - ], - [ - "FStar.Pointer.Base.__proj__LocPointer__item__p", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - 
"refinement_interpretation_Tm_refine_13ab046771d51233a400cd28aa47c12e" - ], - 0, - "2d129cffb0400983c0f3d3f37cfa2ab2" - ], - [ - "FStar.Pointer.Base.loc_aux_includes_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "87914e1df2c31c3267f3a2181d5f50f7" - ], - [ - "FStar.Pointer.Base.loc_aux_includes_pointer_trans", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.buffer_includes_pointer", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "typing_FStar.Pointer.Base.includes" - ], - 0, - "bcbdc3820ab9b188303393f29bc067eb" - ], - [ - "FStar.Pointer.Base.loc_aux_includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "b0a37a5086c1ffb4a92d717167c1dbf1" - ], - [ - "FStar.Pointer.Base.loc_aux_includes_refl'", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "data_elim_FStar.Pointer.Base.LocBuffer", - "data_elim_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_includes_pointer", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "lemma_FStar.Pointer.Base.includes_refl", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell" - ], - 0, - "69223844d894160c463635bb4b6981df" - ], - [ - "FStar.Pointer.Base.loc_aux_includes_loc_aux_includes_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.buffer_includes_pointer", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t" - ], - 0, - 
"a122148d8aa09e1e489055fa0638b025" - ], - [ - "FStar.Pointer.Base.loc_aux_includes_trans", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t" - ], - 0, - "903007afb7f87771d807e299377bf0ca" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "4b626dc23d56db8a4862143b563884bf" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint_buffer_sym", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "equation_FStar.Pointer.Base.disjoint_buffer_vs_pointer", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.loc_aux_disjoint_buffer", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell" - ], - 0, - "037fce4ac4eb8fcd3a1280a8b2d93852" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint_pointer_buffer_sym", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.disjoint_buffer_vs_pointer", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.loc_aux_disjoint_buffer", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell" - ], - 0, - "22eb420fe0801f5407570fb4ef7c915e" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "f8f74391c076e7b9029c5e072f8ca8e4" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint_sym", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_3c6602ae075bb8847a1e8d4d2278d7fa", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - 
"equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "proj_equation_FStar.Pervasives.Native.Mktuple2__1", - "proj_equation_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__1", - "projection_inverse_FStar.Pervasives.Native.Mktuple2__2", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t" - ], - 0, - "f81c7cb22df1cfa5fb35bdfedf5f33d4" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint_pointer_includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", "data_elim_FStar.Pointer.Base.LocBuffer", - "data_elim_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.disjoint_buffer_vs_pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "lemma_FStar.Pointer.Base.disjoint_includes_r", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.includes" - ], - 0, - "5cac0b6c425084ff60cfe95c27dfd910" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint_loc_aux_includes_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.buffer_includes_pointer", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t" - ], - 0, - "490de332bef73498f72dbe5679f1b541" - ], - [ - "FStar.Pointer.Base.loc_aux_disjoint_loc_aux_includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - 
"projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t" - ], - 0, - "4681b9d260239eb09a61b57d615d0e92" - ], - [ - "FStar.Pointer.Base.loc_aux_preserved", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "3ef1e7fbe84f6fd8455368fa55bb8380" - ], - [ - "FStar.Pointer.Base.pointer_preserved_intro", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.equal_values", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_preserved", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Monotonic.HyperStack.aref_as_addr_aref_of", - "lemma_FStar.Monotonic.HyperStack.aref_live_at_aref_of", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - "lemma_FStar.Pointer.Base.readable_live", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.aref_of", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.live", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle" - ], - 0, - "295f161650fb520888634994e4364878" - ], - [ - "FStar.Pointer.Base.pointer_preserved_intro", - 2, - 1, - 1, - [ "@query" ], - 0, - "3fb5db78a2751932c5cef6376ade77fc" - ], - [ - "FStar.Pointer.Base.buffer_preserved_intro", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_preserved", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "lemma_FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.frameOf_gpointer_of_buffer_cell", - "refinement_interpretation_Tm_refine_4a180d7aa1afd0a2fad463fd9c642bcc" - ], - 0, - "91f3044e26aa7d34a2ee610a13a487dc" - ], - [ - "FStar.Pointer.Base.buffer_preserved_intro", - 2, - 1, - 1, - [ "@query" ], - 0, - "87451f38fbcdac12d33003026b6e4047" - ], - [ - "FStar.Pointer.Base.disjoint_not_self", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - 
"disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Pointer.Base.disjoint", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "l_and-interp", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7871a89cee4ff50bc14c65877a68b5bd", - "typing_FStar.Pointer.Base.path_equal" - ], - 0, - "5159133cfa4468f4e779c493f231d85d" - ], - [ - "FStar.Pointer.Base.loc_aux_in_addr", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "f31d0616c71d744a343122d7203ceab6" - ], - [ - "FStar.Pointer.Base.cls", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "disc_equation_FStar.Pointer.Base.LocBuffer", - "disc_equation_FStar.Pointer.Base.LocPointer", "eq2-interp", - "equation_FStar.Pointer.Base.aloc", - "equation_FStar.Pointer.Base.buffer_preserved", - "equation_FStar.Pointer.Base.equal_values", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_preserved", - "equation_FStar.Pointer.Base.pointer_preserved", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", "int_inversion", - "l_and-interp", "lemma_FStar.Pointer.Base.loc_aux_includes_refl__", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_ee0c42debbc39763e22b0866c76a996c" - ], - 0, - "e4e3fae866d579e7e82e98718361c602" - ], - [ - "FStar.Pointer.Base.loc_union_idem", - 1, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.loc_union" ], - 0, - "6863d3f03d7fe9be988cdc7a318cb641" - ], - [ - "FStar.Pointer.Base.loc_pointer", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.LocPointer", - "eq2-interp", "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_Prims.nat", "l_and-interp", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t" - ], - 0, - "5fcc1d57cf4ea0a56e7ddb29e52c3a4e" - ], - [ - "FStar.Pointer.Base.loc_buffer", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "eq2-interp", "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_Prims.nat", "l_and-interp", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t" - ], - 0, - "14b4297e50602140051d502622a0b22a" - ], - [ - "FStar.Pointer.Base.loc_addresses", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "05800f040290cc4d687562d628a6fa01" - ], - [ - 
"FStar.Pointer.Base.loc_includes_refl", - 1, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.loc_includes" ], - 0, - "ee5a0c600db8e72f783182b7ab2c69e1" - ], - [ - "FStar.Pointer.Base.loc_includes_trans", - 1, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.loc_includes" ], - 0, - "f51a869039dba2d4334b8be3cf9b1986" - ], - [ - "FStar.Pointer.Base.loc_includes_union_r", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_union" - ], - 0, - "f3220436291b47d03047931b2435ad08" - ], - [ - "FStar.Pointer.Base.loc_includes_union_l", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_union" - ], - 0, - "45490061fb22f672248b27491e00d7fb" - ], - [ - "FStar.Pointer.Base.loc_includes_none", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_none" - ], - 0, - "2129dad406295e7c2ad04c3a4556a2e7" - ], - [ - "FStar.Pointer.Base.loc_includes_pointer_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "interpretation_Tm_abs_6f6e5b3c6d0b6c95f3240339e76063e0", - "l_and-interp", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", - "proj_equation_FStar.ModifiesGen.Cls_aloc_includes", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_includes", - "token_correspondence_FStar.Pointer.Base.loc_aux_includes", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.includes" - ], - 0, - "99233d264b9f95b4f7b863ec7949199d" - ], - [ - "FStar.Pointer.Base.loc_includes_gsingleton_buffer_of_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "data_elim_FStar.Pointer.Base.BufferRootArray", - 
"data_elim_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base._singleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_typing", - "interpretation_Tm_abs_6f6e5b3c6d0b6c95f3240339e76063e0", - "l_and-interp", - "lemma_FStar.Pointer.Base.buffer_as_addr_gsingleton_buffer_of_pointer", - "lemma_FStar.Pointer.Base.buffer_length_gsingleton_buffer_of_pointer", - "lemma_FStar.Pointer.Base.frameOf_buffer_gsingleton_buffer_of_pointer", - "lemma_FStar.Pointer.Base.gpointer_of_buffer_cell_gsingleton_buffer_of_pointer", - "lemma_FStar.Pointer.Base.includes_refl", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.ModifiesGen.Cls_aloc_includes", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_t", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_includes", - "token_correspondence_FStar.Pointer.Base.loc_aux_includes", - "typing_FStar.Monotonic.HyperStack.aref_equal", - 
"typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.gsingleton_buffer_of_pointer", - "typing_FStar.Pointer.Base.includes", "typing_FStar.UInt.fits", - "typing_FStar.UInt32.v" - ], - 0, - "97b5d16c9cec6d4700d4f47382aec1d3" - ], - [ - "FStar.Pointer.Base.loc_includes_gbuffer_of_array_pointer", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.path_includes.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_1e5c7ab94e0ccfb7b0d2bbb59a9e3f5d", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", "eq2-interp", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.includes", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_includes.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "int_inversion", - "interpretation_Tm_abs_6f6e5b3c6d0b6c95f3240339e76063e0", - "l_and-interp", - "lemma_FStar.Pointer.Base.buffer_as_addr_gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.buffer_length_gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.frameOf_buffer_gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.gpointer_of_buffer_cell_gbuffer_of_array_pointer", - "lemma_FStar.Pointer.Base.path_includes_refl", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_BarBar", - "primitive_Prims.op_Equality", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.ModifiesGen.Cls_aloc_includes", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - 
"projection_inverse_FStar.Pointer.Base.Pointer_p", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_82707a6e3d48caa257bb4bddb01d7d73", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_includes", - "token_correspondence_FStar.Pointer.Base.loc_aux_includes", - "typing_FStar.Monotonic.HyperStack.aref_equal", - "typing_FStar.Pointer.Base.gbuffer_of_array_pointer", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.includes", "typing_FStar.UInt32.v" - ], - 0, - "f6d0c24d23f0de96614b8aef29158f95" - ], - [ - "FStar.Pointer.Base.loc_includes_gpointer_of_array_cell", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", "eq2-interp", - "equation_FStar.Pointer.Base.buffer_includes_pointer", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", "equation_Prims.nat", - "interpretation_Tm_abs_6f6e5b3c6d0b6c95f3240339e76063e0", - "l_and-interp", - "lemma_FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.frameOf_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.includes_refl", - "proj_equation_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_includes", - "token_correspondence_FStar.Pointer.Base.loc_aux_includes" - ], - 0, - "bb31650fa3f4c9c3292deb6e060b251e" - ], - [ - "FStar.Pointer.Base.loc_includes_gsub_buffer_r", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "bool_typing", "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "eq2-interp", "equation_FStar.Pointer.Base.buffer_includes_pointer", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", "int_inversion", - "interpretation_Tm_abs_6f6e5b3c6d0b6c95f3240339e76063e0", - "l_and-interp", - "lemma_FStar.Pointer.Base.buffer_as_addr_gsub_buffer", - "lemma_FStar.Pointer.Base.buffer_length_gsub_buffer", - "lemma_FStar.Pointer.Base.frameOf_buffer_gsub_buffer", - "lemma_FStar.Pointer.Base.gpointer_of_buffer_cell_gsub_buffer_", - "lemma_FStar.Pointer.Base.includes_refl", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - 
"proj_equation_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_includes", - "token_correspondence_FStar.Pointer.Base.loc_aux_includes", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.v" - ], - 0, - "18fbaee04fa52f09aed53c99cd931d9c" - ], - [ - "FStar.Pointer.Base.loc_includes_gsub_buffer_l", - 1, - 1, - 1, - [ "@query" ], - 0, - "fba07db2a5eaaa35098e503605c94cd2" - ], - [ - "FStar.Pointer.Base.loc_includes_gsub_buffer_l", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "bool_typing", "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "eq2-interp", "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.buffer_includes_pointer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_includes", - "equation_FStar.Pointer.Base.loc_aux_includes_buffer", - "equation_FStar.Pointer.Base.loc_aux_includes_pointer", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", - "interpretation_Tm_abs_6f6e5b3c6d0b6c95f3240339e76063e0", - "l_and-interp", - "lemma_FStar.Pointer.Base.buffer_as_addr_gsub_buffer", - "lemma_FStar.Pointer.Base.buffer_length_gsub_buffer", - "lemma_FStar.Pointer.Base.frameOf_buffer_gsub_buffer", - "lemma_FStar.Pointer.Base.gpointer_of_buffer_cell_gsub_buffer_", - "lemma_FStar.Pointer.Base.gsub_buffer_gsub_buffer", - "lemma_FStar.Pointer.Base.includes_refl", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.ModifiesGen.Cls_aloc_includes", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_includes", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - 
"refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_includes", - "token_correspondence_FStar.Pointer.Base.loc_aux_includes", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.uint_to_t", - "typing_FStar.UInt32.v" - ], - 0, - "f04d2d44b920898dd29b376d67a3a124" - ], - [ - "FStar.Pointer.Base.loc_includes_addresses_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "ed81c2c8a85255e84b4bc16d3cf3df68" - ], - [ - "FStar.Pointer.Base.loc_includes_addresses_pointer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocPointer", "eq2-interp", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_Prims.eqtype", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", "l_and-interp", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Set.mem" - ], - 0, - "99c54d061bf73fb86d74f1b9e1537ac8" - ], - [ - "FStar.Pointer.Base.loc_includes_addresses_pointer", - 3, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "71d6f45629e0dbb8817d669d9f3f4e25" - ], - [ - "FStar.Pointer.Base.loc_includes_addresses_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "99e01f13298dc122bf90df07cfc25292" - ], - [ - "FStar.Pointer.Base.loc_includes_addresses_buffer", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", "eq2-interp", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_includes", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "l_and-interp", - 
"projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t" - ], - 0, - "0b3cc6d66ff185d70feb7561f61ba591" - ], - [ - "FStar.Pointer.Base.loc_includes_addresses_buffer", - 3, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "7cf4ee8a46cee8bcc9227623f4c8111b" - ], - [ - "FStar.Pointer.Base.loc_includes_region_pointer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocPointer", "eq2-interp", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.loc_regions", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "l_and-interp", "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Monotonic.HyperHeap.rid", "typing_FStar.Set.mem" - ], - 0, - "4793322f3676664fae8f1e39b384df72" - ], - [ - "FStar.Pointer.Base.loc_includes_region_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", "eq2-interp", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_regions", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "l_and-interp", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t" - ], - 0, - "19fe090d4ff8343fa12c0222e5d1de3c" - ], - [ - "FStar.Pointer.Base.loc_includes_region_addresses", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "66737800ce9de594e6180aaa2d48baf0" - ], - [ - "FStar.Pointer.Base.loc_includes_region_addresses", - 2, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_regions" - ], - 0, - "75ee00692ac9fe175ef8e47971ca0720" - ], - [ - "FStar.Pointer.Base.loc_includes_region_region", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_regions" - ], - 0, - "fb27a337c27bda566b45b0e67ccc6347" - ], - [ - "FStar.Pointer.Base.loc_includes_region_union_l", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_regions", - "equation_FStar.Pointer.Base.loc_union" - ], - 0, - "5d6277b5d7d0f02d80bf7ca66620a4e3" - ], - [ - "FStar.Pointer.Base.loc_disjoint_sym", - 1, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.loc_disjoint" ], - 0, - "1c06f3beae72ce5038e8230076ee7528" - ], - [ - "FStar.Pointer.Base.loc_disjoint_none_r", - 1, - 1, - 1, - [ - "@query", 
"equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_none" - ], - 0, - "711efb7ab99a3c58daf3cbe34bb0a296" - ], - [ - "FStar.Pointer.Base.loc_disjoint_union_r", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_union" - ], - 0, - "21805396d2da260fbe9ca7d3f9616030" - ], - [ - "FStar.Pointer.Base.loc_disjoint_root", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "8e5bc5ab0b19ceb32b9160d64c258dc5" - ], - [ - "FStar.Pointer.Base.loc_disjoint_root", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Set.subset", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Set.mem_intersect", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_afd51579b90d50ea23e03b743a1fa001", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Pointer.Base.as_addr" - ], - 0, - "e1c11e68d80b37e35340b23fa4c08010" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gfield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.LocPointer", "eq2-interp", - "equation_FStar.Pointer.Base._field", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_Prims.nat", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "l_and-interp", "lemma_FStar.Pointer.Base.as_addr_gfield", - "lemma_FStar.Pointer.Base.disjoint_gfield", - "lemma_FStar.Pointer.Base.frameOf_gfield", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - "token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "unit_typing" - ], - 0, - 
"dd2178963735cd3a2c6cf729e6d0e2ac" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gcell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_FStar.UInt.uint_t", - "equation_Prims.eqtype", "function_token_typing_Prims.int", - "haseqTm_refine_f13070840248fced9d9d60d77bdae3ec", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "324724c135fc9a989b9f96f3fa7ce903" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gcell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", "eq2-interp", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_Prims.nat", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "l_and-interp", "lemma_FStar.Pointer.Base.as_addr_gcell", - "lemma_FStar.Pointer.Base.disjoint_gcell", - "lemma_FStar.Pointer.Base.frameOf_gcell", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - "token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "unit_typing" - ], - 0, - "6ed7a8af84c7df27a49b839419b9b13b" - ], - [ - "FStar.Pointer.Base.loc_disjoint_includes", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_includes" - ], - 0, - "fdbc32ea2ce13b9a40567d8e35bd9437" - ], - [ - "FStar.Pointer.Base.live_unused_in_disjoint_strong", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "959501bdca7b39cbcd411c91894fad3b" - ], - [ - "FStar.Pointer.Base.live_unused_in_disjoint_strong", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - 
"lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_1d7f81705c35971e00b909e37163cd25", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.greference_of", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.frameOf" - ], - 0, - "d68bb73d5314ac1de4f4ef3a937e1947" - ], - [ - "FStar.Pointer.Base.live_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_1d7f81705c35971e00b909e37163cd25", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.greference_of", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.frameOf" - ], - 0, - "cb711069a754955205325c993632383f" - ], - [ - "FStar.Pointer.Base.pointer_live_reference_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "39f010fc6a29f2ebec5e8db2012f96c8" - ], - [ - "FStar.Pointer.Base.pointer_live_reference_unused_in_disjoint", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.ModifiesGen.loc_freed_mreference", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Set.subset", 
"equation_Prims.eqtype", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "lemma_FStar.Set.mem_intersect", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_afd51579b90d50ea23e03b743a1fa001", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.as_addr" - ], - 0, - "43689c401e2c8383154dc8c6e5eb3851" - ], - [ - "FStar.Pointer.Base.reference_live_pointer_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "5940e53790abfa9fbeb38d6aa0451e93" - ], - [ - "FStar.Pointer.Base.reference_live_pointer_unused_in_disjoint", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.ModifiesGen.loc_freed_mreference", - "equation_FStar.Monotonic.HyperStack.as_addr", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.frameOf", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Set.subset", "equation_Prims.eqtype", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Set.mem_intersect", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_afd51579b90d50ea23e03b743a1fa001", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.as_addr" - ], - 0, - "f91b3cc4364f9ed3fc1d6f567c158c8b" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gsub_buffer", - 1, - 1, - 1, - [ "@query" ], - 0, - "7daf2438eba4d8f7d7294ddbb78e9013" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gsub_buffer", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", - "@fuel_irrelevance_Prims.pow2.fuel_instrumented", "@query", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", "b2t_def", - "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - 
"constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.Buffer", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.BufferRootSingleton", "eq2-interp", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.disjoint_buffer_vs_pointer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_buffer", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.not_an_array_cell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", "int_typing", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "l_and-interp", - "lemma_FStar.Pointer.Base.buffer_as_addr_gsub_buffer", - "lemma_FStar.Pointer.Base.buffer_length_gsub_buffer", - "lemma_FStar.Pointer.Base.disjoint_gcell", - "lemma_FStar.Pointer.Base.frameOf_buffer_gsub_buffer", - "lemma_FStar.Pointer.Base.gpointer_of_buffer_cell_gsub_buffer_", - "lemma_FStar.UInt.pow2_values", "lemma_FStar.UInt32.uv_inv", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - 
"token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - "token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.not_an_array_cell", - "typing_FStar.UInt.fits", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "eca47794219dcabdb4a2fc310de5190f" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gpointer_of_buffer_cell", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_FStar.UInt.uint_t", - "equation_Prims.eqtype", "function_token_typing_Prims.int", - "haseqTm_refine_f13070840248fced9d9d60d77bdae3ec", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "9d000f35738e5e5e9bedddbda5a0ecd9" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gpointer_of_buffer_cell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "bool_typing", "constructor_distinct_FStar.Pointer.Base.LocPointer", - "data_elim_FStar.Pointer.Base.BufferRootArray", "eq2-interp", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", "int_typing", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "l_and-interp", - "lemma_FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.disjoint_gcell", - "lemma_FStar.Pointer.Base.frameOf_gpointer_of_buffer_cell", - "lemma_FStar.UInt32.uv_inv", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - 
"token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - "token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt.fits", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "f8cf308b53f490766d2f095b5266cacb" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gpointer_of_buffer_cell_r", - 1, - 1, - 1, - [ - "@query", - "lemma_FStar.Pointer.Base.loc_includes_gpointer_of_array_cell", - "lemma_FStar.Pointer.Base.loc_includes_refl" - ], - 0, - "8a474a44727c2bdfc484ab1f7716213f" - ], - [ - "FStar.Pointer.Base.loc_disjoint_gpointer_of_buffer_cell_l", - 1, - 1, - 1, - [ - "@query", - "lemma_FStar.Pointer.Base.loc_includes_gpointer_of_array_cell", - "lemma_FStar.Pointer.Base.loc_includes_refl" - ], - 0, - "c4dd2069d7eeaa0ee243c0b33bad825a" - ], - [ - "FStar.Pointer.Base.loc_disjoint_addresses", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "fb7974c2d94f51c283eb141dad5b4447" - ], - [ - "FStar.Pointer.Base.loc_disjoint_addresses", - 2, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_disjoint" - ], - 0, - "7f4af2458f6f8f7954a6afa01b343826" - ], - [ - "FStar.Pointer.Base.loc_disjoint_pointer_addresses", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "c2519cea9fd66d05dadd3f6e3d5b7da6" - ], - [ - "FStar.Pointer.Base.loc_disjoint_pointer_addresses", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Pointer.Base.as_addr", "equation_FStar.Set.subset", - "equation_Prims.eqtype", "equation_Prims.nat", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Pointer.Base.loc_disjoint_addresses", - "lemma_FStar.Pointer.Base.loc_includes_addresses_pointer", - "lemma_FStar.Pointer.Base.loc_includes_refl", - "lemma_FStar.Set.mem_intersect", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_afd51579b90d50ea23e03b743a1fa001", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Pointer.Base.as_addr", "typing_FStar.Set.mem" - ], - 0, - "43af341885d804fad4a33e9449546809" - ], - [ - "FStar.Pointer.Base.loc_disjoint_pointer_addresses", - 3, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "eae4b909fdd08a8d1f69be04b0e590b1" - ], - [ - "FStar.Pointer.Base.loc_disjoint_buffer_addresses", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", 
"@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "0ff75f2858ced2524ceebfd0105a9445" - ], - [ - "FStar.Pointer.Base.loc_disjoint_buffer_addresses", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Set.subset", "equation_Prims.eqtype", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Pointer.Base.loc_disjoint_addresses", - "lemma_FStar.Pointer.Base.loc_includes_addresses_buffer", - "lemma_FStar.Pointer.Base.loc_includes_refl", - "lemma_FStar.Set.mem_intersect", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_afd51579b90d50ea23e03b743a1fa001", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Pointer.Base.buffer_as_addr", "typing_FStar.Set.mem" - ], - 0, - "c338686cfdf46a2bde1b07385ed91a57" - ], - [ - "FStar.Pointer.Base.loc_disjoint_buffer_addresses", - 3, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "4f55ac9812ca8d3435522e8615abf27e" - ], - [ - "FStar.Pointer.Base.loc_disjoint_regions", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_regions" - ], - 0, - "a3dcfa09ae3b4439a9c35e5c56521619" - ], - [ - "FStar.Pointer.Base.modifies_loc_regions_intro", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_regions", - "equation_FStar.Pointer.Base.modifies", "lemma_FStar.Set.mem_subset", - "typing_FStar.Monotonic.HyperHeap.rid" - ], - 0, - "3e4063cadff53b30351ddc3a175e59ae" - ], - [ - "FStar.Pointer.Base.modifies_pointer_elim", - 1, - 1, - 1, - [ - "@query", "constructor_distinct_FStar.Pointer.Base.LocPointer", - "eq2-interp", "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_preserved", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.modifies", - "equation_FStar.Pointer.Base.pointer_preserved", - "equation_Prims.nat", - "interpretation_Tm_abs_b66b60341cf47258f5ae9ea4a40f3bb1", - "l_and-interp", "proj_equation_FStar.ModifiesGen.Cls_aloc_preserved", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_preserved", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_preserved", - "token_correspondence_FStar.Pointer.Base.loc_aux_preserved" - ], - 0, - "33e1e783497536a9f6b4c7e206395304" - ], - [ - "FStar.Pointer.Base.modifies_buffer_elim'", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", 
"bool_inversion", "bool_typing", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.buffer_as_seq", - "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_", - "equation_FStar.Pointer.Base.equal_values", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", "int_inversion", - "int_typing", "lemma_FStar.Pointer.Base.buffer_length_buffer_as_seq", - "lemma_FStar.Pointer.Base.buffer_readable_buffer_live", - "lemma_FStar.Pointer.Base.index_buffer_as_seq", - "lemma_FStar.Pointer.Base.live_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.live_region_frameOf_buffer", - "lemma_FStar.Pointer.Base.loc_disjoint_gpointer_of_buffer_cell_l", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_365abba901205a01d0ef28ebf2198c47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_d1e76d56e2ec7389d639ef3df0bd6a06", - "refinement_interpretation_Tm_refine_d83f8da8ef6c1cb9f71d1465c1bb1c55", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Monotonic.HyperHeap.rid_freeable", - "typing_FStar.Monotonic.HyperHeap.root", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.frameOf_buffer", "typing_FStar.UInt.fits", - "typing_FStar.UInt32.uint_to_t" - ], - 0, - "0f4eb9b64640f7ab9ebd8e1f72a80b17" - ], - [ - "FStar.Pointer.Base.modifies_buffer_elim", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "bool_inversion", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_seq", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", "int_typing", - "lemma_FStar.Pointer.Base.length_buffer_root_as_seq", - "lemma_FStar.Pointer.Base.live_region_frameOf_buffer", - "lemma_FStar.Seq.Properties.slice_is_empty", - "lemma_FStar.UInt32.uv_inv", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_Equality", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - 
"refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_b361ba8089a6e963921008d537e799a1", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_as_seq", - "typing_FStar.Pointer.Base.frameOf_buffer", - "typing_FStar.Pointer.Base.type_of_typ", "typing_FStar.UInt32.v" - ], - 0, - "7881919606da7a00a6860ddb5c323141" - ], - [ - "FStar.Pointer.Base.modifies_reference_elim", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "c8a5e87de11102c06b95c5535bf0b9de" - ], - [ - "FStar.Pointer.Base.modifies_reference_elim", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.ModifiesGen.loc_freed_mreference", - "equation_FStar.ModifiesGen.loc_mreference", - "equation_FStar.Monotonic.HyperStack.as_addr", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.modifies", "equation_FStar.Set.subset", - "equation_Prims.eqtype", "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.as_addr" - ], - 0, - "43ce369de718445c490fb1e8bf81b7e4" - ], - [ - "FStar.Pointer.Base.modifies_refl", - 1, - 1, - 1, - [ "@query", "equation_FStar.Pointer.Base.modifies" ], - 0, - "42a51bab8d5639b35d1ccb549ac1ebb1" - ], - [ - "FStar.Pointer.Base.modifies_loc_includes", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.modifies" - ], - 0, - "532218bd14c9541d19a3e6f4e37a6a52" - ], - [ - "FStar.Pointer.Base.modifies_trans", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_union", - "equation_FStar.Pointer.Base.modifies" - ], - 0, - "9114aeb2d124766f9fc64b8f6ccd0e20" - ], - [ - "FStar.Pointer.Base.screate", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - 
"@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_9973fa91578be0bb3fb4c2414d28840c", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "data_typing_intro_FStar.Pointer.Base.Pointer@tok", - "data_typing_intro_Prims.Mkdtuple2@tok", - "disc_equation_FStar.Pervasives.Native.None", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.ST.inline_stack_inv", - "equation_FStar.HyperStack.ST.mstackref", - "equation_FStar.HyperStack.ST.salloc_post", - "equation_FStar.Monotonic.Heap.fresh", - "equation_FStar.Monotonic.Heap.modifies", - "equation_FStar.Monotonic.Heap.modifies_t", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.frameOf", - "equation_FStar.Monotonic.HyperStack.fresh_ref", - "equation_FStar.Monotonic.HyperStack.is_stack_region", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Monotonic.HyperStack.mstackref", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Monotonic.HyperStack.unused_in", - "equation_FStar.Monotonic.HyperStack.upd", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_none", - "equation_FStar.Pointer.Base.modifies", - "equation_FStar.Pointer.Base.modifies_0", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.unused_in", "equation_Prims.logical", - "equation_Prims.nat", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "int_inversion", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", "lemma_FStar.Map.lemma_SelUpd1", - "lemma_FStar.Map.lemma_SelUpd2", - "lemma_FStar.Monotonic.Heap.lemma_contains_implies_used", - "lemma_FStar.Monotonic.Heap.lemma_distinct_addrs_unused", - "lemma_FStar.Monotonic.Heap.lemma_sel_upd1", - "lemma_FStar.Monotonic.Heap.lemma_sel_upd2", - "lemma_FStar.Monotonic.Heap.lemma_unused_upd_modifies", - "lemma_FStar.Monotonic.HyperStack.aref_as_addr_aref_of", - "lemma_FStar.Monotonic.HyperStack.aref_live_at_aref_of", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - 
"lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Monotonic.HyperStack.is_mm_aref_of", - "lemma_FStar.Monotonic.HyperStack.is_mm_greference_of", - "lemma_FStar.Monotonic.HyperStack.lemma_mk_mem__projectors", - "lemma_FStar.Monotonic.HyperStack.unused_in_aref_of", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.ovalue_is_readable_ovalue_of_value", - "lemma_FStar.Pointer.Base.value_of_ovalue_of_value", - "primitive_Prims.op_GreaterThan", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_3107a49075513d87f81f8b8614ef7aa0", - "refinement_interpretation_Tm_refine_3415ed6c7abc7d0c55726285f3ae5f3f", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_f63e058f9631c11993f3ef0430296051", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Map.sel", "typing_FStar.Map.upd", - "typing_FStar.Monotonic.Heap.upd", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.aref_live_at", - "typing_FStar.Monotonic.HyperStack.aref_of", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_rid_ctr", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pervasives.dfst", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_Prims.eq2", "typing_Prims.l_and", - "typing_Tm_abs_9add4301e24a482cad3210ba222ff660" - ], - 0, - "5fb9d8e4218ed3891dbf500936b544bf" - ], - [ - "FStar.Pointer.Base.domain_upd", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.frameOf", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.upd", - "function_token_typing_FStar.Monotonic.Heap.heap", "int_inversion", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Map.lemma_InDomUpd1", "lemma_FStar.Map.lemma_InDomUpd2", - "lemma_FStar.Monotonic.HyperStack.lemma_mk_mem__projectors", - "lemma_FStar.Set.lemma_equal_elim", "primitive_Prims.op_BarBar", - "primitive_Prims.op_Equality", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_ad7e1b418ca64c1aeef94590edc4eb01", - "typing_FStar.Heap.trivial_preorder", 
"typing_FStar.Map.domain", - "typing_FStar.Map.sel", "typing_FStar.Monotonic.Heap.upd", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_rid_ctr", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Monotonic.HyperStack.live_region" - ], - 0, - "f6ab3984eeafc6a20e0c5daa609d1107" - ], - [ - "FStar.Pointer.Base.ecreate", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", - "FStar.Pointer.Base_interpretation_Tm_arrow_9973fa91578be0bb3fb4c2414d28840c", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "Prims_interpretation_Tm_arrow_2eaa01e78f73e9bab5d0955fc1a662da", - "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.Some", - "constructor_distinct_FStar.Pointer.Base.PathBase", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.PathBase@tok", - "data_typing_intro_FStar.Pointer.Base.Pointer@tok", - "data_typing_intro_Prims.Mkdtuple2@tok", - "disc_equation_FStar.Pervasives.Native.None", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.ST.equal_stack_domains", - "equation_FStar.HyperStack.ST.is_eternal_region", - "equation_FStar.HyperStack.ST.mref", - "equation_FStar.HyperStack.ST.ralloc_post", - "equation_FStar.Monotonic.Heap.modifies", - "equation_FStar.Monotonic.Heap.modifies_t", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.as_addr", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.frameOf", - "equation_FStar.Monotonic.HyperStack.is_heap_color", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.mref", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Monotonic.HyperStack.unused_in", - "equation_FStar.Monotonic.HyperStack.upd", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.is_mm", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_none", - "equation_FStar.Pointer.Base.modifies", - "equation_FStar.Pointer.Base.modifies_0", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.unused_in", "equation_Prims.nat", - 
"equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "equation_with_fuel_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "int_inversion", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", "lemma_FStar.Map.lemma_SelUpd1", - "lemma_FStar.Map.lemma_SelUpd2", - "lemma_FStar.Monotonic.Heap.lemma_contains_implies_used", - "lemma_FStar.Monotonic.Heap.lemma_distinct_addrs_unused", - "lemma_FStar.Monotonic.Heap.lemma_sel_upd1", - "lemma_FStar.Monotonic.Heap.lemma_sel_upd2", - "lemma_FStar.Monotonic.Heap.lemma_unused_upd_contains", - "lemma_FStar.Monotonic.Heap.lemma_unused_upd_modifies", - "lemma_FStar.Monotonic.Heap.lemma_upd_contains", - "lemma_FStar.Monotonic.HyperStack.aref_as_addr_aref_of", - "lemma_FStar.Monotonic.HyperStack.aref_live_at_aref_of", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Monotonic.HyperStack.is_mm_aref_of", - "lemma_FStar.Monotonic.HyperStack.is_mm_greference_of", - "lemma_FStar.Monotonic.HyperStack.lemma_mk_mem__projectors", - "lemma_FStar.Monotonic.HyperStack.unused_in_aref_of", - "lemma_FStar.Pervasives.invertOption", - "lemma_FStar.Pointer.Base.ovalue_is_readable_ovalue_of_value", - "lemma_FStar.Pointer.Base.value_of_ovalue_of_value", - "primitive_Prims.op_Negation", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "projection_inverse_FStar.Pointer.Base.PathBase_from", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_11909c5fb1b8562f96ab0360020d5e9a", - "refinement_interpretation_Tm_refine_161e04719814801d293219f408210f95", - "refinement_interpretation_Tm_refine_1a1278f1aecd37594ba20d888b7fd230", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_4deaad73cd92bc948bb28924e869c4ab", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Map.sel", "typing_FStar.Map.upd", - "typing_FStar.Monotonic.Heap.upd", - "typing_FStar.Monotonic.HyperHeap.color", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperHeap.rid_freeable", - "typing_FStar.Monotonic.HyperStack.aref_of", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_rid_ctr", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Monotonic.HyperStack.is_heap_color", - 
"typing_FStar.Monotonic.HyperStack.is_mm", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.live", - "typing_FStar.Pointer.Base.type_of_typ", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_Tm_abs_9add4301e24a482cad3210ba222ff660" - ], - 0, - "8744a72f481c3434f60030982026ae7b" - ], - [ - "FStar.Pointer.Base.field", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_630f4ea120e51fbdccd80790504db19e", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", "typing_FStar.Map.domain", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap" - ], - 0, - "5322c69156bd3e3b4aa4076d56902a3d" - ], - [ - "FStar.Pointer.Base.ufield", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.gufield", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_2f0ee03cb351104c406e5fab04712a9c", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", "typing_FStar.Map.domain", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap" - ], - 0, - "ed56f0f879c63d93c01f83ded668330f" - ], - [ - "FStar.Pointer.Base.cell", - 1, - 1, - 1, - [ "@query" ], - 0, - "a30fe65343951c615e2b47a6e38c0807" - ], - [ - "FStar.Pointer.Base.cell", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.gcell", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_95ca5788740b2281baacad759e71270f", - "typing_FStar.Map.domain", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap" - ], - 0, - "7707570829c2d6574b960abfce4feb59" - ], - [ - "FStar.Pointer.Base.reference_of", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", 
"disc_equation_FStar.Pointer.Base.Pointer", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_Prims.eqtype", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "function_token_typing_Prims.int", - "haseqTm_refine_774ba3f728d91ead8ef40be66c9802e5", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Monotonic.HyperStack.aref_as_addr_aref_of", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Monotonic.HyperStack.is_mm_aref_of", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_34fbe053a9bd90fe28a8c099cc43600c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "1101ecef73e7483ea6d251af584d14e6" - ], - [ - "FStar.Pointer.Base.read", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", "disc_equation_FStar.Pointer.Base.Pointer", - "eq2-interp", "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.ST.is_live_for_rw_in", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "l_and-interp", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "lemma_FStar.Pointer.Base.readable_live", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_Prims.Mkdtuple2__1", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - 
"refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_6f9aa66937ca4c204e459d01042299f6", - "refinement_interpretation_Tm_refine_81a0d54c78434fc678ec7202c4b9fc09", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Monotonic.HyperStack.reference_of", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.frameOf", - "typing_FStar.Pointer.Base.live", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle" - ], - 0, - "43bba79f5f551ef3cd66d6eca4fe0bf9" - ], - [ - "FStar.Pointer.Base.is_null", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "constructor_distinct_FStar.Pointer.Base.NullPtr", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.nlive", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_intro", - "lemma_FStar.Set.lemma_equal_refl", - "projection_inverse_FStar.Pointer.Base.NullPtr_to", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_bb3ae679001acd4c6774b129b21fb7f7", - "true_interp", "typing_FStar.Map.domain", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Pointer.Base.g_is_null" - ], - 0, - "fdf65bb6405dce1d06ad8036505acd0d" - ], - [ - "FStar.Pointer.Base.owrite", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_upd.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "data_elim_FStar.Pointer.Base.LocBuffer", - "data_elim_FStar.Pointer.Base.LocPointer", - "data_elim_FStar.Pointer.Base.Pointer", "data_elim_Prims.Mkdtuple2", - "data_typing_intro_Prims.Mkdtuple2@tok", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.Heap.trivial_rel", - "equation_FStar.HyperStack.ST.equal_domains", - "equation_FStar.HyperStack.ST.is_live_for_rw_in", - "equation_FStar.HyperStack.reference", - "equation_FStar.ModifiesGen.loc_mreference", - "equation_FStar.Monotonic.Heap.equal_dom", - "equation_FStar.Monotonic.Heap.modifies", - "equation_FStar.Monotonic.Heap.modifies_t", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.as_addr", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.frameOf", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Monotonic.HyperStack.upd", - "equation_FStar.Pervasives.dfst", 
"equation_FStar.Pointer.Base.aloc", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_preserved", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.disjoint", - "equation_FStar.Pointer.Base.disjoint_buffer_vs_pointer", - "equation_FStar.Pointer.Base.equal_values", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_aux_preserved", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.modifies", - "equation_FStar.Pointer.Base.modifies_1", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_preserved", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", "equation_Prims.eqtype", - "equation_Prims.logical", "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "fuel_guarded_inversion_FStar.Pointer.Base.loc_aux", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Heap.trivial_preorder", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", "int_inversion", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "interpretation_Tm_abs_568747eb5009c1dec504311dee989dc2", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "interpretation_Tm_abs_b66b60341cf47258f5ae9ea4a40f3bb1", - "kinding_FStar.Pointer.Base.typ@tok", "l_and-interp", - "lemma_FStar.HyperStack.ST.lemma_same_refs_in_all_regions_elim", - "lemma_FStar.Map.lemma_ContainsDom", "lemma_FStar.Map.lemma_SelUpd1", - "lemma_FStar.Map.lemma_SelUpd2", - "lemma_FStar.Monotonic.Heap.lemma_contains_upd_modifies", - "lemma_FStar.Monotonic.Heap.lemma_sel_upd1", - "lemma_FStar.Monotonic.Heap.lemma_sel_upd2", - "lemma_FStar.Monotonic.HyperStack.aref_as_addr_aref_of", - "lemma_FStar.Monotonic.HyperStack.aref_live_at_aref_of", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - "lemma_FStar.Monotonic.HyperStack.is_mm_aref_of", - "lemma_FStar.Monotonic.HyperStack.lemma_mk_mem__projectors", - "lemma_FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.disjoint_sym_", - "lemma_FStar.Pointer.Base.frameOf_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "lemma_FStar.Pointer.Base.path_sel_upd_same", - "lemma_FStar.Pointer.Base.readable_live", - "lemma_FStar.Set.lemma_equal_elim", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "proj_equation_FStar.ModifiesGen.Cls_aloc_preserved", - 
"proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_preserved", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "projection_inverse_Prims.Mkdtuple2__1", - "projection_inverse_Prims.Mkdtuple2__2", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_35927a15e9516018e6643fa65a3f830c", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_60907afdf5bb4feb7e8ea7ef7a2ad5dc", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_81a0d54c78434fc678ec7202c4b9fc09", - "refinement_interpretation_Tm_refine_ee0c42debbc39763e22b0866c76a996c", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_preserved", - "token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "token_correspondence_FStar.Pointer.Base.loc_aux_preserved", - "true_interp", "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Map.domain", "typing_FStar.Map.sel", - "typing_FStar.Map.upd", "typing_FStar.Monotonic.Heap.upd", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.aref_live_at", - "typing_FStar.Monotonic.HyperStack.aref_of", - "typing_FStar.Monotonic.HyperStack.as_addr", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_rid_ctr", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Monotonic.HyperStack.reference_of", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pervasives.dfst", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.frameOf", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.live", - "typing_FStar.Pointer.Base.reference_of", "typing_FStar.Set.mem", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle", - "typing_Prims.eq2", "typing_Prims.l_and" - ], - 0, - "9a23b3a48f36ada64341c00087efca14" - ], - [ - "FStar.Pointer.Base.write", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.ovalue_of_value.fuel_instrumented", - 
"@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.value_of_ovalue.fuel_instrumented", - "@query", "b2t_def", "equation_FStar.Pointer.Base.gread", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.readable", "l_and-interp", - "lemma_FStar.Pointer.Base.ovalue_is_readable_ovalue_of_value", - "lemma_FStar.Pointer.Base.value_of_ovalue_of_value", - "refinement_interpretation_Tm_refine_2c7ecebd8a41d0890aab4251b61d6458", - "refinement_interpretation_Tm_refine_35927a15e9516018e6643fa65a3f830c", - "typing_FStar.Pointer.Base.live", - "typing_FStar.StrongExcludedMiddle.strong_excluded_middle" - ], - 0, - "36f5e45cd68ee5252ae4d0fea9c4a71b" - ], - [ - "FStar.Pointer.Base.write_union_field", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "constructor_distinct_FStar.Pervasives.Native.Some", - "disc_equation_FStar.Pervasives.Native.Some", "eq2-interp", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Pointer.Base.is_active_union_field", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_FStar.Pointer.Base.type_of_struct_field_", - "equation_FStar.Pointer.Base.type_of_struct_field__", - "equation_FStar.Pointer.Base.union_typ", - "function_token_typing_FStar.Monotonic.Heap.heap", "l_and-interp", - "lemma_FStar.Map.lemma_ContainsDom", - "proj_equation_FStar.Pervasives.Native.Some_v", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_FStar.Pervasives.Native.Some_a", - "projection_inverse_FStar.Pervasives.Native.Some_v", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "string_inversion", - "token_correspondence_FStar.Pointer.Base.otype_of_typ", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Pointer.Base.none_ovalue", - "typing_FStar.Pointer.Base.typ_of_struct_field" - ], - 0, - "73b3de430fe49317193f158d676c210e" - ], - [ - "FStar.Pointer.Base.modifies_fresh_frame_popped", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "equation_FStar.ModifiesGen.loc_all_regions_from", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.fresh_frame", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.popped", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_regions", - "equation_FStar.Pointer.Base.loc_union", - "equation_FStar.Pointer.Base.modifies", - "function_token_typing_FStar.Monotonic.Heap.heap", - "lemma_FStar.Map.lemma_ContainsDom", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "typing_FStar.Map.contains", "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip" - ], - 0, - "c099712fdb8313ba8d95e32b87e98e76" - ], - [ - "FStar.Pointer.Base.modifies_only_live_regions", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.loc_regions", - 
"equation_FStar.Pointer.Base.loc_union", - "equation_FStar.Pointer.Base.modifies" - ], - 0, - "4ce7667538e53b66553b19a629f8941b" - ], - [ - "FStar.Pointer.Base.modifies_loc_addresses_intro", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "82ff96d5e2cb22f325032917e859ca3a" - ], - [ - "FStar.Pointer.Base.modifies_loc_addresses_intro", - 2, - 1, - 1, - [ - "@query", "equation_FStar.ModifiesGen.loc_region_only", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_regions", - "equation_FStar.Pointer.Base.loc_union", - "equation_FStar.Pointer.Base.modifies", "equation_FStar.Set.subset", - "equation_Prims.nat" - ], - 0, - "ac10edbfa5153af01e844c7fe8dd792c" - ], - [ - "FStar.Pointer.Base.modifies_loc_addresses_intro", - 3, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "7a0de365c01bc19883722d8554267b25" - ], - [ - "FStar.Pointer.Base.modifies_1_readable_struct", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "disc_equation_Prims.Cons", - "equation_FStar.Pointer.Base.equal_values", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gfield", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.modifies_1", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.struct_field", - "equation_FStar.Pointer.Base.struct_field_", - "equation_FStar.Pointer.Base.typ_of_struct_field", - "equation_Prims.eqtype", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "function_token_typing_Prims.string", - "kinding_FStar.Pervasives.Native.tuple2@tok", - "kinding_FStar.Pointer.Base.typ@tok", - "lemma_FStar.Pointer.Base.loc_disjoint_gfield", - "lemma_FStar.Pointer.Base.modifies_pointer_elim", - "lemma_FStar.Pointer.Base.readable_gfield", - "lemma_FStar.Pointer.Base.readable_live", - "proj_equation_FStar.Pointer.Base.Mkstruct_typ_fields", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_9560ef23f44dd048af58f1767cc19bc8", - "refinement_interpretation_Tm_refine_c6dda526ae22ec011a2853adf9fa6a29", - "refinement_interpretation_Tm_refine_eda496a665ec0c486d3c3de30bfc4462", - "string_inversion", - "typing_FStar.Pointer.Base.__proj__Mkstruct_typ__item__fields", - "typing_FStar.Pointer.Base.gfield", - "typing_FStar.Pointer.Base.loc_pointer", - "typing_FStar.Pointer.Base.typ_of_struct_field", - "typing_Prims.uu___is_Cons" - ], - 0, - "2ded2ebdc699a06b4a6841569c7686a5" - ], - [ - "FStar.Pointer.Base.modifies_1_readable_array", - 1, - 1, - 1, - [ "@query" ], - 0, - "63a9a88f12f999c5456f9533ff39962f" - ], - [ - "FStar.Pointer.Base.modifies_1_readable_array", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - 
"@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.otype_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "b2t_def", - "bool_inversion", "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", - "data_elim_FStar.Pointer.Base.Pointer", "data_elim_Prims.Mkdtuple2", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.is_tip", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.equal_values", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.modifies_1", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.step_sel", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pervasives.Native.option", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "l_and-interp", "lemma_FStar.Map.lemma_ContainsDom", - "lemma_FStar.Pointer.Base.live_gcell", - "lemma_FStar.Pointer.Base.loc_disjoint_gcell", - "lemma_FStar.Pointer.Base.modifies_pointer_elim", - "lemma_FStar.Pointer.Base.otype_of_typ_array", - "lemma_FStar.Pointer.Base.readable_gcell", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - "projection_inverse_FStar.Pointer.Base.TArray_length", - "projection_inverse_FStar.Pointer.Base.TArray_t", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - 
"refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.contains", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.get_tip", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.gcell", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.loc_pointer", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_sel", "unit_typing" - ], - 0, - "1fb8b29069b289f38896481de368d623" - ], - [ - "FStar.Pointer.Base.read_buffer", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "lemma_FStar.Pointer.Base.buffer_length_buffer_as_seq", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "eed1fd42a4f4427516548be493847d9f" - ], - [ - "FStar.Pointer.Base.read_buffer", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.readable", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "l_and-interp", - "lemma_FStar.Pointer.Base.gread_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.live_gpointer_of_buffer_cell", - "refinement_interpretation_Tm_refine_c8a149bfed84d9c7f91b56b75749a739", - "refinement_interpretation_Tm_refine_e9f80e94617693055b40900dbd5751b9" - ], - 0, - "ecd64d0f3f7523120019de66be9d2f80" - ], - [ - "FStar.Pointer.Base.write_buffer", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@query", "b2t_def", "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "lemma_FStar.Pointer.Base.buffer_length_buffer_as_seq", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_c8a149bfed84d9c7f91b56b75749a739" - ], - 0, - "a7ad0d26e8c591d3ba7e89b4a3c306c0" - ], - [ - "FStar.Pointer.Base.write_buffer", - 2, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_FStar.Pointer.Base.ovalue_is_readable.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.path_sel.fuel_instrumented", - "@fuel_correspondence_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - "@fuel_irrelevance_FStar.Pointer.Base.type_of_typ.fuel_instrumented", - 
"@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "b2t_def", "bool_inversion", "bool_typing", - "constructor_distinct_FStar.Pointer.Base.PathStep", - "constructor_distinct_FStar.Pointer.Base.Pointer", - "constructor_distinct_FStar.Pointer.Base.StepCell", - "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_Prims.Mkdtuple2", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.sel", - "equation_FStar.Pervasives.dfst", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.array_length_t", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_readable", - "equation_FStar.Pointer.Base.buffer_readable_", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.equal_values", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.modifies_1", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.readable", - "equation_FStar.Pointer.Base.step_sel", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_with_fuel_FStar.Pointer.Base.path_sel.fuel_instrumented", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "fuel_guarded_inversion_Prims.dtuple2", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "int_inversion", - "interpretation_Tm_abs_9add4301e24a482cad3210ba222ff660", - "l_and-interp", - "lemma_FStar.Pointer.Base.gread_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.live_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.live_region_frameOf_buffer", - "lemma_FStar.Pointer.Base.loc_disjoint_gcell", - "lemma_FStar.Pointer.Base.modifies_pointer_elim", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "proj_equation_FStar.Pointer.Base.Pointer_from", - "proj_equation_FStar.Pointer.Base.Pointer_p", - "proj_equation_Prims.Mkdtuple2__1", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.PathStep_from", - "projection_inverse_FStar.Pointer.Base.PathStep_p", - "projection_inverse_FStar.Pointer.Base.PathStep_s", - "projection_inverse_FStar.Pointer.Base.PathStep_through", - "projection_inverse_FStar.Pointer.Base.PathStep_to", - "projection_inverse_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.Pointer.Base.Pointer_from", - "projection_inverse_FStar.Pointer.Base.Pointer_p", - "projection_inverse_FStar.Pointer.Base.Pointer_to", - "projection_inverse_FStar.Pointer.Base.StepCell_index", - "projection_inverse_FStar.Pointer.Base.StepCell_length", - "projection_inverse_FStar.Pointer.Base.StepCell_value", - 
"refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_35927a15e9516018e6643fa65a3f830c", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_9e7f68c38e43484e77069094f4fd88d3", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "refinement_interpretation_Tm_refine_c8a149bfed84d9c7f91b56b75749a739", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Monotonic.HyperStack.sel", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.__proj__Pointer__item__from", - "typing_FStar.Pointer.Base.__proj__Pointer__item__p", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.frameOf_buffer", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.greference_of", - "typing_FStar.Pointer.Base.loc_pointer", - "typing_FStar.Pointer.Base.ovalue_is_readable", - "typing_FStar.Pointer.Base.path_sel", "typing_FStar.UInt32.add", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "d1401871e9f1ed7d013ebde2a084ffd7" - ], - [ - "FStar.Pointer.Base.buffer_live_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "FStar.Pointer.Base_pretyping_f3a233063c2acf85b4715bac19503db2", - "Prims_pretyping_f8666440faa91836cc5a13998af863fc", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base._npointer", - "constructor_distinct_Prims.unit", "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.BufferRootSingleton", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperHeap.hmap", - "equation_FStar.Monotonic.HyperStack.as_addr", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.unused_in", - "equation_FStar.Pointer.Base._cell", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.buffer_unused_in", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.disjoint_buffer_vs_pointer", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gcell", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - 
"equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_buffer", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.not_an_array_cell", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "function_token_typing_FStar.Monotonic.Heap.heap", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "int_inversion", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "l_and-interp", - "lemma_FStar.Monotonic.Heap.lemma_contains_implies_used", - "lemma_FStar.Monotonic.Heap.lemma_distinct_addrs_unused", - "lemma_FStar.Monotonic.HyperStack.aref_as_addr_aref_of", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_aref_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Monotonic.HyperStack.unused_in_aref_of", - "lemma_FStar.Pointer.Base.live_region_frameOf_buffer", - "lemma_FStar.Pointer.Base.unused_in_greference_of", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_1d7f81705c35971e00b909e37163cd25", - "refinement_interpretation_Tm_refine_365abba901205a01d0ef28ebf2198c47", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - "token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "typing_FStar.Heap.trivial_preorder", "typing_FStar.Map.sel", - "typing_FStar.Monotonic.HyperHeap.rid", - "typing_FStar.Monotonic.HyperHeap.rid_freeable", - "typing_FStar.Monotonic.HyperHeap.root", - "typing_FStar.Monotonic.HyperStack.aref_of", - "typing_FStar.Monotonic.HyperStack.as_ref", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.get_hmap", - "typing_FStar.Monotonic.HyperStack.greference_of", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.greference_of", - 
"typing_FStar.Pointer.Base.not_an_array_cell", "unit_typing" - ], - 0, - "9bee1300faa4ad002f0ffe74566a4fa9" - ], - [ - "FStar.Pointer.Base.pointer_live_buffer_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.Pointer", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.buffer_unused_in", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.greference_of", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_buffer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "l_and-interp", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Monotonic.HyperStack.unused_in_aref_of", - "lemma_FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.frameOf_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.live_region_frameOf", - "lemma_FStar.Pointer.Base.unused_in_greference_of", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_1d7f81705c35971e00b909e37163cd25", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_7ae259fb7a49b4d47af4153553bb7fa3", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - "token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.aref_of", - 
"typing_FStar.Monotonic.HyperStack.greference_of", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell", - "typing_FStar.Pointer.Base.greference_of" - ], - 0, - "12f0f21f1fa67e04bbe0cc4c6189d80e" - ], - [ - "FStar.Pointer.Base.buffer_live_pointer_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.Pointer.Base_pretyping_c187978e0b47d492be4f7ef67953e027", - "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.LocBuffer", - "constructor_distinct_FStar.Pointer.Base.LocPointer", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.Pointer", - "data_typing_intro_FStar.Pointer.Base.TArray@tok", - "disc_equation_FStar.Pointer.Base.Pointer", "eq2-interp", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.disjoint_buffer_vs_pointer", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.gpointer_of_buffer_cell", - "equation_FStar.Pointer.Base.live", - "equation_FStar.Pointer.Base.loc_aux_disjoint", - "equation_FStar.Pointer.Base.loc_aux_disjoint_pointer", - "equation_FStar.Pointer.Base.loc_aux_in_addr", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_pointer", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.pointer_ref_contents", - "equation_FStar.Pointer.Base.unused_in", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "function_token_typing_FStar.Pointer.Base.pointer_ref_contents", - "interpretation_Tm_abs_5588b165ff73228a6a45b3d5bd80e04a", - "l_and-interp", - "lemma_FStar.Monotonic.HyperStack.as_addr_greference_of", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - "lemma_FStar.Monotonic.HyperStack.contains_greference_of", - "lemma_FStar.Monotonic.HyperStack.frameOf_greference_of", - "lemma_FStar.Pointer.Base.as_addr_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.frameOf_gpointer_of_buffer_cell", - "lemma_FStar.Pointer.Base.live_region_frameOf_buffer", - "proj_equation_FStar.ModifiesGen.Cls_aloc_disjoint", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.ModifiesGen.Cls_aloc_disjoint", - "projection_inverse_FStar.Pointer.Base.LocBuffer_b", - "projection_inverse_FStar.Pointer.Base.LocBuffer_t", - "projection_inverse_FStar.Pointer.Base.LocPointer_p", - "projection_inverse_FStar.Pointer.Base.LocPointer_t", - "refinement_interpretation_Tm_refine_1d7f81705c35971e00b909e37163cd25", - "refinement_interpretation_Tm_refine_4d0dd697db2857c49f85536bd0dded47", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_c8374bfee518478c6a0e30260a118ee6", - "token_correspondence_FStar.ModifiesGen.__proj__Cls__item__aloc_disjoint", - 
"token_correspondence_FStar.Pointer.Base.loc_aux_disjoint", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.greference_of", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.__proj__Pointer__item__contents", - "typing_FStar.Pointer.Base.gpointer_of_buffer_cell" - ], - 0, - "68a19b5abf350d2a225d68f18cd02a7e" - ], - [ - "FStar.Pointer.Base.reference_live_buffer_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "e952ddf69f94ad8840b487e824b56209" - ], - [ - "FStar.Pointer.Base.reference_live_buffer_unused_in_disjoint", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "data_elim_FStar.Pointer.Base.BufferRootArray", - "data_elim_FStar.Pointer.Base.BufferRootSingleton", - "data_elim_FStar.Pointer.Base.Pointer", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.ModifiesGen.loc_freed_mreference", - "equation_FStar.Monotonic.HyperStack.as_addr", - "equation_FStar.Monotonic.HyperStack.contains", - "equation_FStar.Monotonic.HyperStack.frameOf", - "equation_FStar.Monotonic.HyperStack.is_wf_with_ctr_and_tip", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mem", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Pointer.Base.as_addr", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.buffer_unused_in", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.frameOf", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Pointer.Base.npointer", - "equation_FStar.Pointer.Base.pointer", - "equation_FStar.Pointer.Base.unused_in", "equation_FStar.Set.subset", - "equation_Prims.eqtype", "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._npointer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Monotonic.HyperStack.contains_aref_unused_in", - "lemma_FStar.Set.mem_intersect", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "proj_equation_FStar.Pointer.Base.Pointer_contents", - "projection_inverse_BoxBool_proj_0", - "refinement_interpretation_Tm_refine_05e15190c946858f68c69156f585f95a", - "refinement_interpretation_Tm_refine_365abba901205a01d0ef28ebf2198c47", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_afd51579b90d50ea23e03b743a1fa001", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - 
"refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperHeap.rid_freeable", - "typing_FStar.Monotonic.HyperHeap.root", - "typing_FStar.Monotonic.HyperStack.frameOf", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_as_addr" - ], - 0, - "131bc5ec42ec3d20885b6586a3efda57" - ], - [ - "FStar.Pointer.Base.buffer_live_reference_unused_in_disjoint", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "equation_Prims.eqtype", - "equation_Prims.nat", "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f" - ], - 0, - "a9bfcbc543788eac250810bc925f305a" - ], - [ - "FStar.Pointer.Base.buffer_live_reference_unused_in_disjoint", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Heap.trivial_preorder", - "equation_FStar.HyperStack.reference", - "equation_FStar.ModifiesGen.loc_freed_mreference", - "equation_FStar.Monotonic.HyperStack.live_region", - "equation_FStar.Monotonic.HyperStack.mreference", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_as_addr", - "equation_FStar.Pointer.Base.buffer_live", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.frameOf_buffer", - "equation_FStar.Pointer.Base.loc_addresses", - "equation_FStar.Pointer.Base.loc_buffer", - "equation_FStar.Pointer.Base.loc_disjoint", - "equation_FStar.Pointer.Base.loc_includes", - "equation_FStar.Set.subset", "equation_Prims.eqtype", - "equation_Prims.nat", "equation_Prims.pos", - "fuel_guarded_inversion_FStar.Monotonic.HyperStack.mreference_", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "function_token_typing_Prims.int", - "haseqTm_refine_542f9d4f129664613f2483a6c88bc7c2", - "lemma_FStar.Pointer.Base.live_region_frameOf_buffer", - "lemma_FStar.Set.mem_intersect", "lemma_FStar.Set.mem_singleton", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_Equality", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_t", - "refinement_interpretation_Tm_refine_414d0a9f578ab0048252f8c8f552b99f", - "refinement_interpretation_Tm_refine_774ba3f728d91ead8ef40be66c9802e5", - "refinement_interpretation_Tm_refine_afd51579b90d50ea23e03b743a1fa001", - "refinement_kinding_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "typing_FStar.Heap.trivial_preorder", - "typing_FStar.Monotonic.HyperStack.as_addr", - "typing_FStar.Monotonic.HyperStack.live_region", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_as_addr" - ], - 0, - "fc20c2b6f95d7ae7992e96ba864f5e0c" - ], - [ - "FStar.Pointer.Base.root_buffer", - 1, - 1, - 1, - [ - "@MaxFuel_assumption", "@MaxIFuel_assumption", - "@fuel_correspondence_Prims.pow2.fuel_instrumented", - 
"@fuel_irrelevance_Prims.pow2.fuel_instrumented", "@query", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", "b2t_def", - "bool_inversion", - "constructor_distinct_FStar.Pointer.Base.BufferRootArray", - "constructor_distinct_FStar.Pointer.Base.BufferRootSingleton", - "data_elim_FStar.Pointer.Base.Buffer", - "disc_equation_FStar.Pointer.Base.BufferRootArray", - "disc_equation_FStar.Pointer.Base.BufferRootSingleton", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.g_is_null", - "equation_FStar.Pointer.Base.not_an_array_cell", - "equation_FStar.Pointer.Base.pointer", "equation_FStar.UInt.fits", - "equation_FStar.UInt.max_int", "equation_FStar.UInt.min_int", - "equation_FStar.UInt.size", "equation_FStar.UInt.uint_t", - "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", "int_typing", "lemma_FStar.UInt.pow2_values", - "lemma_FStar.UInt32.vu_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_max_length", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_p", - "projection_inverse_FStar.Pointer.Base.BufferRootArray_t", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_p", - "projection_inverse_FStar.Pointer.Base.BufferRootSingleton_t", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5a212a071163d99770d05bcb6b988cd0", - "refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_b31ca53c440388681f8686931d7c051a", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.not_an_array_cell", - "typing_FStar.UInt.fits", "typing_FStar.UInt32.v" - ], - 0, - "f18c8b1ad1307c2c342cacda4e1f8391" - ], - [ - "FStar.Pointer.Base.buffer_idx", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "constructor_distinct_Tm_unit", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.root_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "lemma_FStar.UInt32.uv_inv", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.root_buffer", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "b7e487d4ccfef18e08989576855c383b" - ], - [ - "FStar.Pointer.Base.buffer_eq_gsub_root", - 1, - 1, - 1, - [ "@query" ], - 0, - "95304e90c44378c0aba433f0a46b0c7f" - ], - [ - "FStar.Pointer.Base.buffer_eq_gsub_root", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "constructor_distinct_Tm_unit", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_idx", - 
"equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.root_buffer", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.root_buffer", "typing_FStar.UInt32.v" - ], - 0, - "ba63038f3e5cdd30957219e18768786c" - ], - [ - "FStar.Pointer.Base.root_buffer_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.root_buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot" - ], - 0, - "52ea29e30b74c9ba1971ac68738bfd2e" - ], - [ - "FStar.Pointer.Base.buffer_idx_gsub_buffer", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "int_inversion", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.root_buffer", "typing_FStar.UInt32.v" - ], - 0, - "c34668f75f1ba18fadb7665ee1c9041d" - ], - [ - "FStar.Pointer.Base.buffer_idx_gsub_buffer", - 2, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.buffer_idx", - "equation_FStar.Pointer.Base.gsub_buffer", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx" - ], - 0, - "90d0fe6d53bef94794a65cf5524cab5a" - ], - [ - "FStar.Pointer.Base.buffer_includes_refl", - 1, - 1, - 1, - [ - "@query", "b2t_def", "eq2-interp", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_includes", "l_and-interp", - "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "a83451719aad37567413e66e059737fb" - ], - [ - "FStar.Pointer.Base.buffer_includes_trans", - 1, - 1, - 1, - [ - "@query", "b2t_def", "eq2-interp", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_includes", "l_and-interp", - "primitive_Prims.op_Addition", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0" - ], - 0, - "8681787d1d88eb612e201caa60387ebb" - ], - [ - "FStar.Pointer.Base.buffer_includes_gsub_r", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "eq2-interp", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_idx", - "equation_FStar.Pointer.Base.buffer_includes", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - 
"fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "l_and-interp", "lemma_FStar.Pointer.Base.buffer_idx_gsub_buffer", - "lemma_FStar.Pointer.Base.buffer_length_gsub_buffer", - "lemma_FStar.Pointer.Base.root_buffer_gsub_buffer", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.uint_to_t", - "typing_FStar.UInt32.v" - ], - 0, - "3b744b8a92e930d9016c11172b644b3d" - ], - [ - "FStar.Pointer.Base.buffer_includes_gsub", - 1, - 1, - 1, - [ "@query" ], - 0, - "ca56e903c0ddb1ea766bbd14e7f43531" - ], - [ - "FStar.Pointer.Base.buffer_includes_gsub", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", "bool_inversion", - "bool_typing", "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.Buffer", "eq2-interp", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_idx", - "equation_FStar.Pointer.Base.buffer_includes", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.root_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", "int_inversion", - "int_typing", "l_and-interp", - "lemma_FStar.Pointer.Base.root_buffer_gsub_buffer", - "lemma_FStar.UInt32.uv_inv", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_bc3e4ef50c18a50a72b09b9950468e20", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - 
"typing_FStar.Pointer.Base.gsub_buffer", - "typing_FStar.Pointer.Base.root_buffer", "typing_FStar.UInt.fits", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.uint_to_t", - "typing_FStar.UInt32.v" - ], - 0, - "efb65a069f3bfc1459eff311a59dfbce" - ], - [ - "FStar.Pointer.Base.buffer_includes_elim", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.root_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "lemma_FStar.UInt32.uv_inv", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_3256d3fe4550b585f500f20586b94bcb", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__blength", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.buffer_root_length", - "typing_FStar.Pointer.Base.root_buffer", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "148cdaeb95fd565b8caae6bcaf43c6af" - ], - [ - "FStar.Pointer.Base.buffer_includes_elim", - 2, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "FStar.UInt32_pretyping_2ab3c8ba2d08b0172817fc70b5994868", "b2t_def", - "bool_inversion", "bool_typing", "constructor_distinct_Tm_unit", - "data_elim_FStar.Pointer.Base.Buffer", "eq2-interp", - "equation_FStar.Pointer.Base.buffer", - "equation_FStar.Pointer.Base.buffer_idx", - "equation_FStar.Pointer.Base.buffer_includes", - "equation_FStar.Pointer.Base.buffer_length", - "equation_FStar.Pointer.Base.buffer_root_length", - "equation_FStar.Pointer.Base.gsub_buffer", - "equation_FStar.Pointer.Base.root_buffer", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "equation_Prims.nat", - "fuel_guarded_inversion_FStar.Pointer.Base._buffer", - "fuel_guarded_inversion_FStar.Pointer.Base.buffer_root", - "int_inversion", "int_typing", "l_and-interp", - "lemma_FStar.UInt32.uv_inv", "lemma_FStar.UInt32.vu_inv", - "primitive_Prims.op_Addition", "primitive_Prims.op_AmpAmp", - "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "proj_equation_FStar.Pointer.Base.Buffer_bidx", - "proj_equation_FStar.Pointer.Base.Buffer_blength", - "proj_equation_FStar.Pointer.Base.Buffer_broot", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "projection_inverse_FStar.Pointer.Base.Buffer_bidx", - "projection_inverse_FStar.Pointer.Base.Buffer_blength", - "projection_inverse_FStar.Pointer.Base.Buffer_broot", - "refinement_interpretation_Tm_refine_0ea1fba779ad5718e28476faeef94d56", - "refinement_interpretation_Tm_refine_542f9d4f129664613f2483a6c88bc7c2", - "refinement_interpretation_Tm_refine_5f4176692ba0d4ebb4eb0d80d43bf405", - "refinement_interpretation_Tm_refine_709aff84c75b0fff77dcbf3b529649dd", - "refinement_interpretation_Tm_refine_aa4b3d268075d84252df525db1f85524", - 
"refinement_interpretation_Tm_refine_bc3e4ef50c18a50a72b09b9950468e20", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.__proj__Buffer__item__bidx", - "typing_FStar.Pointer.Base.__proj__Buffer__item__broot", - "typing_FStar.Pointer.Base.buffer_length", - "typing_FStar.Pointer.Base.gsub_buffer", - "typing_FStar.Pointer.Base.root_buffer", "typing_FStar.UInt.fits", - "typing_FStar.UInt32.add", "typing_FStar.UInt32.sub", - "typing_FStar.UInt32.uint_to_t", "typing_FStar.UInt32.v" - ], - 0, - "98ab333cf5f218788a68e64e4820bf57" - ], - [ - "FStar.Pointer.Base.buffer_includes_loc_includes", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", "b2t_def", - "equation_FStar.UInt.fits", "equation_FStar.UInt.max_int", - "equation_FStar.UInt.min_int", "equation_FStar.UInt.size", - "equation_FStar.UInt.uint_t", "primitive_Prims.op_Addition", - "primitive_Prims.op_AmpAmp", "primitive_Prims.op_LessThanOrEqual", - "primitive_Prims.op_Subtraction", - "projection_inverse_BoxBool_proj_0", - "projection_inverse_BoxInt_proj_0", - "refinement_interpretation_Tm_refine_f13070840248fced9d9d60d77bdae3ec", - "typing_FStar.Pointer.Base.buffer_length", "typing_FStar.UInt32.v" - ], - 0, - "c50e9a9939cd46783aa50143b266b727" - ], - [ - "FStar.Pointer.Base.loc_of_cloc_of_loc", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.cloc_of_loc", - "equation_FStar.Pointer.Base.loc_of_cloc" - ], - 0, - "f0d337fcff7664a23f7829b63e735186" - ], - [ - "FStar.Pointer.Base.cloc_of_loc_of_cloc", - 1, - 1, - 1, - [ - "@query", "equation_FStar.Pointer.Base.cloc_of_loc", - "equation_FStar.Pointer.Base.loc_of_cloc" - ], - 0, - "3de068f1df423493d8f8f03aee070984" - ], - [ - "FStar.Pointer.Base.loc_includes_to_cloc", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.cloc_cls", - "equation_FStar.Pointer.Base.cloc_of_loc", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_includes", - "fuel_guarded_inversion_FStar.ModifiesGen.cls", - "projection_inverse_FStar.ModifiesGen.Cls_aloc", - "typing_FStar.Pointer.Base.cloc_cls" - ], - 0, - "3163ba918728070db1b6d3bfb15cf4cf" - ], - [ - "FStar.Pointer.Base.loc_disjoint_to_cloc", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.cloc_cls", - "equation_FStar.Pointer.Base.cloc_of_loc", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.loc_disjoint", - "fuel_guarded_inversion_FStar.ModifiesGen.cls", - "projection_inverse_FStar.ModifiesGen.Cls_aloc", - "typing_FStar.Pointer.Base.cloc_cls" - ], - 0, - "832cd203f36fbd2a09c3fa4e8de2ecfa" - ], - [ - "FStar.Pointer.Base.modifies_to_cloc", - 1, - 1, - 1, - [ - "@MaxIFuel_assumption", "@query", - "equation_FStar.Pointer.Base.cloc_cls", - "equation_FStar.Pointer.Base.cloc_of_loc", - "equation_FStar.Pointer.Base.cls", - "equation_FStar.Pointer.Base.modifies", - "fuel_guarded_inversion_FStar.ModifiesGen.cls", - "projection_inverse_FStar.ModifiesGen.Cls_aloc", - "typing_FStar.Pointer.Base.cloc_cls" - ], - 0, - "4766891a8699cf30faf2d702ece8d11a" - ] - ] -] \ No newline at end of file diff --git a/ulib/ml/app-extra/FStar_Buffer.ml b/ulib/ml/app-extra/FStar_Buffer.ml new file mode 100644 index 00000000000..0fe0c803e94 --- /dev/null +++ b/ulib/ml/app-extra/FStar_Buffer.ml @@ -0,0 +1,68 @@ + +let disjoint_only_lemma t b t' b' = () +let eq_lemma h0 h1 size a mods = () +let modifies_transitivity_lemma mods h0 h1 h2 = () +let modifies_subset_lemma mods submods h0 h1 = () +let modifies_empty_lemma h = () 
+let modifies_fresh_lemma h0 h1 mods size b = () + +type ('a, 'b, 'c, 'd) disjoint = unit +type ('a, 'b, 'c) live = unit + +type abuffer = | Buff of (unit * unit) + +type 'a buffer = { + content:'a array; + idx:int; + length:int; + } + +type u8 = FStar_UInt8.t +type u32 = FStar_UInt32.t +type u64 = FStar_UInt64.t +type u128 = FStar_UInt128.t + +type uint8s = u8 buffer +type uint32s = u32 buffer +type uint64s = u64 buffer +type uint128s = u128 buffer + +let create init len = {content = Array.make len init; idx = 0; length = len} +let createL l = {content = Array.of_list l; idx = 0; length = List.length l} +let rcreate r init len = create init len +let index b n = Array.get b.content (n+b.idx) +let upd (b:'a buffer) (n:u32) (v:'a) = Array.set b.content (FStar_UInt32.to_native_int n + b.idx) v +let sub b i len = {content = b.content; idx = b.idx+i; length = len} +let offset b i = {content = b.content; idx = b.idx+i; length = b.length-i} +let blit a idx_a b idx_b len = + let idx_a = FStar_UInt32.to_native_int idx_a in + let idx_b = FStar_UInt32.to_native_int idx_b in + let len = FStar_UInt32.to_native_int len in + Array.blit a.content (idx_a+a.idx) b.content (idx_b+b.idx) len + +let fill a z len = Array.fill a.content a.idx (FStar_UInt32.to_native_int len) z +let split a i = (sub a 0 i, sub a i (a.length - i)) +let of_seq s l = () +let copy b l = {content = Array.sub b.content b.idx l; idx = 0; length = l} + +let eqb b1 b2 (len:u32) = + Array.sub b1.content b1.idx (FStar_UInt32.to_native_int len) = Array.sub b2.content b2.idx (FStar_UInt32.to_native_int len) + +type ('a, 'b, 'c, 'd) modifies_buf = unit +let op_Plus_Plus a b = BatSet.empty +let only a = BatSet.empty + +let op_Array_Access b n = index b n +let op_Array_Assignment b n v = upd b n v + +let recall = fun b -> () + +let of_ocaml_array a = { + content = a; + idx = 0; + length = Array.length a +} + +(* AR: revisit. 
This is used in the idealization code of AEAD encrypt *) +let to_seq_full b = Obj.magic () + diff --git a/ulib/ml/app-extra/FStar_HyperStack_ST.ml b/ulib/ml/app-extra/FStar_HyperStack_ST.ml new file mode 100644 index 00000000000..73817896ff0 --- /dev/null +++ b/ulib/ml/app-extra/FStar_HyperStack_ST.ml @@ -0,0 +1,80 @@ +open FStar_CommonST + +open FStar_Monotonic_HyperHeap +open FStar_Monotonic_HyperStack + +open FStar_HyperStack + +let push_frame () = () +let pop_frame () = () + +let root = () + +let def_rid = root + +let salloc (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (root, r) + +let salloc_mm (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (root, r) + +let sfree r = () + +let new_region = (fun r0 -> def_rid) +let new_colored_region = (fun r0 c -> def_rid) + +let ralloc i (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (i, r) + +let ralloc_mm i (contents:'a) :('a reference) = + let r = FStar_CommonST.alloc contents in + MkRef (i, r) + +let rfree r = () + +let op_Colon_Equals r v = match r with + | MkRef (_, r) -> op_Colon_Equals r v + +let op_Bang r = match r with + | MkRef (_, r) -> op_Bang r + +let read = op_Bang + +let write = op_Colon_Equals + +let get () = HS (Prims.parse_int "0", FStar_Map.const FStar_Monotonic_Heap.emp, def_rid) + +let recall = (fun r -> ()) + +let recall_region = (fun r -> ()) +let witness_region _ = () +let witness_hsref _ = () +type erid = rid + +type 'a ref = 'a FStar_HyperStack.reference +type ('a, 'b) mreference = 'a ref +type 'a reference = 'a ref +let alloc = salloc +type ('a, 'b) mref = 'a ref +type ('a, 'b, 'c) m_rref = 'b ref +type ('a, 'b, 'c, 'd, 'e) stable_on_t = unit +let mr_witness _ _ _ _ _ = () +let testify _ = () +let testify_forall _ = () +let testify_forall_region_contains_pred _ _ = () + +type ex_rid = erid +type 'a witnessed = 'a FStar_CommonST.witnessed +type ('a, 'b, 'c, 'd) stable_on = unit +type ('a, 'b, 'c, 'd) token = unit +let witness_p _ _ = () +let recall_p _ _ = () + +type drgn = rid +let new_drgn _ = () +let free_drgn _ = () +let ralloc_drgn = ralloc +let ralloc_drgn_mm = ralloc_mm diff --git a/ulib/ml/app-extra/README.txt b/ulib/ml/app-extra/README.txt new file mode 100644 index 00000000000..0a613f953fd --- /dev/null +++ b/ulib/ml/app-extra/README.txt @@ -0,0 +1,3 @@ +This is really part of the application library, but depends on some extracted +lib modules so it cannot be used in building the compiler. Hence, we separate it here, +so we can just include 'app' when building fstarc-bare. 
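The FStar_Buffer.ml shim added above represents an F* buffer as an OCaml record over a shared array plus an offset, so `sub` and `offset` build views without copying. A minimal usage sketch under that reading (hypothetical, not part of this diff; it assumes `FStar_UInt32.of_native_int`, which the FStar_Bytes.ml code further down also relies on):

(* Sketch only: exercises create/upd/index/offset from the shim above. *)
let _buffer_demo () =
  let b = create 0 8 in                        (* 8-cell buffer initialised to 0        *)
  upd b (FStar_UInt32.of_native_int 3) 42;     (* upd takes a u32 index ...             *)
  assert (index b 3 = 42);                     (* ... while index takes a native int    *)
  let tail = offset b 3 in                     (* shares content, only shifts idx       *)
  assert (index tail 0 = 42)

Note that mutation through one view is visible through every other view of the same `content` array, which matches the intended F* buffer semantics.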
diff --git a/ulib/ml/app/FStar_All.ml b/ulib/ml/app/FStar_All.ml new file mode 100644 index 00000000000..c9a376ee3ac --- /dev/null +++ b/ulib/ml/app/FStar_All.ml @@ -0,0 +1,3 @@ +exception Failure = Failure +let failwith x = raise (Failure x) +let exit i = exit (Z.to_int i) diff --git a/ulib/ml/app/FStar_Bytes.ml b/ulib/ml/app/FStar_Bytes.ml new file mode 100644 index 00000000000..ac80dca1b1b --- /dev/null +++ b/ulib/ml/app/FStar_Bytes.ml @@ -0,0 +1,249 @@ +module U8 = FStar_UInt8 +module U16 = FStar_UInt16 +module U32 = FStar_UInt32 +module U64 = FStar_UInt64 + +type u8 = U8.t +type u16 = U16.t +type u32 = U32.t + +type byte = u8 + +type bytes = string +type cbytes = string (* not in FStar.Bytes *) + +let len (b:bytes) = U32.of_native_int (String.length b) +let length (b:bytes) = Z.of_int (String.length b) + +let reveal (b:bytes) = () +let length_reveal (x:bytes) = () +let hide s = () +let hide_reveal (x:bytes) = () +let reveal_hide s = () + +type 'a lbytes = bytes +type 'a lbytes32 = bytes +type kbytes = bytes + +let empty_bytes = "" +let empty_unique (b:bytes) = () + +let get (b:bytes) (pos:u32) = int_of_char (String.get b (Z.to_int (U32.to_int pos))) +let op_String_Access = get + +let index (b:bytes) (i:Z.t) = get b (U32.uint_to_t i) + +type ('b1, 'b2) equal = unit + +let extensionality (b1:bytes) (b2:bytes) = () + +let create (len:u32) (v:byte) = String.make (U32.to_native_int len) (char_of_int v) +let create_ (len:Z.t) (v:byte) = String.make (Z.to_int len) (char_of_int v) + +let init (len:u32) (f:u32 -> byte) = + String.init (U32.to_native_int len) + (fun (i:int) -> + let b : byte = f (U32.of_native_int i) in + char_of_int b) + +let abyte (b:byte) = create (U32.of_native_int 1) b +let twobytes (bs:(byte * byte)) = + init (U32.of_native_int 2) (fun i -> if i = U32.of_native_int 0 then fst bs else snd bs) + +let append (b1:bytes) (b2:bytes) = b1 ^ b2 +let op_At_Bar = append + +let slice (b:bytes) (s:u32) (e:u32) = + String.sub b (U32.to_native_int s) (U32.to_native_int (U32.sub e s)) +let slice_ (b:bytes) (s:Z.t) (e:Z.t) = + slice b (U32.uint_to_t s) (U32.uint_to_t e) + +let sub (b:bytes) (s:u32) (l:u32) = + String.sub b (U32.to_native_int s) (U32.to_native_int l) + +let split (b:bytes) (k:u32) = + sub b (U32.of_native_int 0) k, + sub b k (U32.sub (U32.of_native_int (String.length b)) k) +let split_ (b:bytes) (k:Z.t) = + split b (U32.of_int k) + +let fits_in_k_bytes (n:Z.t) (k:Z.t) = (* expects k to fit in an int *) + Z.leq Z.zero n && + Z.leq n (Z.of_int (BatInt.pow 2 (8 * Z.to_int k))) +type 'a uint_k = Z.t + +let rec repr_bytes (n:Z.t) = + if Z.to_int n < 256 then Z.of_int 1 + else Z.add (Z.of_int 1) (repr_bytes (Z.div n (Z.of_int 256))) + +let lemma_repr_bytes_values (n:Z.t) = () +let repr_bytes_size (k:Z.t) (n:'a uint_k) = () +let int_of_bytes (b:bytes) = + let x = ref Z.zero in + let len = String.length b in + let n = Z.of_int 256 in + for y = 0 to len-1 do + x := Z.add (Z.mul n !x) (Z.of_int (get b (U32.of_native_int y))) + done; + !x + +let bytes_of_int (nb:Z.t) (i:Z.t) = + let nb = Z.to_int nb in + let i = Z.to_int64 i in + if Int64.compare i Int64.zero < 0 then failwith "Negative 64bit."; + let rec put_bytes bb lb n = + if lb = 0 then failwith "not enough bytes" + else + begin + let lown = Int64.logand n (Int64.of_int 255) in + Bytes.set bb (lb-1) (char_of_int (Int64.to_int lown)); + let ns = Int64.div n (Int64.of_int 256) in + if Int64.compare ns Int64.zero > 0 then + put_bytes bb (lb-1) ns + else bb + end + in + let b = Bytes.make nb (char_of_int 0) in + Bytes.to_string 
(put_bytes b nb i) + +let int_of_bytes_of_int (k:Z.t) (n:'a uint_k) = () +let bytes_of_int_of_bytes (b:bytes) = () + +let int32_of_bytes (b:bytes) = + Z.to_int (int_of_bytes b) + +let int16_of_bytes (b:bytes) = + Z.to_int (int_of_bytes b) + +let int8_of_bytes (b:bytes) = + Z.to_int (int_of_bytes b) + +let bytes_of_int32 (n:U32.t) = + bytes_of_int (Z.of_int 4) (U32.to_int n) + +let bytes_of_int16 (n:U32.t) = + bytes_of_int (Z.of_int 2) (U32.to_int n) + +let bytes_of_int8 (n:U32.t) = + bytes_of_int (Z.of_int 1) (U32.to_int n) + +type 'a minbytes = bytes + +let xor (len:U32.t) (s1:'a minbytes) (s2:'b minbytes) : bytes = + let f (i:u32) : byte = + let l = int_of_char s1.[U32.to_native_int i] in + let r = int_of_char s2.[U32.to_native_int i] in + l lxor r + in + init len f + +let xor_ (len:Z.t) = xor (U32.of_int len) + +let xor_commutative (n:U32.t) (b1: 'a minbytes) (b2: 'b minbytes) = () +let xor_append (b1:bytes) (b2:bytes) (x1:bytes) (b2:bytes) = () +let xor_idempotent (n:U32.t) (b1:bytes) (b2:bytes) = () + +(*********************************************************************************) +(* Under discussion *) +let utf8 (x:string) : bytes = x (* TODO: use Camomile *) +let utf8_encode = utf8 +let iutf8 (x:bytes) : string = x (* TODO: use Camomile *) +let iutf8_opt (x:bytes) : string option = Some (x) +(*********************************************************************************) + +(* Some helpers to deal with the conversation from hex literals to bytes and + * conversely. Mostly for tests. *) + +let digit_to_int c = match c with + | '0'..'9' -> Char.code c - Char.code '0' + | 'a'..'f' -> 10 + Char.code c - Char.code 'a' + | _ -> failwith "hex_to_char: invalid hex digit" + +let hex_to_char a b = + Char.chr ((digit_to_int a) lsl 4 + digit_to_int b) + +let char_to_hex c = + let n = Char.code c in + let digits = "0123456789abcdef" in + digits.[n lsr 4], digits.[n land 0x0f] + +let string_of_hex s = + let n = String.length s in + if n mod 2 <> 0 then + failwith "string_of_hex: invalid length" + else + let res = Bytes.create (n/2) in + let rec aux i = + if i >= n then () + else ( + Bytes.set res (i/2) (hex_to_char s.[i] s.[i+1]); + aux (i+2) + ) + in + aux 0; + res +let bytes_of_hex s = Bytes.to_string (string_of_hex s) + +let hex_of_string s = + let n = String.length s in + let buf = Buffer.create n in + for i = 0 to n - 1 do + let d1,d2 = char_to_hex s.[i] in + Buffer.add_char buf d1; + Buffer.add_char buf d2; + done; + Buffer.contents buf +let hex_of_bytes b = hex_of_string b + +let print_bytes (s:bytes) : string = + let b = Buffer.create 1024 in + for i = 0 to String.length s - 1 do + Buffer.add_string b (Printf.sprintf "%02X" (int_of_char s.[i])); + done; + Buffer.contents b + +let string_of_bytes b = b +let bytes_of_string s = s + +(*********************************************************************************) +(* OLD *) +(*********************************************************************************) + +let cbyte (b:bytes) = + try int_of_char (String.get b 0) + with _ -> failwith "cbyte: called on empty string" + +let cbyte2 (b:bytes) = + try (int_of_char (String.get b 0), int_of_char (String.get b 1)) + with _ -> failwith "cbyte2: need at least length 2" + +let index (b:bytes) i = + try int_of_char (String.get b (Z.to_int i)) + with _ -> failwith "index: called out of bound" + +let get_cbytes (b:bytes) = b +let abytes (ba:cbytes) = ba +let abyte (ba:byte) = String.make 1 (char_of_int ba) +let abyte2 (ba1,ba2) = + String.init 2 (fun i -> if i = 0 then char_of_int ba1 else 
char_of_int ba2) + +let split_eq = split + +let createBytes len (value:int) : bytes = + let len = Z.to_int len in + try abytes (String.make len (char_of_int value)) + with _ -> failwith "Default integer for createBytes was greater than max_value" + +let initBytes len f : bytes = + let len = Z.to_int len in + try abytes (String.init len (fun i -> char_of_int (f (Z.of_int i)))) + with _ -> failwith "Platform.Bytes.initBytes: invalid char returned" + +let equalBytes (b1:bytes) (b2:bytes) = b1 = b2 + +let split2 (b:bytes) i j : bytes * bytes * bytes = + let b1, b2 = split b i in + let b2a, b2b = split b2 j in + (b1, b2a, b2b) + +let byte_of_int i = Z.to_int i diff --git a/ulib/ml/app/FStar_Char.ml b/ulib/ml/app/FStar_Char.ml new file mode 100644 index 00000000000..2727e723626 --- /dev/null +++ b/ulib/ml/app/FStar_Char.ml @@ -0,0 +1,21 @@ +module UChar = BatUChar + +module U32 = FStar_UInt32 + +type char = int[@@deriving yojson,show] +type char_code = U32.t + +(* FIXME(adl) UChar.lowercase/uppercase removed from recent Batteries. Use Camomile? *) +let lowercase (x:char) : char = + try Char.code (Char.lowercase_ascii (Char.chr x)) + with _ -> x + +let uppercase (x:char) : char = + try Char.code (Char.uppercase_ascii (Char.chr x)) + with _ -> x + +let int_of_char (x:char) : Z.t= Z.of_int x +let char_of_int (i:Z.t) : char = Z.to_int i + +let u32_of_char (x:char) : char_code = U32.of_native_int x +let char_of_u32 (x:char_code) : char = U32.to_native_int x diff --git a/ulib/ml/app/FStar_CommonST.ml b/ulib/ml/app/FStar_CommonST.ml new file mode 100644 index 00000000000..2a798438918 --- /dev/null +++ b/ulib/ml/app/FStar_CommonST.ml @@ -0,0 +1,19 @@ +open FStar_Monotonic_Heap + +let read x = !x + +let op_Bang x = read x + +let write x y = x := y + +let op_Colon_Equals x y = write x y + +let alloc contents = ref contents + +let recall = (fun r -> ()) +let get () = () + +type 'a witnessed = | C + +let gst_witness = (fun r -> ()) +let gst_recall = (fun r -> ()) diff --git a/ulib/ml/app/FStar_Exn.ml b/ulib/ml/app/FStar_Exn.ml new file mode 100644 index 00000000000..8128ed78d96 --- /dev/null +++ b/ulib/ml/app/FStar_Exn.ml @@ -0,0 +1 @@ +let raise = raise diff --git a/ulib/ml/app/FStar_Float.ml b/ulib/ml/app/FStar_Float.ml new file mode 100644 index 00000000000..39546f9599e --- /dev/null +++ b/ulib/ml/app/FStar_Float.ml @@ -0,0 +1,2 @@ +type double = float[@@deriving yojson,show] +type float = double[@@deriving yojson,show] diff --git a/ulib/ml/app/FStar_Heap.ml b/ulib/ml/app/FStar_Heap.ml new file mode 100644 index 00000000000..f58f5935c55 --- /dev/null +++ b/ulib/ml/app/FStar_Heap.ml @@ -0,0 +1,5 @@ +open FStar_Monotonic_Heap + +type 'a ref = 'a FStar_Monotonic_Heap.ref +type ('a, 'b, 'c) trivial_rel = Prims.l_True +type ('a, 'b, 'c) trivial_preorder = ('a, 'b, 'c) trivial_rel diff --git a/ulib/ml/app/FStar_IO.ml b/ulib/ml/app/FStar_IO.ml new file mode 100644 index 00000000000..0888665e028 --- /dev/null +++ b/ulib/ml/app/FStar_IO.ml @@ -0,0 +1,82 @@ +exception EOF +type fd_read = in_channel +type fd_write = out_channel +let stdin = stdin +let stdout = stdout +let stderr = stderr + +let pr = Printf.printf +let spr = Printf.sprintf +let fpr = Printf.fprintf + +let print_newline = print_newline +let print_string s = pr "%s" s; flush stdout + + +(* let print_nat s = + * pr "%x" s; + * flush stdout + * + * let print_nat_dec s = + * pr "%u" s; + * flush stdout *) + +let print_via (f:'a -> string) (x:'a) : unit = + print_string (f x); + flush stdout + +let print_uint8 = print_via FStar_UInt8.to_string_hex +let 
print_uint16 = print_via FStar_UInt16.to_string_hex +let print_uint32 = print_via FStar_UInt32.to_string_hex +let print_uint64 = print_via FStar_UInt64.to_string_hex + +let print_uint8_dec = print_via FStar_UInt8.to_string +let print_uint16_dec = print_via FStar_UInt16.to_string +let print_uint32_dec = print_via FStar_UInt32.to_string +let print_uint64_dec = print_via FStar_UInt64.to_string + +let print_uint8_hex_pad = print_via FStar_UInt8.to_string_hex_pad +let print_uint16_hex_pad = print_via FStar_UInt16.to_string_hex_pad +let print_uint32_hex_pad = print_via FStar_UInt32.to_string_hex_pad +let print_uint64_hex_pad = print_via FStar_UInt64.to_string_hex_pad + + +let __zeropad n s = + String.make (n - String.length s) '0' ^ s + +(* The magic numbers in these dec_pad functions are the precomputed + * string lengths of the maximum number when printed in decimal. + * + * - length "255" = 3 + * - length "65535" = 5 + * - length "4294967296" = 10 + * - length "18446744073709551616" = 20 + *) +let print_uint8_dec_pad n = + let s = FStar_UInt8.to_string n in + print_string (__zeropad 3 s) + +let print_uint16_dec_pad n = + let s = FStar_UInt16.to_string n in + print_string (__zeropad 5 s) + +let print_uint32_dec_pad n = + let s = FStar_UInt32.to_string n in + print_string (__zeropad 10 s) + +let print_uint64_dec_pad n = + let s = FStar_UInt64.to_string n in + print_string (__zeropad 20 s) + +let print_any s = output_value stdout s; flush stdout +let input_line = read_line +let input_int () = Z.of_int (read_int ()) +let input_float = read_float +let open_read_file = open_in +let open_write_file = open_out +let close_read_file = close_in +let close_write_file = close_out +let read_line fd = try Stdlib.input_line fd with End_of_file -> raise EOF +let write_string = output_string + +let debug_print_string s = print_string s; false diff --git a/ulib/ml/app/FStar_ImmutableArray.ml b/ulib/ml/app/FStar_ImmutableArray.ml new file mode 100644 index 00000000000..342a434e9e9 --- /dev/null +++ b/ulib/ml/app/FStar_ImmutableArray.ml @@ -0,0 +1,2 @@ +module IAB = FStar_ImmutableArray_Base +let to_list (x:'a IAB.t) = Array.to_list x diff --git a/ulib/ml/app/FStar_ImmutableArray_Base.ml b/ulib/ml/app/FStar_ImmutableArray_Base.ml new file mode 100644 index 00000000000..2cb272926af --- /dev/null +++ b/ulib/ml/app/FStar_ImmutableArray_Base.ml @@ -0,0 +1,7 @@ +type 'a t = 'a array + +let of_list (l:'a list) = Array.of_list l + +let length (a: 'a t) = Z.of_int (Array.length a) + +let index (a: 'a t) (i:Z.t) = Array.get a (Z.to_int i) diff --git a/ulib/ml/app/FStar_List.ml b/ulib/ml/app/FStar_List.ml new file mode 100644 index 00000000000..4ae3e5c1b49 --- /dev/null +++ b/ulib/ml/app/FStar_List.ml @@ -0,0 +1,82 @@ +(* We give an implementation here using OCaml's BatList, + which provides tail-recursive versions of most functions *) +include FStar_List_Tot_Base + +let isEmpty l = l = [] +let singleton x = [x] +let mem = BatList.mem +let memT = mem +let hd = BatList.hd +let tl = BatList.tl +let tail = BatList.tl + +let nth l i = BatList.nth l (Z.to_int i) +let length l = Z.of_int (BatList.length l) +let rev = BatList.rev +let map = BatList.map +let mapT = map +let mapi f l = BatList.mapi (fun i x -> f (Z.of_int i) x) l +let map2 = BatList.map2 +let rec map3 f l1 l2 l3 = + match l1, l2, l3 with + | [], [], [] -> [] + | x::xs, y::ys, z::zs -> (f x y z)::(map3 f xs ys zs) + | _, _, _ -> failwith "The lists do not have the same length" +let iter = BatList.iter +let iter2 = BatList.iter2 +let iteri_aux _ _ _ = failwith 
"FStar_List.ml: Not implemented: iteri_aux" +let iteri f l = BatList.iteri (fun i x -> f (Z.of_int i) x) l +let partition = BatList.partition +let append = BatList.append +let rev_append = BatList.rev_append +let fold_left = BatList.fold_left +let fold_right = BatList.fold_right +let fold_left2 = BatList.fold_left2 +let fold_right2 = BatList.fold_right2 +let rev_map_onto f l acc = fold_left (fun acc x -> f x :: acc) acc l +let rec init = function + | [] -> failwith "init: empty list" + | [h] -> [] + | h::t -> h::(init t) +let last = BatList.last +let last_opt l = List.fold_left (fun _ x -> Some x) None l +let collect f l = BatList.flatten (BatList.map f l) +let unzip = BatList.split +let rec unzip3 = function + | [] -> ([],[],[]) + | (x,y,z)::xyzs -> + let (xs,ys,zs) = unzip3 xyzs in + (x::xs,y::ys,z::zs) +let filter = BatList.filter +let sortWith f l = BatList.sort (fun x y -> Z.to_int (f x y)) l +let for_all = BatList.for_all +let forall2 = BatList.for_all2 +let tryFind f l = try Some (BatList.find f l) with | Not_found -> None +let tryFindT = tryFind +let find = tryFind +let tryPick f l = try f (BatList.find (fun x -> f x <> None) l) with | Not_found -> None +let flatten = BatList.flatten +let concat = flatten +let split = unzip +let choose = BatList.filter_map +let existsb f l = BatList.exists f l +let existsML f l = BatList.exists f l +let contains x l = BatList.exists (fun y -> x = y) l +let zip = BatList.combine +let splitAt x l = BatList.split_at (Z.to_int x) l +let filter_map = BatList.filter_map +let index f l = + Z.of_int (fst (BatList.findi (fun _ x -> f x) l)) + +let rec zip3 l1 l2 l3 = + match l1, l2, l3 with + | [], [], [] -> [] + | h1::t1, h2::t2, h3::t3 -> (h1, h2, h3) :: (zip3 t1 t2 t3) + | _ -> failwith "zip3" +let unique = BatList.unique +let map_flatten f l = flatten (map f l) + +let span = BatList.span + +let deduplicate (f:'a -> 'a -> bool) (l:'a list) : 'a list = BatList.unique ~eq:f l +let fold_left_map = BatList.fold_left_map diff --git a/ulib/ml/app/FStar_List_Tot_Base.ml b/ulib/ml/app/FStar_List_Tot_Base.ml new file mode 100644 index 00000000000..537c03abb2a --- /dev/null +++ b/ulib/ml/app/FStar_List_Tot_Base.ml @@ -0,0 +1,76 @@ +(* We give an implementation here using OCaml's BatList, + which provide tail-recursive versions of most functions. + The rest we implement manually. 
*) + +let isEmpty l = l = [] +let hd = BatList.hd +let tail = BatList.tl +let tl = BatList.tl + +let rec last = function + | x :: [] -> x + | _ :: tl -> last tl + +let rec init = function + | _ :: [] -> [] + | hd :: tl -> hd :: init tl + +let length l = Z.of_int (BatList.length l) +let nth l i = try Some (BatList.nth l (Z.to_int i)) with _ -> None +let index l i = BatList.nth l (Z.to_int i) + +let rec count x = function + | [] -> Prims.int_zero + | hd::tl -> if x=hd then Z.add Prims.int_one (count x tl) else count x tl + +let rev_acc l r = BatList.rev_append l r +let rev = BatList.rev +let append = BatList.append +let op_At = append +let snoc (x, y) = append x [y] +let flatten = BatList.flatten +let map = BatList.map +let mapi_init _ _ _ = failwith "FStar_List_Tot_Base.ml: Not implemented: mapi_init" +let mapi f l = BatList.mapi (fun i x -> f (Z.of_int i) x) l +let concatMap f l = flatten (map f l) +let fold_left = BatList.fold_left +let fold_right = BatList.fold_right +let fold_left2 = BatList.fold_left2 +let mem = BatList.mem +type ('a, 'b, 'c) memP = unit +let contains x l = BatList.exists (fun y -> x = y) l +let existsb f l = BatList.exists f l +let find f l = try Some (BatList.find f l) with | Not_found -> None +let filter = BatList.filter +let for_all = BatList.for_all +let collect f l = BatList.flatten (BatList.map f l) +let tryFind = find +let tryPick f l = try f (BatList.find (fun x -> f x <> None) l) with | Not_found -> None +let choose = BatList.filter_map +let partition = BatList.partition +let subset la lb = BatList.subset (fun x y -> if x = y then 0 else 1) la lb + +let rec noRepeats = function + | [] -> true + | h :: tl -> not (mem h tl) && noRepeats tl + +let assoc x l = match List.assoc x l with exception Not_found -> None | x -> Some x +let split = BatList.split +let unzip = split +let rec unzip3 = function + | [] -> ([],[],[]) + | (x,y,z)::xyzs -> + let (xs,ys,zs) = unzip3 xyzs in + (x::xs,y::ys,z::zs) + +let splitAt n l = BatList.split_at (Z.to_int n) l +let unsnoc l = let l1, l2 = splitAt (Z.sub (length l) Z.one) l in l1, hd l2 +let split3 l i = let a, a1 = splitAt i l in let b :: c = a1 in a, b, c + +let bool_of_compare f x y = Z.gt (f x y) Z.zero +let compare_of_bool = + fun rel -> fun x -> fun y -> if (rel x y) then Z.one else (if x = y then Z.zero else (Z.neg Z.one)) +let sortWith f l = BatList.sort (fun x y -> Z.to_int (f x y)) l +let list_unref l = l +let list_ref _ l = l +let list_refb _ l = l diff --git a/ulib/ml/app/FStar_Monotonic_Heap.ml b/ulib/ml/app/FStar_Monotonic_Heap.ml new file mode 100644 index 00000000000..1c1cc85cb10 --- /dev/null +++ b/ulib/ml/app/FStar_Monotonic_Heap.ml @@ -0,0 +1,36 @@ +type heap = unit + +type nonrec 'a ref = 'a ref + +type ('a, 'b) mref = 'a ref + +let emp = + () + +(* Logical functions on heap *) +(* TODO : complete the functions to have the same interface as in FStar.Heap.fsti *) + +let addr_of _ = Obj.magic () +let is_mm _ = Obj.magic () + +(* let compare_addrs *) + +type ('a, 'b, 'c, 'd) contains +type ('a, 'b) addr_unused_in +type ('a, 'b, 'c, 'd) unused_in +let fresh _ _ _ = Obj.magic () + +let sel _ _ = Obj.magic () +let upd _ _ _ = Obj.magic () +let alloc _ _ _ = Obj.magic () + +let free_mm _ _ = Obj.magic () +let sel_tot = sel +let upd_tot = upd + +(* Untyped view of references *) +type aref = + | Ref of (unit * unit) +let dummy_aref = Ref ((), ()) +let aref_of _ = dummy_aref +let ref_of _ _ = Obj.magic () diff --git a/ulib/ml/app/FStar_Option.ml b/ulib/ml/app/FStar_Option.ml new file mode 100644 index 
00000000000..18b7837e926 --- /dev/null +++ b/ulib/ml/app/FStar_Option.ml @@ -0,0 +1,37 @@ +open Prims +let isNone: 'a . 'a FStar_Pervasives_Native.option -> Prims.bool = + fun uu___10_12 -> + match uu___10_12 with + | FStar_Pervasives_Native.None -> true + | FStar_Pervasives_Native.Some uu____15 -> false +let isSome: 'a . 'a FStar_Pervasives_Native.option -> Prims.bool = + fun uu___11_27 -> + match uu___11_27 with + | FStar_Pervasives_Native.Some uu____30 -> true + | FStar_Pervasives_Native.None -> false +let map: + 'a 'b . + ('a -> 'b) -> + 'a FStar_Pervasives_Native.option -> 'b FStar_Pervasives_Native.option + = + fun f -> + fun uu___12_58 -> + match uu___12_58 with + | FStar_Pervasives_Native.Some x -> + let uu____64 = f x in FStar_Pervasives_Native.Some uu____64 + | FStar_Pervasives_Native.None -> FStar_Pervasives_Native.None +let mapTot: + 'a 'b . + ('a -> 'b) -> + 'a FStar_Pervasives_Native.option -> 'b FStar_Pervasives_Native.option + = + fun f -> + fun uu___13_91 -> + match uu___13_91 with + | FStar_Pervasives_Native.Some x -> FStar_Pervasives_Native.Some (f x) + | FStar_Pervasives_Native.None -> FStar_Pervasives_Native.None +let get: 'a . 'a FStar_Pervasives_Native.option -> 'a = + fun uu___14_108 -> + match uu___14_108 with + | FStar_Pervasives_Native.Some x -> x + | FStar_Pervasives_Native.None -> failwith "empty option" \ No newline at end of file diff --git a/ulib/ml/app/FStar_Pervasives_Native.ml b/ulib/ml/app/FStar_Pervasives_Native.ml new file mode 100644 index 00000000000..0027fcb263a --- /dev/null +++ b/ulib/ml/app/FStar_Pervasives_Native.ml @@ -0,0 +1,285 @@ + +type 'a option' = 'a option = + | None + | Some of 'a[@@deriving yojson,show] + +type 'a option = 'a option' = + | None + | Some of 'a[@@deriving yojson,show] + +let uu___is_None = function None -> true | _ -> false +let uu___is_Some = function Some _ -> true | _ -> false +let __proj__Some__item__v = function Some x -> x | _ -> assert false + +(* 'a * 'b *) +type ('a,'b) tuple2 = 'a * 'b[@@deriving yojson,show] + +let fst = Stdlib.fst +let snd = Stdlib.snd + +let __proj__Mktuple2__item___1 = fst +let __proj__Mktuple2__item___2 = snd + +type ('a,'b,'c) tuple3 = + 'a* 'b* 'c +[@@deriving yojson,show] +let uu___is_Mktuple3 projectee = true +let __proj__Mktuple3__item___1 projectee = + match projectee with | (_1,_2,_3) -> _1 +let __proj__Mktuple3__item___2 projectee = + match projectee with | (_1,_2,_3) -> _2 +let __proj__Mktuple3__item___3 projectee = + match projectee with | (_1,_2,_3) -> _3 + +type ('a,'b,'c,'d) tuple4 = + 'a* 'b* 'c* 'd +[@@deriving yojson,show] +let uu___is_Mktuple4 projectee = true +let __proj__Mktuple4__item___1 projectee = + match projectee with | (_1,_2,_3,_4) -> _1 +let __proj__Mktuple4__item___2 projectee = + match projectee with | (_1,_2,_3,_4) -> _2 +let __proj__Mktuple4__item___3 projectee = + match projectee with | (_1,_2,_3,_4) -> _3 +let __proj__Mktuple4__item___4 projectee = + match projectee with | (_1,_2,_3,_4) -> _4 + +type ('a,'b,'c,'d,'e) tuple5 = + 'a* 'b* 'c* 'd* 'e +[@@deriving yojson,show] +let uu___is_Mktuple5 projectee = true +let __proj__Mktuple5__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5) -> _1 +let __proj__Mktuple5__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5) -> _2 +let __proj__Mktuple5__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5) -> _3 +let __proj__Mktuple5__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5) -> _4 +let __proj__Mktuple5__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5) 
-> _5 + +type ('a,'b,'c,'d,'e,'f) tuple6 = + 'a* 'b* 'c* 'd* 'e* 'f +[@@deriving yojson,show] +let uu___is_Mktuple6 projectee = true +let __proj__Mktuple6__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _1 +let __proj__Mktuple6__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _2 +let __proj__Mktuple6__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _3 +let __proj__Mktuple6__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _4 +let __proj__Mktuple6__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _5 +let __proj__Mktuple6__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6) -> _6 + +type ('a,'b,'c,'d,'e,'f,'g) tuple7 = + 'a* 'b* 'c* 'd* 'e* 'f* 'g +[@@deriving yojson,show] +let uu___is_Mktuple7 projectee = true +let __proj__Mktuple7__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _1 +let __proj__Mktuple7__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _2 +let __proj__Mktuple7__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _3 +let __proj__Mktuple7__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _4 +let __proj__Mktuple7__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _5 +let __proj__Mktuple7__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _6 +let __proj__Mktuple7__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7) -> _7 + +type ('a,'b,'c,'d,'e,'f,'g,'h) tuple8 = + 'a* 'b* 'c* 'd* 'e* 'f* 'g* 'h +[@@deriving yojson,show] +let uu___is_Mktuple8 projectee = true +let __proj__Mktuple8__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _1 +let __proj__Mktuple8__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _2 +let __proj__Mktuple8__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _3 +let __proj__Mktuple8__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _4 +let __proj__Mktuple8__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _5 +let __proj__Mktuple8__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _6 +let __proj__Mktuple8__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _7 +let __proj__Mktuple8__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8) -> _8 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i) tuple9 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i +[@@deriving yojson,show] +let uu___is_Mktuple9 projectee = true +let __proj__Mktuple9__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _1 +let __proj__Mktuple9__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _2 +let __proj__Mktuple9__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _3 +let __proj__Mktuple9__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _4 +let __proj__Mktuple9__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _5 +let __proj__Mktuple9__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _6 +let __proj__Mktuple9__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _7 +let __proj__Mktuple9__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> _8 +let __proj__Mktuple9__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9) -> 
_9 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j) tuple10 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j +[@@deriving yojson,show] +let uu___is_Mktuple10 projectee = true +let __proj__Mktuple10__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _1 +let __proj__Mktuple10__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _2 +let __proj__Mktuple10__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _3 +let __proj__Mktuple10__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _4 +let __proj__Mktuple10__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _5 +let __proj__Mktuple10__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _6 +let __proj__Mktuple10__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _7 +let __proj__Mktuple10__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _8 +let __proj__Mktuple10__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _9 +let __proj__Mktuple10__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10) -> _10 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k) tuple11 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k +[@@deriving yojson,show] +let uu___is_Mktuple11 projectee = true +let __proj__Mktuple11__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _1 +let __proj__Mktuple11__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _2 +let __proj__Mktuple11__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _3 +let __proj__Mktuple11__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _4 +let __proj__Mktuple11__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _5 +let __proj__Mktuple11__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _6 +let __proj__Mktuple11__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _7 +let __proj__Mktuple11__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _8 +let __proj__Mktuple11__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _9 +let __proj__Mktuple11__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _10 +let __proj__Mktuple11__item___11 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11) -> _11 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k,'l) tuple12 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k *'l +[@@deriving yojson,show] +let uu___is_Mktuple12 projectee = true +let __proj__Mktuple12__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _1 +let __proj__Mktuple12__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _2 +let __proj__Mktuple12__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _3 +let __proj__Mktuple12__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _4 +let __proj__Mktuple12__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _5 +let __proj__Mktuple12__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _6 +let 
__proj__Mktuple12__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _7 +let __proj__Mktuple12__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _8 +let __proj__Mktuple12__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _9 +let __proj__Mktuple12__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _10 +let __proj__Mktuple12__item___11 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _11 +let __proj__Mktuple12__item___12 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12) -> _12 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k,'l,'m) tuple13 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k *'l *'m +[@@deriving yojson,show] +let uu___is_Mktuple13 projectee = true +let __proj__Mktuple13__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _1 +let __proj__Mktuple13__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _2 +let __proj__Mktuple13__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _3 +let __proj__Mktuple13__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _4 +let __proj__Mktuple13__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _5 +let __proj__Mktuple13__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _6 +let __proj__Mktuple13__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _7 +let __proj__Mktuple13__item___8 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _8 +let __proj__Mktuple13__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _9 +let __proj__Mktuple13__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _10 +let __proj__Mktuple13__item___11 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _11 +let __proj__Mktuple13__item___12 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _12 +let __proj__Mktuple13__item___13 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13) -> _13 + +type ('a,'b,'c,'d,'e,'f,'g,'h,'i,'j,'k,'l,'m,'n) tuple14 = + 'a *'b *'c *'d *'e *'f *'g *'h *'i *'j *'k *'l *'m *'n +[@@deriving yojson,show] +let uu___is_Mktuple14 projectee = true +let __proj__Mktuple14__item___1 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _1 +let __proj__Mktuple14__item___2 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _2 +let __proj__Mktuple14__item___3 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _3 +let __proj__Mktuple14__item___4 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _4 +let __proj__Mktuple14__item___5 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _5 +let __proj__Mktuple14__item___6 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _6 +let __proj__Mktuple14__item___7 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _7 +let __proj__Mktuple14__item___8 
projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _8 +let __proj__Mktuple14__item___9 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _9 +let __proj__Mktuple14__item___10 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _10 +let __proj__Mktuple14__item___11 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _11 +let __proj__Mktuple14__item___12 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _12 +let __proj__Mktuple14__item___13 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _13 +let __proj__Mktuple14__item___14 projectee = + match projectee with | (_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14) -> _14 diff --git a/ulib/ml/app/FStar_Pprint.ml b/ulib/ml/app/FStar_Pprint.ml new file mode 100644 index 00000000000..83bf2f366ee --- /dev/null +++ b/ulib/ml/app/FStar_Pprint.ml @@ -0,0 +1,95 @@ +(* + Copyright 2016 Microsoft Research + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*) + +(* NOTE!!! This is a copy of FStarC_Pprint that is exposed to applications +via the library, without needing to link against compiler modules. The compiler +itself could also use this but there are some issues with effect polymorphism +(e.g. flow_map would need two versions, and having the ML one in the ulib module +would introduce a lot of dependencies) and also would need to have a single definition +of `float` (the compiler defines its own, though this is probably unneeded and can +be removed). *) + +(* prettyprint.fsti's OCaml implementation is just a thin wrapper around + Francois Pottier's pprint package. 
*) +include PPrint + +(* FIXME(adl) also print the char in a comment if it's representable *) +let doc_of_char c = PPrint.OCaml.char (Char.chr c) +let doc_of_string = PPrint.string +let doc_of_bool b = PPrint.string (string_of_bool b) +let blank_buffer_doc = [ ("", PPrint.empty) ] + +let substring s ofs len = + PPrint.substring s (Z.to_int ofs) (Z.to_int len) + +let fancystring s apparent_length = + PPrint.fancystring s (Z.to_int apparent_length) + +let fancysubstring s ofs len apparent_length = + PPrint.fancysubstring s (Z.to_int ofs) (Z.to_int len) (Z.to_int apparent_length) + +let blank n = PPrint.blank (Z.to_int n) + +let break_ n = PPrint.break (Z.to_int n) + +let op_Hat_Hat = PPrint.(^^) +let op_Hat_Slash_Hat = PPrint.(^/^) + +let nest j doc = PPrint.nest (Z.to_int j) doc + +let long_left_arrow = PPrint.string "<--" +let larrow = PPrint.string "<-" +let rarrow = PPrint.string "->" + +let repeat n doc = PPrint.repeat (Z.to_int n) doc + +let hang n doc = PPrint.hang (Z.to_int n) doc + +let prefix n b left right = + PPrint.prefix (Z.to_int n) (Z.to_int b) left right + +let jump n b right = + PPrint.jump (Z.to_int n) (Z.to_int b) right + +let infix n b middle left right = + PPrint.infix (Z.to_int n) (Z.to_int b) middle left right + +let surround n b opening contents closing = + PPrint.surround (Z.to_int n) (Z.to_int b) opening contents closing + +let soft_surround n b opening contents closing = + PPrint.soft_surround (Z.to_int n) (Z.to_int b) opening contents closing + +let surround_separate n b void_ opening sep closing docs = + PPrint.surround_separate (Z.to_int n) (Z.to_int b) void_ opening sep closing docs + +let surround_separate_map n b void_ opening sep closing f xs = + PPrint.surround_separate_map (Z.to_int n) (Z.to_int b) void_ opening sep closing f xs + +(* Wrap up ToBuffer.pretty. *) +let pretty_string rfrac width doc = + let buf = Buffer.create 0 in + PPrint.ToBuffer.pretty rfrac (Z.to_int width) buf doc; + Buffer.contents buf + +(* Wrap up ToChannel.pretty *) +let pretty_out_channel rfrac width doc ch = + PPrint.ToChannel.pretty rfrac (Z.to_int width) ch doc; + flush ch + +(* A simple renderer, with some default values. 
*) +let render (doc:document) : string = + pretty_string 1.0 (Z.of_int 80) doc diff --git a/ulib/ml/app/FStar_ST.ml b/ulib/ml/app/FStar_ST.ml new file mode 100644 index 00000000000..a27ecf12ba2 --- /dev/null +++ b/ulib/ml/app/FStar_ST.ml @@ -0,0 +1,28 @@ +(* https://www.lexifi.com/blog/references-physical-equality *) + +open FStar_CommonST + +type ('a, 'b) mref = ('a, 'b) FStar_Monotonic_Heap.mref + +type 'a ref = 'a FStar_Monotonic_Heap.ref + +let ref_to_yojson _ _ = `Null +let ref_of_yojson _ _ = failwith "cannot readback" + +let read = read + +let op_Bang = op_Bang + +let write = write + +let op_Colon_Equals = op_Colon_Equals + +let alloc = alloc + +let recall = recall +let get = get + +type 'a witnessed = 'a FStar_CommonST.witnessed + +let gst_witness = gst_witness +let gst_recall = gst_recall diff --git a/ulib/ml/app/FStar_String.ml b/ulib/ml/app/FStar_String.ml new file mode 100644 index 00000000000..45c7ba41578 --- /dev/null +++ b/ulib/ml/app/FStar_String.ml @@ -0,0 +1,43 @@ +let make i c = BatUTF8.init (Z.to_int i) (fun _ -> BatUChar.chr c) +let strcat s t = s ^ t +let op_Hat s t = strcat s t + +(* restore pre-2.11 BatString.nsplit behavior, + see https://github.com/ocaml-batteries-team/batteries-included/issues/845 *) +let batstring_nsplit s t = + if s = "" then [] else BatString.split_on_string t s + +let split seps s = + let rec repeat_split acc = function + | [] -> acc + | sep::seps -> + let usep = BatUTF8.init 1 (fun _ -> BatUChar.chr sep) in + let l = BatList.flatten (BatList.map (fun x -> batstring_nsplit x usep) acc) in + repeat_split l seps in + repeat_split [s] seps +let compare x y = Z.of_int (BatString.compare x y) +type char = FStar_Char.char +let concat = BatString.concat +let length s = Z.of_int (BatUTF8.length s) +let strlen s = length s + +let substring s i j = + BatUTF8.init (Z.to_int j) (fun k -> BatUTF8.get s (k + Z.to_int i)) +let sub = substring + +let get s i = BatUChar.code (BatUTF8.get s (Z.to_int i)) +let collect f s = + let r = ref "" in + BatUTF8.iter (fun c -> r := !r ^ f (BatUChar.code c)) s; !r +let lowercase = BatString.lowercase +let uppercase = BatString.uppercase +let escaped = BatString.escaped +let index = get +exception Found of int +let index_of s c = + let c = BatUChar.chr c in + try let _ = BatUTF8.iteri (fun c' i -> if c = c' then raise (Found i) else ()) s in Z.of_int (-1) + with Found i -> Z.of_int i +let list_of_string s = BatList.init (BatUTF8.length s) (fun i -> BatUChar.code (BatUTF8.get s i)) +let string_of_list l = BatUTF8.init (BatList.length l) (fun i -> BatUChar.chr (BatList.at l i)) +let string_of_char (c:char) = BatString.of_char (Char.chr c) diff --git a/ulib/ml/app/FStar_UInt8.ml b/ulib/ml/app/FStar_UInt8.ml new file mode 100644 index 00000000000..2148ee255f1 --- /dev/null +++ b/ulib/ml/app/FStar_UInt8.ml @@ -0,0 +1,84 @@ +(* GM: This file is manual due to the derivings, + and that sucks. 
*) + +type uint8 = int[@@deriving yojson,show] +type byte = uint8[@@deriving yojson,show] +type t = uint8[@@deriving yojson,show] +type t' = t[@@deriving yojson,show] + +let (%) x y = if x < 0 then (x mod y) + y else x mod y + +let n = Prims.parse_int "8" +let v (x:uint8) : Prims.int = Prims.parse_int (string_of_int x) + +let zero = 0 +let one = 1 +let ones = 255 + +let add (a:uint8) (b:uint8) : uint8 = a + b +let add_underspec a b = (add a b) land 255 +let add_mod = add_underspec + +let sub (a:uint8) (b:uint8) : uint8 = a - b +let sub_underspec a b = (sub a b) land 255 +let sub_mod = sub_underspec + +let mul (a:uint8) (b:uint8) : uint8 = a * b +let mul_underspec a b = (mul a b) land 255 +let mul_mod = mul_underspec + +let div (a:uint8) (b:uint8) : uint8 = a / b + +let rem (a:uint8) (b:uint8) : uint8 = a mod b + +let logand (a:uint8) (b:uint8) : uint8 = a land b +let logxor (a:uint8) (b:uint8) : uint8 = a lxor b +let logor (a:uint8) (b:uint8) : uint8 = a lor b +let lognot (a:uint8) : uint8 = lnot a + +let int_to_uint8 (x:Prims.int) : uint8 = Z.to_int x % 256 + +let shift_right (a:uint8) (b:Stdint.Uint32.t) : uint8 = a lsr (Stdint.Uint32.to_int b) +let shift_left (a:uint8) (b:Stdint.Uint32.t) : uint8 = (a lsl (Stdint.Uint32.to_int b)) land 255 + +(* Comparison operators *) +let eq (a:uint8) (b:uint8) : bool = a = b +let gt (a:uint8) (b:uint8) : bool = a > b +let gte (a:uint8) (b:uint8) : bool = a >= b +let lt (a:uint8) (b:uint8) : bool = a < b +let lte (a:uint8) (b:uint8) : bool = a <= b + +(* NOT Constant time comparison operators *) +let gte_mask (a:uint8) (b:uint8) : uint8 = if a >= b then 255 else 0 +let eq_mask (a:uint8) (b:uint8) : uint8 = if a = b then 255 else 0 + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = lte + +let of_string s = int_of_string s +let to_string s = string_of_int s +let to_string_hex s = Printf.sprintf "0x%x" s +let to_string_hex_pad s = Printf.sprintf "%02x" s +let uint_to_t s = int_to_uint8 s +let to_int s = s +let __uint_to_t = uint_to_t diff --git a/ulib/ml/app/Prims.ml b/ulib/ml/app/Prims.ml new file mode 100644 index 00000000000..b96a81ebdc7 --- /dev/null +++ b/ulib/ml/app/Prims.ml @@ -0,0 +1,195 @@ +type int = Z.t[@printer Z.pp_print][@@deriving show] +let of_int = Z.of_int +let int_zero = Z.zero +let int_one = Z.one +let parse_int = Z.of_string +let to_string = Z.to_string + +type tmp = string [@@deriving yojson] +let int_to_yojson x = tmp_to_yojson (to_string x) +let int_of_yojson x = + match tmp_of_yojson x with + | Ok x -> Ok (parse_int x) + | Error x -> Error x + +type attribute = unit +let (cps : attribute) = () +type 'Auu____5 hasEq = unit +type eqtype = unit +type bool' = bool +[@@deriving yojson,show] +type bool = bool' +[@@deriving yojson,show] +type empty = unit +(*This is how Coq extracts Inductive void := . 
Our extraction needs to be fixed to recognize when there + are no constructors and generate this type abbreviation*) +type trivial = + | T +let (uu___is_T : trivial -> bool) = fun projectee -> true +type nonrec unit = unit +type 'Ap squash = unit +type 'Ap auto_squash = unit +type l_True = unit +type l_False = unit +type ('Aa,'Ax,'dummyV0) equals = + | Refl +let uu___is_Refl : 'Aa . 'Aa -> 'Aa -> ('Aa,unit,unit) equals -> bool = + fun x -> fun uu____65 -> fun projectee -> true +type ('Aa,'Ax,'Ay) eq2 = unit +type ('Aa,'Ab,'Ax,'Ay) op_Equals_Equals_Equals = unit +type 'Ab b2t = unit +type ('Ap,'Aq) pair = + | Pair of 'Ap * 'Aq +let uu___is_Pair : 'Ap 'Aq . ('Ap,'Aq) pair -> bool = + fun projectee -> true +let __proj__Pair__item___1 : 'Ap 'Aq . ('Ap,'Aq) pair -> 'Ap = + fun projectee -> match projectee with | Pair (_0,_1) -> _0 +let __proj__Pair__item___2 : 'Ap 'Aq . ('Ap,'Aq) pair -> 'Aq = + fun projectee -> match projectee with | Pair (_0,_1) -> _1 +type ('Ap,'Aq) l_and = unit +type ('Ap,'Aq) sum = + | Left of 'Ap + | Right of 'Aq +let uu___is_Left : 'Ap 'Aq . ('Ap,'Aq) sum -> bool = + fun projectee -> + match projectee with | Left _0 -> true | uu____344 -> false + +let __proj__Left__item___0 : 'Ap 'Aq . ('Ap,'Aq) sum -> 'Ap = + fun projectee -> match projectee with | Left _0 -> _0 +let uu___is_Right : 'Ap 'Aq . ('Ap,'Aq) sum -> bool = + fun projectee -> + match projectee with | Right _0 -> true | uu____404 -> false + +let __proj__Right__item___0 : 'Ap 'Aq . ('Ap,'Aq) sum -> 'Aq = + fun projectee -> match projectee with | Right _0 -> _0 +type ('Ap,'Aq) l_or = unit +type ('Ap,'Aq) l_imp = unit +type ('Ap,'Aq) l_iff = unit +type 'Ap l_not = unit +type ('Ap,'Aq,'Ar) l_ITE = unit +type ('Aa,'Ab,'Auu____484,'Auu____485) precedes = unit +type ('Aa,'Auu____490,'Auu____491) has_type = unit +type ('Aa,'Ap) l_Forall = unit +type prop = unit +let id x = x +type ('Aa,'Ab) dtuple2 = + | Mkdtuple2 of 'Aa * 'Ab +let uu___is_Mkdtuple2 : 'Aa 'Ab . ('Aa,'Ab) dtuple2 -> bool = + fun projectee -> true +let __proj__Mkdtuple2__item___1 : 'Aa 'Ab . ('Aa,'Ab) dtuple2 -> 'Aa = + fun projectee -> match projectee with | Mkdtuple2 (_1,_2) -> _1 +let __proj__Mkdtuple2__item___2 : 'Aa 'Ab . 
('Aa,'Ab) dtuple2 -> 'Ab = + fun projectee -> match projectee with | Mkdtuple2 (_1,_2) -> _2 +type ('Aa,'Ap) l_Exists = unit +type string' = string[@@deriving yojson,show] +type string = string'[@@deriving yojson,show] +type pure_pre = unit +type ('Aa,'Apre) pure_post' = unit +type 'Aa pure_post = unit +type 'Aa pure_wp = unit +type 'Auu____655 guard_free = unit +type ('Aa,'Ax,'Ap) pure_return = unit +type ('Ar1,'Aa,'Ab,'Awp1,'Awp2,'Ap) pure_bind_wp = 'Awp1 +type ('Aa,'Ap,'Awp_then,'Awp_else,'Apost) pure_if_then_else = unit[@@deriving yojson,show] +type ('Aa,'Awp,'Apost) pure_ite_wp = unit +type ('Aa,'Awp1,'Awp2) pure_stronger = unit +type ('Aa,'Ab,'Awp,'Ap) pure_close_wp = unit +type ('Aa,'Aq,'Awp,'Ap) pure_assert_p = unit +type ('Aa,'Aq,'Awp,'Ap) pure_assume_p = unit +type ('Aa,'Ap) pure_null_wp = unit +type ('Aa,'Awp) pure_trivial = 'Awp +type ('Ap, 'Apost) pure_assert_wp = unit +type ('Aa,'Awp,'Auu____878) purewp_id = 'Awp + + +let op_AmpAmp x y = x && y +let op_BarBar x y = x || y +let op_Negation x = not x + +let ( + ) = Z.add +let ( - ) = Z.sub +let ( * ) = Z.mul +let ( / ) = Z.ediv +let ( <= ) = Z.leq +let ( >= ) = Z.geq +let ( < ) = Z.lt +let ( > ) = Z.gt +let ( mod ) = Z.erem +let ( ~- ) = Z.neg +let abs = Z.abs + +let op_Multiply x y = x * y +let op_Subtraction x y = x - y +let op_Addition x y = x + y +let op_Minus x = -x +let op_LessThan x y = x < y +let op_LessThanOrEqual x y = x <= y +let op_GreaterThan x y = x > y +let op_GreaterThanOrEqual x y = x >= y +let op_Equality x y = x = y +let op_disEquality x y = x<>y + +type nonrec exn = exn +type 'a array' = 'a array[@@deriving yojson,show] +type 'a array = 'a array'[@@deriving yojson,show] +let strcat x y = x ^ y +let op_Hat x y = x ^ y + +type 'a list' = 'a list[@@deriving yojson,show] +type 'a list = 'a list'[@@deriving yojson,show] +let uu___is_Nil : 'Aa . 'Aa list -> bool = + fun projectee -> match projectee with | [] -> true | uu____1190 -> false +let uu___is_Cons : 'Aa . 'Aa list -> bool = + fun projectee -> + match projectee with | hd::tl -> true | uu____1216 -> false + +let __proj__Cons__item__hd : 'Aa . 'Aa list -> 'Aa = + fun projectee -> match projectee with | hd::tl -> hd +let __proj__Cons__item__tl : 'Aa . 'Aa list -> 'Aa list = + fun projectee -> match projectee with | hd::tl -> tl +type pattern = unit + + +type ('Aa,'Auu____1278) decreases = unit +let returnM : 'Aa . 'Aa -> 'Aa = fun x -> x +type lex_t = + | LexTop + | LexCons of unit * Obj.t * lex_t +let (uu___is_LexTop : lex_t -> bool) = + fun projectee -> + match projectee with | LexTop -> true | uu____1313 -> false + +let (uu___is_LexCons : lex_t -> bool) = + fun projectee -> + match projectee with | LexCons (a,_1,_2) -> true | uu____1327 -> false + +type 'Aprojectee __proj__LexCons__item__a = Obj.t +let (__proj__LexCons__item___1 : lex_t -> Obj.t) = + fun projectee -> match projectee with | LexCons (a,_1,_2) -> _1 +let (__proj__LexCons__item___2 : lex_t -> lex_t) = + fun projectee -> match projectee with | LexCons (a,_1,_2) -> _2 +type ('Aa,'Awp) as_requires = 'Awp +type ('Aa,'Awp,'Ax) as_ensures = unit +let admit () = failwith "Prims.admit: cannot be executed" +let magic () = failwith "Prims.magic: cannot be executed" +let unsafe_coerce : 'Aa 'Ab . 
'Aa -> 'Ab = + fun x -> Obj.magic x + +type 'Ap spinoff = 'Ap + + +type nat = int +type pos = int +type nonzero = int +let op_Modulus x y = x mod y +let op_Division x y = x / y +let rec (pow2 : nat -> pos) = + fun x -> + Z.shift_left Z.one (Z.to_int x) + +let (min : int -> int -> int) = + fun x -> fun y -> if x <= y then x else y +let (abs : int -> int) = + fun x -> if x >= (parse_int "0") then x else op_Minus x +let string_of_bool = string_of_bool +let string_of_int = to_string diff --git a/ulib/ml/app/ints/FStar_Ints.ml.body b/ulib/ml/app/ints/FStar_Ints.ml.body new file mode 100644 index 00000000000..de9b5d23188 --- /dev/null +++ b/ulib/ml/app/ints/FStar_Ints.ml.body @@ -0,0 +1,95 @@ +(* This .ml.body file is concatenated to every .ml.prefix file in this + * directory (ulib/ml/) to generate the OCaml realizations for machine + * integers, as they all pretty much share their definitions and are + * based on Stdint. *) + +type t = M.t + +let v (x:t) : Prims.int = Prims.parse_int (M.to_string x) + +let zero = M.zero +let one = M.one +let ones = M.pred M.zero + +(* Reexport add, plus aliases *) +let add = M.add +let add_underspec = M.add +let add_mod = M.add + +(* Reexport sub, plus aliases *) +let sub = M.sub +let sub_underspec = M.sub +let sub_mod = M.sub + +(* Reexport mul, plus aliases *) +let mul = M.mul +let mul_underspec = M.mul +let mul_mod = M.mul + +(* Conversions to Zarith's int *) +let to_int (x:t) : Z.t = Z.of_string (M.to_string x) +let of_int (x:Z.t) : t = M.of_string (Z.to_string x) + +(* Conversion to native ints; these are potentially unsafe and not part + * of the interface: they are meant to be called only from OCaml code + * that is doing the right thing *) +let of_native_int (x:int) : t = M.of_int x +let to_native_int (x:t) : int = M.to_int x + +(* Just reexport these *) +let div = M.div +let rem = M.rem +let logand = M.logand +let logxor = M.logxor +let logor = M.logor +let lognot = M.lognot +let to_string = M.to_string +let of_string = M.of_string + +let to_string_hex = M.to_string_hex + +let to_string_hex_pad i = + let s0 = M.to_string_hex i in + let len = (String.length s0 - 2) in + let s1 = String.sub s0 2 len in (* Remove leading "0x" *) + let zeroes = String.make ((Z.to_int n / 4) - len) '0' in + zeroes ^ s1 + +(* The shifts take a uint32 argument, so we need to convert *) +let shift_right n i = M.shift_right n (Stdint.Uint32.to_int i) +let shift_left n i = M.shift_left n (Stdint.Uint32.to_int i) +let shift_arithmetic_right = shift_right + +(* Comparison operators *) +let eq (a:t) (b:t) : bool = a = b +let gt (a:t) (b:t) : bool = a > b +let gte (a:t) (b:t) : bool = a >= b +let lt (a:t) (b:t) : bool = a < b +let lte (a:t) (b:t) : bool = a <= b + +(* NOT Constant time operators *) +let eq_mask (a:t) (b:t) : t = if a = b then ones else zero +let gte_mask (a:t) (b:t) : t = if a >= b then ones else zero + +(* Infix notations *) +let op_Plus_Hat = add +let op_Plus_Question_Hat = add_underspec +let op_Plus_Percent_Hat = add_mod +let op_Subtraction_Hat = sub +let op_Subtraction_Question_Hat = sub_underspec +let op_Subtraction_Percent_Hat = sub_mod +let op_Star_Hat = mul +let op_Star_Question_Hat = mul_underspec +let op_Star_Percent_Hat = mul_mod +let op_Slash_Hat = div +let op_Percent_Hat = rem +let op_Hat_Hat = logxor +let op_Amp_Hat = logand +let op_Bar_Hat = logor +let op_Less_Less_Hat = shift_left +let op_Greater_Greater_Hat = shift_right +let op_Equals_Hat = eq +let op_Greater_Hat = gt +let op_Greater_Equals_Hat = gte +let op_Less_Hat = lt +let op_Less_Equals_Hat = 
lte diff --git a/ulib/ml/app/ints/dune b/ulib/ml/app/ints/dune new file mode 100644 index 00000000000..cb984f42488 --- /dev/null +++ b/ulib/ml/app/ints/dune @@ -0,0 +1,41 @@ + +; This one is special and hand-written... sigh +; (rule +; (target FStar_UInt8.ml) +; (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) +; (action (with-stdout-to %{target} (run ./mk_int_file.sh U 8)))) + +(rule + (target FStar_UInt16.ml) + (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) + (action (with-stdout-to %{target} (run ./mk_int_file.sh U 16)))) + +(rule + (target FStar_UInt32.ml) + (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) + (action (with-stdout-to %{target} (run ./mk_int_file.sh U 32)))) + +(rule + (target FStar_UInt64.ml) + (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) + (action (with-stdout-to %{target} (run ./mk_int_file.sh U 64)))) + +(rule + (target FStar_Int8.ml) + (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) + (action (with-stdout-to %{target} (run ./mk_int_file.sh S 8)))) + +(rule + (target FStar_Int16.ml) + (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) + (action (with-stdout-to %{target} (run ./mk_int_file.sh S 16)))) + +(rule + (target FStar_Int32.ml) + (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) + (action (with-stdout-to %{target} (run ./mk_int_file.sh S 32)))) + +(rule + (target FStar_Int64.ml) + (deps (:script mk_int_file.sh) (:body FStar_Ints.ml.body)) + (action (with-stdout-to %{target} (run ./mk_int_file.sh S 64)))) diff --git a/ulib/ml/app/ints/mk_int_file.sh b/ulib/ml/app/ints/mk_int_file.sh new file mode 100755 index 00000000000..0d257da79b1 --- /dev/null +++ b/ulib/ml/app/ints/mk_int_file.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +set -eu + +SIGN=$1 +WIDTH=$2 + +if [ "$SIGN" == "U" ]; then + cat << EOF + module M = Stdint.Uint${WIDTH} + type uint${WIDTH} = M.t + let n = Prims.of_int ${WIDTH} + + let uint_to_t x = M.of_string (Z.to_string x) + let __uint_to_t = uint_to_t +EOF +elif [ "$SIGN" == "S" ]; then + cat << EOF + module M = Stdint.Int${WIDTH} + type int${WIDTH} = M.t + let n = Prims.of_int ${WIDTH} + + let int_to_t x = M.of_string (Z.to_string x) + let __int_to_t = int_to_t +EOF +else + echo "Bad usage" &>2 + exit 1 +fi + +cat ./FStar_Ints.ml.body +exit 0 diff --git a/ulib/ml/plugin/FStar_Issue.ml b/ulib/ml/plugin/FStar_Issue.ml new file mode 100644 index 00000000000..7a1c49b10ed --- /dev/null +++ b/ulib/ml/plugin/FStar_Issue.ml @@ -0,0 +1,55 @@ +open Fstarcompiler +type issue_level = FStarC_Errors.issue_level +type issue = FStarC_Errors.issue +type issue_level_string = string + +open FStarC_Errors + +let string_of_level (i:issue_level) += match i with + | ENotImplemented + | EError -> "Error" + | EInfo -> "Info" + | EWarning -> "Warning" + +let message_of_issue (i:issue) = i.issue_msg + +let level_of_issue (i:issue) = string_of_level (i.issue_level) + +let number_of_issue (i:issue) = i.issue_number + +let range_of_issue (i:issue) = i.issue_range + +let context_of_issue (i:issue) = i.issue_ctx + +let mk_issue_level (i:issue_level_string) + : issue_level + = match i with + | "Error" -> EError + | "Info" -> EInfo + | "Warning" -> EWarning + +let render_issue (i:issue) : string = FStarC_Errors.format_issue i + +let mk_issue_doc (i:issue_level_string) + (msg:FStarC_Pprint.document list) + (range:FStarC_Compiler_Range.range option) + (number:Z.t option) + (ctx:string list) + = { issue_level = mk_issue_level i; + issue_msg = msg; + issue_range = range; + issue_number = number; + issue_ctx = ctx } + +(* 
repeated... could be extracted *) +let mk_issue (i:issue_level_string) + (msg:string) + (range:FStarC_Compiler_Range.range option) + (number:Z.t option) + (ctx:string list) + = { issue_level = mk_issue_level i; + issue_msg = [FStarC_Pprint.arbitrary_string msg]; + issue_range = range; + issue_number = number; + issue_ctx = ctx } diff --git a/ulib/ml/plugin/FStar_Range.ml b/ulib/ml/plugin/FStar_Range.ml new file mode 100644 index 00000000000..f0b0bce3405 --- /dev/null +++ b/ulib/ml/plugin/FStar_Range.ml @@ -0,0 +1,15 @@ +type __range = Fstarcompiler.FStarC_Compiler_Range_Type.range +type range = __range + +let mk_range f a b c d = Fstarcompiler.FStarC_Compiler_Range_Type.mk_range f {line=a;col=b} {line=c;col=d} +let range_0 : range = let z = Prims.parse_int "0" in mk_range "dummy" z z z z +let join_range r1 r2 = Fstarcompiler.FStarC_Compiler_Range_Ops.union_ranges r1 r2 + +let explode (r:__range) = + (r.use_range.file_name, + r.use_range.start_pos.line, + r.use_range.start_pos.col, + r.use_range.end_pos.line, + r.use_range.end_pos.col) + +type ('Ar,'Amsg,'Ab) labeled = 'Ab diff --git a/ulib/ml/plugin/FStar_Reflection_Typing_Builtins.ml b/ulib/ml/plugin/FStar_Reflection_Typing_Builtins.ml new file mode 100644 index 00000000000..beff4838244 --- /dev/null +++ b/ulib/ml/plugin/FStar_Reflection_Typing_Builtins.ml @@ -0,0 +1,28 @@ +open Fstarcompiler +open FStarC_Syntax_Syntax +module R = FStarC_Compiler_Range + +let dummy_range = R.dummyRange +let underscore = FStarC_Ident.mk_ident ("_", R.dummyRange) +let int_as_bv (n:Prims.int) = { ppname = underscore; index = n; sort = tun} + +let open_term (t:term) (v:Prims.int) + : term + = let subst = DB (Z.zero, int_as_bv v) in + FStarC_Syntax_Subst.subst [subst] t + +let close_term (t:term) (v:Prims.int) + : term + = let subst = NM (int_as_bv v, Z.zero) in + FStarC_Syntax_Subst.subst [subst] t + +let open_with (t:term) (v:term) + : term + = let neg = int_as_bv (Z.of_int (-1)) in (* a temporary non-clashing name *) + let opened_t = FStarC_Syntax_Subst.subst [DB(Z.zero, neg)] t in + (* gets substituted away immediately *) + FStarC_Syntax_Subst.subst [NT(neg, v)] opened_t + +let rename (t:term) (x:Prims.int) (y:Prims.int) + : term + = FStarC_Syntax_Subst.subst [NT(int_as_bv x, bv_to_name (int_as_bv y))] t diff --git a/ulib/ml/plugin/FStar_Sealed.ml b/ulib/ml/plugin/FStar_Sealed.ml new file mode 100644 index 00000000000..622ae81bcd3 --- /dev/null +++ b/ulib/ml/plugin/FStar_Sealed.ml @@ -0,0 +1,4 @@ +type 'a sealed = 'a +let seal x = x +let map_seal s f = f s +let bind_seal s f = f s diff --git a/version.txt b/version.txt index d1ca6ef0dfe..629c46bfa83 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2024.12.03~dev +2024.12.03
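As a quick sanity check of the application-library pieces added above, the following hypothetical snippet (not part of the patch; it assumes zarith is installed and the `app` modules are linked) exercises the FStar_Bytes hex helpers and the arbitrary-precision Prims integers:

(* bytes_of_hex/hex_of_bytes should round-trip on an even-length,
   lowercase hex string; Prims integers are Z.t values under the hood. *)
let () =
  let b = FStar_Bytes.bytes_of_hex "deadbeef" in
  assert (FStar_Bytes.hex_of_bytes b = "deadbeef");
  let n = Prims.parse_int "12345678901234567890" in
  print_endline (Prims.to_string (Prims.( + ) n Prims.int_one))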