Mirror of https://github.com/signalapp/SparsePostQuantumRatchet.git, synced 2025-11-01 11:40:54 +00:00.

Commit 02c99a6b24 (squashed history): 93 changed files with 17771 additions and 0 deletions.
.github/workflows/hax.yml (vendored, Normal file, 26 lines)
@@ -0,0 +1,26 @@
name: hax

on:
  push:

jobs:
  fstar-type-checking:
    runs-on: "ubuntu-latest"

    steps:
      - uses: actions/checkout@v4

      - name: ⤵ Install and configure hax
        uses: hacspec/hax-actions@main
        with:
          fstar: v2025.02.17

      - run: sudo apt-get install protobuf-compiler

      - name: 🏃 Extract F*
        run: |
          rm -f proofs/fstar/extraction/*.fst*
          ./hax.py extract
      - name: 🏃 Type-check extracted F*
        run: ./hax.py prove
.github/workflows/test.yml (vendored, Normal file, 64 lines)
@@ -0,0 +1,64 @@
name: CI
on: [push]

jobs:
  test:
    name: cargo test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
          components: clippy
      - run: sudo apt-get install protobuf-compiler
      - run: ./check_copyrights.sh
      - run: cargo test --all-features
      - run: cargo clippy --workspace --all-targets --all-features --keep-going -- -D warnings
  msrv:
    name: MSRV
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: 1.80.1
          components: clippy
      - run: sudo apt-get install protobuf-compiler
      - run: cargo +1.80.1 build
  crosspoly_i586:
    name: cross test polynomial i586
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
      - run: wget https://github.com/cross-rs/cross/releases/download/v0.2.5/cross-x86_64-unknown-linux-gnu.tar.gz
      - run: tar xvzf cross-x86_64-unknown-linux-gnu.tar.gz
      - run: chmod u+x cross
      - run: ./cross test --target i586-unknown-linux-gnu polynomial
  crosspoly_i686:
    name: cross test polynomial i686
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
      - run: wget https://github.com/cross-rs/cross/releases/download/v0.2.5/cross-x86_64-unknown-linux-gnu.tar.gz
      - run: tar xvzf cross-x86_64-unknown-linux-gnu.tar.gz
      - run: chmod u+x cross
      - run: ./cross test --target i686-unknown-linux-gnu polynomial
  crosspoly_aarch64:
    name: cross test polynomial aarch64
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
      - run: wget https://github.com/cross-rs/cross/releases/download/v0.2.5/cross-x86_64-unknown-linux-gnu.tar.gz
      - run: tar xvzf cross-x86_64-unknown-linux-gnu.tar.gz
      - run: chmod u+x cross
      - run: ./cross test --target aarch64-unknown-linux-gnu polynomial
.gitignore (vendored, Normal file, 4 lines)
@@ -0,0 +1,4 @@
/target
tarpaulin-report.html
rust-toolchain.toml
/.fstar-cache
Cargo.lock (generated, Normal file, 1175 lines)
File diff suppressed because it is too large.
Cargo.toml (Normal file, 55 lines)
@@ -0,0 +1,55 @@
[package]
name = "spqr"
version = "0.1.0"
edition = "2021"
rust-version = "1.80.1"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
curve25519-dalek = { version = "4.1.3", features = ["rand_core"] }
displaydoc = "0.2"
hkdf = "0.12"
libcrux-hkdf = "0.0.2"
libcrux-hmac = "0.0.2"
num_enum = "0.7.3"
prost = "0.13.1"
rand = "0.9"
rand_core = "0.9"
sha2 = "0.10"
sorted-vec = "0.8.6"
thiserror = "1.0.57"
hax-lib = { git = "https://github.com/cryspen/hax/" }

# TODO: update this once libcrux has incremental in a known version.
libcrux-ml-kem = { version = "0.0.2", features = ["incremental"] }
unroll = "0.1.5"

[dev-dependencies]
galois_field_2pm = "0.1.0"
matches = "0.1.10"
rand_08 = { package = "rand", version = "0.8" }
rand_distr = "0.5.1"
hmac = "0.12.1"
sha2 = "0.10.8"

[build-dependencies]
prost-build = "0.13.1"

[features]
proof = []

[target.'cfg(not(any(windows, target_arch = "x86")))'.dependencies]
# sha2's asm implementation uses standalone .S files that aren't compiled correctly on Windows,
# and aren't linked correctly on x86 Android.
# This will be fixed in sha2 0.11, which also removes the "asm" feature and turns it on by default.
# So when sha2 0.11 is released, this section will go away.
sha2 = { version = "0.10", features = ["asm"] }

[target.'cfg(any(target_arch = "aarch64", target_arch = "x86_64", target_arch = "x86"))'.dependencies]
cpufeatures = "0.2"

[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = [
    'cfg(hax)',
] }
Cross.toml (Normal file, 7 lines)
@@ -0,0 +1,7 @@
[build]
pre-build = [
    "apt-get install -y zip wget",
    "wget https://github.com/protocolbuffers/protobuf/releases/download/v29.3/protoc-29.3-linux-x86_64.zip",
    "unzip protoc-29.3-linux-x86_64.zip -d protoc",
    "mv protoc/bin/protoc /usr/bin",
]
LICENSE (Normal file, 661 lines)
@@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 19 November 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU Affero General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works, specifically designed to ensure
|
||||||
|
cooperation with the community in the case of network server software.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
our General Public Licenses are intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
Developers that use our General Public Licenses protect your rights
|
||||||
|
with two steps: (1) assert copyright on the software, and (2) offer
|
||||||
|
you this License which gives you legal permission to copy, distribute
|
||||||
|
and/or modify the software.
|
||||||
|
|
||||||
|
A secondary benefit of defending all users' freedom is that
|
||||||
|
improvements made in alternate versions of the program, if they
|
||||||
|
receive widespread use, become available for other developers to
|
||||||
|
incorporate. Many developers of free software are heartened and
|
||||||
|
encouraged by the resulting cooperation. However, in the case of
|
||||||
|
software used on network servers, this result may fail to come about.
|
||||||
|
The GNU General Public License permits making a modified version and
|
||||||
|
letting the public access it on a server without ever releasing its
|
||||||
|
source code to the public.
|
||||||
|
|
||||||
|
The GNU Affero General Public License is designed specifically to
|
||||||
|
ensure that, in such cases, the modified source code becomes available
|
||||||
|
to the community. It requires the operator of a network server to
|
||||||
|
provide the source code of the modified version running there to the
|
||||||
|
users of that server. Therefore, public use of a modified version, on
|
||||||
|
a publicly accessible server, gives the public access to the source
|
||||||
|
code of the modified version.
|
||||||
|
|
||||||
|
An older license, called the Affero General Public License and
|
||||||
|
published by Affero, was designed to accomplish similar goals. This is
|
||||||
|
a different license, not a version of the Affero GPL, but Affero has
|
||||||
|
released a new version of the Affero GPL which permits relicensing under
|
||||||
|
this license.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, if you modify the
|
||||||
|
Program, your modified version must prominently offer all users
|
||||||
|
interacting with it remotely through a computer network (if your version
|
||||||
|
supports such interaction) an opportunity to receive the Corresponding
|
||||||
|
Source of your version by providing access to the Corresponding Source
|
||||||
|
from a network server at no charge, through some standard or customary
|
||||||
|
means of facilitating copying of software. This Corresponding Source
|
||||||
|
shall include the Corresponding Source for any work covered by version 3
|
||||||
|
of the GNU General Public License that is incorporated pursuant to the
|
||||||
|
following paragraph.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the work with which it is combined will remain governed by version
|
||||||
|
3 of the GNU General Public License.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU Affero General Public License from time to time. Such new versions
|
||||||
|
will be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU Affero General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU Affero General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU Affero General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU Affero General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Affero General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If your software can interact with users remotely through a computer
|
||||||
|
network, you should also make sure that it provides a way for users to
|
||||||
|
get its source. For example, if your program is a web application, its
|
||||||
|
interface could display a "Source" link that leads users to an archive
|
||||||
|
of the code. There are many ways you could offer source, and different
|
||||||
|
solutions will be better for different programs; see section 13 for the
|
||||||
|
specific requirements.
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||||
|
<https://www.gnu.org/licenses/>.
|
||||||
README.md (Normal file, 77 lines)
@@ -0,0 +1,77 @@
# SPQR: The Sparse Post Quantum Ratchet

## Table of Contents
- [Overview](#overview)
- [Testing](#testing)
- [Formal Verification](#formal-verification)
- [Contributing](#contributing)
- [License](#license)

## Overview
SPQR is a Rust implementation of Signal's post-quantum secure ratchet protocol.
It can be used with a messaging protocol to provide post-quantum Forward Secrecy
(FS) and Post Compromise Security (PCS). Rather than implementing a full secure
messaging protocol, this implementation outputs *message keys* that can be used
to encrypt messages being sent and decrypt messages being received. This allows
easy integration of SPQR into a *hybrid secure* protocol, as described in
Signal's online documentation.

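As a minimal sketch of what deriving those message keys looks like, the calls below are assembled from the bench harness in `benches/chain.rs` (included later in this commit); the concrete return types and the exact pairing of send and receive counters are not shown there, so treat those details as illustrative assumptions rather than the full API:

```
// Sketch only: calls mirror benches/chain.rs; return types are assumed.
use spqr::{chain, Direction, EpochSecret};

fn sketch() {
    // A chain for the A-to-B direction; b"1" stands in for real session info.
    let mut sending = chain::Chain::new(b"1", Direction::A2B);
    // New epoch secrets normally come out of the ML-KEM braid in src/v1;
    // the benches inject one by hand, and this sketch does the same.
    sending.add_epoch(EpochSecret { epoch: 1, secret: vec![1] });
    // Each outgoing message takes the next key from the sending chain.
    let _message_key = sending.send_key(0).unwrap();

    // The receiving side asks for keys by (epoch, counter), possibly out of
    // order, which is what makes skipped and reordered messages decryptable.
    let mut receiving = chain::Chain::new(b"1", Direction::A2B);
    let _late_key = receiving.recv_key(0, 2).unwrap();
    let _early_key = receiving.recv_key(0, 1).unwrap();
}
```
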
Notable modules include:
* [`chain.rs`](src/chain.rs): Implements the symmetric ratchet that provides
  forward secrecy.
* [`encoding/polynomial.rs`](src/encoding/polynomial.rs): Implements
  Reed-Solomon based systematic erasure codes used to robustly turn a long
  message into a stream of chunks so that, for an *N*-chunk message, as long as
  *any* N chunks are received, the message can be reconstructed (a sketch of
  the encode/decode round trip follows this list).
* [`v1`](src/v1/): Implements the *ML-KEM Braid Protocol*, which serves as the
  public ratchet part of this protocol, replacing the Diffie-Hellman ratchet in
  the classical Double Ratchet protocol. A detailed description of this protocol
  can be found in Signal's online documentation.

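The following round-trip sketch is assembled from `benches/polynomial.rs` (included later in this commit); the 1088-byte message and the 32-byte chunk size are the values used there, and the exact type returned by `decoded_message` is an assumption:

```
// Sketch only: calls mirror benches/polynomial.rs.
use spqr::encoding::polynomial::{PolyDecoder, PolyEncoder};
use spqr::encoding::{Chunk, Decoder, Encoder};

fn sketch() {
    let message = [3u8; 1088];
    let chunks_needed = 1088 / 32;

    // The code is systematic, and the encoder can produce a chunk at any index.
    let mut encoder = PolyEncoder::encode_bytes(&message).expect("encode_bytes");

    // Take *any* `chunks_needed` distinct chunks; here none of them are the
    // systematic ones, as in the decode_all_chunks benchmark.
    let chunks: Vec<Chunk> = (chunks_needed..2 * chunks_needed)
        .map(|i| encoder.chunk_at(i))
        .collect();

    // Feed the received chunks to a decoder sized for the same message length.
    let mut decoder = PolyDecoder::new(1088).expect("new decoder");
    for chunk in &chunks {
        decoder.add_chunk(chunk);
    }
    // Once enough chunks have arrived, the original 1088 bytes come back out.
    let _decoded = decoder.decoded_message().unwrap();
}
```
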
## Testing
To run unit tests:
```
cargo test
```
For benchmarks (requires Rust nightly):
```
cargo +nightly bench
```

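Each file under `benches/` should be picked up by Cargo as its own bench target, so a single suite (or a single benchmark, via a name filter) can be run on its own, for example:

```
cargo +nightly bench --bench polynomial   # only the erasure-code benchmarks
cargo +nightly bench --bench gf mul       # only gf benchmarks whose names contain "mul"
```
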
## Formal Verification
This crate is machine verified using [hax](https://github.com/cryspen/hax) and
[F*](https://fstar-lang.org/) to be panic free, and the finite field arithmetic
is machine verified to be correct. The formal verification is performed as part
of the CI workflow for this repository. To use the formal verification tools
locally, you will need to:
1. Set up hax and F*
   [(instructions)](https://hacspec.org/book/quick_start/intro.html)
2. Ensure you have `python3` installed
3. In the root directory of this repository, run `python3 hax.py extract` to
   extract F* from the Rust source code.
4. In the root directory of this repository, run `python3 hax.py prove` to prove
   the crate is panic free and correct (both commands are collected below).

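For reference, once hax, F*, and `python3` are set up, a full local verification run is the same two steps the CI workflow in `.github/workflows/hax.yml` performs:

```
python3 hax.py extract   # regenerate proofs/fstar/extraction/*.fst* from the Rust sources
python3 hax.py prove     # type-check the extracted F* for panic freedom and correctness
```
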
Additionally, this crate contains handwritten
[ProVerif](https://bblanche.gitlabpages.inria.fr/proverif/) models of the ML-KEM
Braid (implemented in [src/v1](src/v1/)) and the symmetric ratchet (implemented
in [chain.rs](src/chain.rs)). These can be used to prove security properties of
the protocol.

## Contributing
Signal does accept external contributions to this project. However, unless the
change is simple and easily understood (for example, fixing a bug or portability
issue, adding a new test, or improving performance), first open an issue to
discuss your intended change, as not all changes can be accepted.

Contributions that will not be used directly by one of Signal's official client
apps may still be considered, but only if they do not pose an undue maintenance
burden or conflict with the goals of the project.

Signing a [CLA (Contributor License Agreement)](https://signal.org/cla/) is
required for all contributions.

## License
This project is licensed under the [AGPLv3](LICENSE).
SECURITY.md (Normal file, 11 lines)
@@ -0,0 +1,11 @@
## Reporting a Vulnerability

If you've found a security vulnerability in this repository,
please report it via email to <security@signal.org>.

Please only use this address to report security flaws in the Signal application (including this
repository). For questions, support, or feature requests concerning the app, please submit a
[support request][] or join the [unofficial community forum][].

[support request]: https://support.signal.org/hc/requests/new
[unofficial community forum]: https://community.signalusers.org/
benches/chain.rs (Normal file, 71 lines)
@@ -0,0 +1,71 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

#![feature(test)]

extern crate test;

#[cfg(test)]
mod tests {
    use spqr::{chain, Direction, EpochSecret};
    use test::{black_box, Bencher};

    #[bench]
    fn add_epoch(b: &mut Bencher) {
        let mut c = chain::Chain::new(b"1", Direction::A2B);
        let mut e: u64 = 0;
        b.iter(|| {
            // Inner closure, the actual test
            e += 1;
            c.add_epoch(EpochSecret {
                epoch: e,
                secret: vec![1],
            });
            black_box(());
        });
    }

    #[bench]
    fn send_key(b: &mut Bencher) {
        let mut c = chain::Chain::new(b"1", Direction::A2B);
        b.iter(|| {
            // Inner closure, the actual test
            black_box(c.send_key(0).unwrap());
        });
    }

    #[bench]
    fn recv_key(b: &mut Bencher) {
        let mut c = chain::Chain::new(b"1", Direction::A2B);
        let mut k: u32 = 0;
        b.iter(|| {
            // Inner closure, the actual test
            k += 1;
            black_box(c.recv_key(0, k).unwrap());
        });
    }

    #[bench]
    fn recv_skip_key(b: &mut Bencher) {
        let mut c = chain::Chain::new(b"1", Direction::A2B);
        let mut k: u32 = 0;
        b.iter(|| {
            // Inner closure, the actual test
            k += 2;
            black_box(c.recv_key(0, k).unwrap());
            black_box(c.recv_key(0, k - 1).unwrap());
        });
    }

    #[bench]
    fn recv_with_truncate(b: &mut Bencher) {
        let mut c = chain::Chain::new(b"1", Direction::A2B);
        let mut k: u32 = 0;
        b.iter(|| {
            // Inner closure, the actual test
            k += 2;
            black_box(c.recv_key(0, k).unwrap());
            // k-1 stays around and will eventually be truncated.
        });
    }
}
benches/gf.rs (Normal file, 69 lines)
@@ -0,0 +1,69 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

#![feature(test)]

extern crate test;

#[cfg(test)]
mod tests {
    use spqr::encoding::gf;
    use test::{black_box, Bencher};

    #[bench]
    fn add(b: &mut Bencher) {
        let p1 = gf::GF16 { value: 0x1234 };
        let p2 = gf::GF16 { value: 0x4567 };
        b.iter(|| {
            // Inner closure, the actual test
            black_box(p1 + p2);
        });
    }
    #[bench]
    fn mul(b: &mut Bencher) {
        let p1 = gf::GF16 { value: 0x1234 };
        let p2 = gf::GF16 { value: 0x4567 };
        b.iter(|| {
            // Inner closure, the actual test
            black_box(p1 * p2);
        });
    }
    #[bench]
    fn sub(b: &mut Bencher) {
        let p1 = gf::GF16 { value: 0x1234 };
        let p2 = gf::GF16 { value: 0x4567 };
        b.iter(|| {
            // Inner closure, the actual test
            black_box(p1 - p2);
        });
    }
    #[bench]
    fn div(b: &mut Bencher) {
        let p1 = gf::GF16 { value: 0x1234 };
        let p2 = gf::GF16 { value: 0x4567 };
        b.iter(|| {
            // Inner closure, the actual test
            black_box(p1 / p2);
        });
    }
    #[bench]
    fn parallel_mult_2(b: &mut Bencher) {
        let p1 = gf::GF16 { value: 0x1234 };
        let mut p2 = [gf::GF16 { value: 0x4567 }; 2];
        b.iter(|| {
            // Inner closure, the actual test
            gf::parallel_mult(p1, &mut p2);
            black_box(p2);
        });
    }
    #[bench]
    fn parallel_mult_16(b: &mut Bencher) {
        let p1 = gf::GF16 { value: 0x1234 };
        let mut p2 = [gf::GF16 { value: 0x4567 }; 16];
        b.iter(|| {
            // Inner closure, the actual test
            gf::parallel_mult(p1, &mut p2);
            black_box(p2);
        });
    }
}
92
benches/polynomial.rs
Normal file
92
benches/polynomial.rs
Normal file
|
|
@ -0,0 +1,92 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
#![feature(test)]
|
||||||
|
|
||||||
|
extern crate test;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use prost::Message;
|
||||||
|
use spqr::encoding::polynomial::{PolyDecoder, PolyEncoder};
|
||||||
|
use spqr::encoding::{Chunk, Decoder, Encoder};
|
||||||
|
use spqr::proto::pq_ratchet as pqrpb;
|
||||||
|
use test::{black_box, Bencher};
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn encode_bytes(b: &mut Bencher) {
|
||||||
|
b.iter(|| {
|
||||||
|
black_box(PolyEncoder::encode_bytes(&[3u8; 1088]).expect("encode_bytes"));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn chunk_at_encode(b: &mut Bencher) {
|
||||||
|
let chunks_needed = 1088 / 32;
|
||||||
|
let mut encoder = PolyEncoder::encode_bytes(&[3u8; 1088]).expect("encode_bytes");
|
||||||
|
b.iter(|| {
|
||||||
|
black_box(encoder.chunk_at(chunks_needed + 3));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn decode_one_chunk(b: &mut Bencher) {
|
||||||
|
let chunks_needed = 1088 / 32;
|
||||||
|
let mut chunks = Vec::<Chunk>::new();
|
||||||
|
let mut encoder = PolyEncoder::encode_bytes(&[3u8; 1088]).expect("encode_bytes");
|
||||||
|
for i in 1..chunks_needed + 1 {
|
||||||
|
chunks.push(encoder.chunk_at(i));
|
||||||
|
}
|
||||||
|
b.iter(|| {
|
||||||
|
let mut decoder = PolyDecoder::new(1088).expect("for_message_type");
|
||||||
|
for chunk in &chunks {
|
||||||
|
decoder.add_chunk(chunk);
|
||||||
|
}
|
||||||
|
black_box(decoder.decoded_message().unwrap());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn decode_all_chunks(b: &mut Bencher) {
|
||||||
|
let chunks_needed = 1088 / 32;
|
||||||
|
let mut chunks = Vec::<Chunk>::new();
|
||||||
|
let mut encoder = PolyEncoder::encode_bytes(&[3u8; 1088]).expect("encode_bytes");
|
||||||
|
for i in chunks_needed..chunks_needed * 2 {
|
||||||
|
chunks.push(encoder.chunk_at(i));
|
||||||
|
}
|
||||||
|
b.iter(|| {
|
||||||
|
let mut decoder = PolyDecoder::new(1088).expect("for_message_type");
|
||||||
|
for chunk in &chunks {
|
||||||
|
decoder.add_chunk(chunk);
|
||||||
|
}
|
||||||
|
black_box(decoder.decoded_message().unwrap());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn encoder_from_pb(b: &mut Bencher) {
|
||||||
|
let encoder = PolyEncoder::encode_bytes(&[3u8; 1088]).expect("encode_bytes");
|
||||||
|
let bytes = encoder.into_pb().encode_to_vec();
|
||||||
|
b.iter(|| {
|
||||||
|
black_box(PolyEncoder::from_pb(
|
||||||
|
pqrpb::PolynomialEncoder::decode(bytes.as_slice()).unwrap(),
|
||||||
|
))
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn decoder_to_from_pb(b: &mut Bencher) {
|
||||||
|
let chunks_needed = 1088 / 32;
|
||||||
|
let mut encoder = PolyEncoder::encode_bytes(&[3u8; 1088]).expect("encode_bytes");
|
||||||
|
let mut decoder = PolyDecoder::new(1088).expect("for_message_type");
|
||||||
|
for i in 1..chunks_needed {
|
||||||
|
decoder.add_chunk(&encoder.chunk_at(i));
|
||||||
|
}
|
||||||
|
let bytes = decoder.into_pb().encode_to_vec();
|
||||||
|
b.iter(|| {
|
||||||
|
black_box(PolyDecoder::from_pb(
|
||||||
|
pqrpb::PolynomialDecoder::decode(bytes.as_slice()).unwrap(),
|
||||||
|
))
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
85
benches/spqr.rs
Normal file
85
benches/spqr.rs
Normal file
|
|
@ -0,0 +1,85 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
#![feature(test)]
|
||||||
|
|
||||||
|
extern crate test;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use rand::TryRngCore;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
use spqr::*;
|
||||||
|
use test::{black_box, Bencher};
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn init_a(b: &mut Bencher) {
|
||||||
|
b.iter(|| {
|
||||||
|
// Inner closure, the actual test
|
||||||
|
black_box(Version::MAX.initial_alice_state(b"1", Version::V1));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
#[bench]
|
||||||
|
fn init_b(b: &mut Bencher) {
|
||||||
|
b.iter(|| {
|
||||||
|
// Inner closure, the actual test
|
||||||
|
black_box(Version::MAX.initial_bob_state(b"1", Version::V1));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
#[bench]
|
||||||
|
fn send_recv(bench: &mut Bencher) {
|
||||||
|
let mut ctr: u64 = 0;
|
||||||
|
let mut a = Version::MAX.initial_alice_state(b"1", Version::V1);
|
||||||
|
let mut b = Version::MAX.initial_bob_state(b"1", Version::V1);
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
let mut drop_ctr = 0;
|
||||||
|
bench.iter(|| {
|
||||||
|
ctr += 1;
|
||||||
|
let (x, y) = if ctr % 2 == 1 {
|
||||||
|
(&mut a, &mut b)
|
||||||
|
} else {
|
||||||
|
(&mut b, &mut a)
|
||||||
|
};
|
||||||
|
let Send {
|
||||||
|
state,
|
||||||
|
msg,
|
||||||
|
key: key_a,
|
||||||
|
} = send(x, &mut rng).unwrap();
|
||||||
|
*x = state;
|
||||||
|
let Recv { state, key: key_b } = recv(y, &msg).unwrap();
|
||||||
|
assert_eq!(key_a.unwrap(), key_b.unwrap());
|
||||||
|
if drop_ctr == 0 {
|
||||||
|
drop_ctr += 30;
|
||||||
|
// We 'drop' a message by not replacing y's state.
|
||||||
|
} else {
|
||||||
|
drop_ctr -= 1;
|
||||||
|
*y = state;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn long_chain_send(bench: &mut Bencher) {
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
let mut a = Version::MAX.initial_alice_state(b"1", Version::V1);
|
||||||
|
let mut b = Version::MAX.initial_bob_state(b"1", Version::V1);
|
||||||
|
|
||||||
|
// Build a state with a lot of unused chain keys.
|
||||||
|
for _i in 0..8 {
|
||||||
|
for _j in 0..24000 {
|
||||||
|
let Send { state, .. } = send(&a, &mut rng).unwrap();
|
||||||
|
a = state;
|
||||||
|
}
|
||||||
|
let Send { state, msg, .. } = send(&a, &mut rng).unwrap();
|
||||||
|
a = state;
|
||||||
|
let Recv { state, .. } = recv(&b, &msg).unwrap();
|
||||||
|
b = state;
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("state size: {}", b.len());
|
||||||
|
let Send { msg, .. } = send(&a, &mut rng).unwrap();
|
||||||
|
bench.iter(|| {
|
||||||
|
black_box(recv(&b, &msg).unwrap());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
13
build.rs
Normal file
13
build.rs
Normal file
|
|
@ -0,0 +1,13 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
let protos = ["src/proto/pq_ratchet.proto"];
|
||||||
|
let mut prost_build = prost_build::Config::new();
|
||||||
|
prost_build
|
||||||
|
.compile_protos(&protos, &["src"])
|
||||||
|
.expect("Protobufs in src are valid");
|
||||||
|
for proto in &protos {
|
||||||
|
println!("cargo:rerun-if-changed={proto}");
|
||||||
|
}
|
||||||
|
}
|
||||||
15
check_copyrights.sh
Executable file
15
check_copyrights.sh
Executable file
|
|
@ -0,0 +1,15 @@
|
||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Copyright 2025 Signal Messenger, LLC
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
OUT=0
|
||||||
|
for pattern in '*.rs' '*.proto' '*.sh'; do
|
||||||
|
for file in `find ./ -name "$pattern" ! -path './target/*' ! -path './proofs/*' ! -path './.git/*'`; do
|
||||||
|
if ! head $file | grep -q Copyright; then
|
||||||
|
OUT=1
|
||||||
|
echo "Missing copyright in '$file'" 1>&2
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
done
|
||||||
|
exit $OUT
|
||||||
125
hax.py
Executable file
125
hax.py
Executable file
|
|
@ -0,0 +1,125 @@
|
||||||
|
#! /usr/bin/env python3
|
||||||
|
|
||||||
|
import os
|
||||||
|
import argparse
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def shell(command, expect=0, cwd=None, env={}):
|
||||||
|
subprocess_stdout = subprocess.DEVNULL
|
||||||
|
|
||||||
|
print("Env:", env)
|
||||||
|
print("Command: ", end="")
|
||||||
|
for i, word in enumerate(command):
|
||||||
|
if i == 4:
|
||||||
|
print("'{}' ".format(word), end="")
|
||||||
|
else:
|
||||||
|
print("{} ".format(word), end="")
|
||||||
|
|
||||||
|
print("\nDirectory: {}".format(cwd))
|
||||||
|
|
||||||
|
os_env = os.environ
|
||||||
|
os_env.update(env)
|
||||||
|
|
||||||
|
ret = subprocess.run(command, cwd=cwd, env=os_env)
|
||||||
|
if ret.returncode != expect:
|
||||||
|
raise Exception("Error {}. Expected {}.".format(ret, expect))
|
||||||
|
|
||||||
|
|
||||||
|
class extractAction(argparse.Action):
|
||||||
|
|
||||||
|
def __call__(self, parser, args, values, option_string=None) -> None:
|
||||||
|
# Extract spqr
|
||||||
|
include_str = "-**::proto::** +:**::proto::**"
|
||||||
|
if args.include:
|
||||||
|
include_str = "-** +:**::proto::** " + args.include
|
||||||
|
if args.encoding:
|
||||||
|
include_str = "-** +:**::proto::** +**::encoding::**"
|
||||||
|
interface_include = "+**::proto::**"
|
||||||
|
cargo_hax_into = [
|
||||||
|
"cargo",
|
||||||
|
"hax",
|
||||||
|
"into",
|
||||||
|
"-i",
|
||||||
|
include_str,
|
||||||
|
"fstar",
|
||||||
|
"--interfaces",
|
||||||
|
interface_include,
|
||||||
|
]
|
||||||
|
hax_env = {}
|
||||||
|
shell(
|
||||||
|
cargo_hax_into,
|
||||||
|
cwd=".",
|
||||||
|
env=hax_env,
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class proveAction(argparse.Action):
|
||||||
|
|
||||||
|
def __call__(self, parser, args, values, option_string=None) -> None:
|
||||||
|
admit_env = {}
|
||||||
|
if args.admit:
|
||||||
|
admit_env = {"OTHERFLAGS": "--admit_smt_queries true"}
|
||||||
|
shell(["make", "-C", "proofs/fstar/extraction/"], env=admit_env)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def parse_arguments():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="SPQR prove script. "
|
||||||
|
+ "Make sure to separate sub-command arguments with --."
|
||||||
|
)
|
||||||
|
subparsers = parser.add_subparsers()
|
||||||
|
|
||||||
|
extract_parser = subparsers.add_parser(
|
||||||
|
"extract", help="Extract the F* code for the proofs."
|
||||||
|
)
|
||||||
|
extract_parser.add_argument(
|
||||||
|
"--include",
|
||||||
|
required=False,
|
||||||
|
help="Include flag to pass to hax.",
|
||||||
|
)
|
||||||
|
extract_parser.add_argument(
|
||||||
|
"--encoding",
|
||||||
|
help="Extract only encoding module.",
|
||||||
|
action="store_true",
|
||||||
|
)
|
||||||
|
extract_parser.add_argument("extract", nargs="*", action=extractAction)
|
||||||
|
|
||||||
|
prover_parser = subparsers.add_parser(
|
||||||
|
"prove",
|
||||||
|
help="""
|
||||||
|
Run F*.
|
||||||
|
|
||||||
|
This typechecks the extracted code.
|
||||||
|
To lax-typecheck use --admit.
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
prover_parser.add_argument(
|
||||||
|
"--admit",
|
||||||
|
help="Admit all smt queries to lax typecheck.",
|
||||||
|
action="store_true",
|
||||||
|
)
|
||||||
|
prover_parser.add_argument(
|
||||||
|
"prove",
|
||||||
|
nargs="*",
|
||||||
|
action=proveAction,
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(sys.argv) == 1:
|
||||||
|
parser.print_help(sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
# Don't print unnecessary Python stack traces.
|
||||||
|
sys.tracebacklimit = 0
|
||||||
|
parse_arguments()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
5
proofs/fstar/extraction/.gitignore
vendored
Normal file
5
proofs/fstar/extraction/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
dep.graph
|
||||||
|
*.fst
|
||||||
|
*.fsti
|
||||||
|
*.json
|
||||||
|
.depend
|
||||||
271
proofs/fstar/extraction/Makefile
Normal file
271
proofs/fstar/extraction/Makefile
Normal file
|
|
@ -0,0 +1,271 @@
|
||||||
|
# This is a generically useful Makefile for F* that is self-contained
|
||||||
|
#
|
||||||
|
# We expect:
|
||||||
|
# 1. `fstar.exe` to be in PATH (alternatively, you can also set
|
||||||
|
# $FSTAR_HOME to be set to your F* repo/install directory)
|
||||||
|
#
|
||||||
|
# 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.
|
||||||
|
#
|
||||||
|
# 3. the extracted Cargo crate to have "hax-lib" as a dependency:
|
||||||
|
# `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}`
|
||||||
|
#
|
||||||
|
# Optionally, you can set `HACL_HOME`.
|
||||||
|
#
|
||||||
|
# ROOTS contains all the top-level F* files you wish to verify
|
||||||
|
# The default target `verify` verified ROOTS and its dependencies
|
||||||
|
# To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line
|
||||||
|
#
|
||||||
|
# To make F* emacs mode use the settings in this file, you need to
|
||||||
|
# add the following lines to your .emacs
|
||||||
|
#
|
||||||
|
# (setq-default fstar-executable "<YOUR_FSTAR_HOME>/bin/fstar.exe")
|
||||||
|
# (setq-default fstar-smt-executable "<YOUR_Z3_HOME>/bin/z3")
|
||||||
|
#
|
||||||
|
# (defun my-fstar-compute-prover-args-using-make ()
|
||||||
|
# "Construct arguments to pass to F* by calling make."
|
||||||
|
# (with-demoted-errors "Error when constructing arg string: %S"
|
||||||
|
# (let* ((fname (file-name-nondirectory buffer-file-name))
|
||||||
|
# (target (concat fname "-in"))
|
||||||
|
# (argstr (car (process-lines "make" "--quiet" target))))
|
||||||
|
# (split-string argstr))))
|
||||||
|
# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)
|
||||||
|
#
|
||||||
|
|
||||||
|
PATH_TO_CHILD_MAKEFILE := "$(abspath $(firstword $(MAKEFILE_LIST)))"
|
||||||
|
PATH_TO_TEMPLATE_MAKEFILE := "$(abspath $(lastword $(MAKEFILE_LIST)))"
|
||||||
|
|
||||||
|
HACL_HOME ?= $(HOME)/.hax/hacl_home
|
||||||
|
# Expand variable FSTAR_BIN_DETECT now, so that we don't run this over and over
|
||||||
|
|
||||||
|
FSTAR_BIN_DETECT := $(if $(shell command -v fstar.exe), fstar.exe, $(FSTAR_HOME)/bin/fstar.exe)
|
||||||
|
FSTAR_BIN ?= $(FSTAR_BIN_DETECT)
|
||||||
|
|
||||||
|
GIT_ROOT_DIR := $(shell git rev-parse --show-toplevel)/
|
||||||
|
CACHE_DIR ?= ${GIT_ROOT_DIR}.fstar-cache/checked
|
||||||
|
HINT_DIR ?= ${GIT_ROOT_DIR}.fstar-cache/hints
|
||||||
|
|
||||||
|
# Makes command quiet by default
|
||||||
|
Q ?= @
|
||||||
|
|
||||||
|
# Verify the required executable are in PATH
|
||||||
|
EXECUTABLES = cargo cargo-hax jq
|
||||||
|
K := $(foreach exec,$(EXECUTABLES),\
|
||||||
|
$(if $(shell which $(exec)),some string,$(error "No $(exec) in PATH")))
|
||||||
|
|
||||||
|
export ANSI_COLOR_BLUE=\033[34m
|
||||||
|
export ANSI_COLOR_RED=\033[31m
|
||||||
|
export ANSI_COLOR_BBLUE=\033[1;34m
|
||||||
|
export ANSI_COLOR_GRAY=\033[90m
|
||||||
|
export ANSI_COLOR_TONE=\033[35m
|
||||||
|
export ANSI_COLOR_RESET=\033[0m
|
||||||
|
|
||||||
|
ifdef NO_COLOR
|
||||||
|
export ANSI_COLOR_BLUE=
|
||||||
|
export ANSI_COLOR_RED=
|
||||||
|
export ANSI_COLOR_BBLUE=
|
||||||
|
export ANSI_COLOR_GRAY=
|
||||||
|
export ANSI_COLOR_TONE=
|
||||||
|
export ANSI_COLOR_RESET=
|
||||||
|
endif
|
||||||
|
|
||||||
|
# The following is a bash script that discovers F* libraries.
|
||||||
|
# Due to incompatibilities with make 4.3, I had to make a "oneliner" bash script...
|
||||||
|
define FINDLIBS
|
||||||
|
: "Prints a path if and only if it exists. Takes one argument: the path."; \
|
||||||
|
function print_if_exists() { \
|
||||||
|
if [ -d "$$1" ]; then \
|
||||||
|
echo "$$1"; \
|
||||||
|
fi; \
|
||||||
|
} ; \
|
||||||
|
: "Asks Cargo all the dependencies for the current crate or workspace,"; \
|
||||||
|
: "and extract all "root" directories for each. Takes zero argument."; \
|
||||||
|
function dependencies() { \
|
||||||
|
cargo metadata --format-version 1 | \
|
||||||
|
jq -r ".packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")"; \
|
||||||
|
} ; \
|
||||||
|
: "Find hax libraries *around* a given path. Takes one argument: the"; \
|
||||||
|
: "path."; \
|
||||||
|
function find_hax_libraries_at_path() { \
|
||||||
|
path="$$1" ; \
|
||||||
|
: "if there is a [proofs/fstar/extraction] subfolder, then that s a F* library" ; \
|
||||||
|
print_if_exists "$$path/proofs/fstar/extraction" ; \
|
||||||
|
: "Maybe the [proof-libs] folder of hax is around?" ; \
|
||||||
|
MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") ; \
|
||||||
|
if [ $$? -eq 0 ]; then \
|
||||||
|
print_if_exists "$$MAYBE_PROOF_LIBS/core" ; \
|
||||||
|
print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" ; \
|
||||||
|
fi ; \
|
||||||
|
} ; \
|
||||||
|
{ while IFS= read path; do \
|
||||||
|
find_hax_libraries_at_path "$$path"; \
|
||||||
|
done < <(dependencies) ; } | sort -u
|
||||||
|
endef
|
||||||
|
export FINDLIBS
|
||||||
|
|
||||||
|
FSTAR_INCLUDE_DIRS_EXTRA ?=
|
||||||
|
FINDLIBS_OUTPUT := $(shell bash -c '${FINDLIBS}')
|
||||||
|
FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(FSTAR_INCLUDE_DIRS_EXTRA) $(FINDLIBS_OUTPUT) ../models
|
||||||
|
|
||||||
|
# Make sure FSTAR_INCLUDE_DIRS has the `proof-libs`, print hints and
|
||||||
|
# an error message otherwise
|
||||||
|
ifneq (,$(findstring proof-libs/fstar,$(FSTAR_INCLUDE_DIRS)))
|
||||||
|
else
|
||||||
|
K += $(info )
|
||||||
|
ERROR := $(shell printf '${ANSI_COLOR_RED}Error: could not detect `proof-libs`!${ANSI_COLOR_RESET}')
|
||||||
|
K += $(info ${ERROR})
|
||||||
|
ERROR := $(shell printf ' > Do you have `${ANSI_COLOR_BLUE}hax-lib${ANSI_COLOR_RESET}` in your `${ANSI_COLOR_BLUE}Cargo.toml${ANSI_COLOR_RESET}` as a ${ANSI_COLOR_BLUE}git${ANSI_COLOR_RESET} or ${ANSI_COLOR_BLUE}path${ANSI_COLOR_RESET} dependency?')
|
||||||
|
K += $(info ${ERROR})
|
||||||
|
ERROR := $(shell printf ' ${ANSI_COLOR_BLUE}> Tip: you may want to run `cargo add --git https://github.com/hacspec/hax hax-lib`${ANSI_COLOR_RESET}')
|
||||||
|
K += $(info ${ERROR})
|
||||||
|
K += $(info )
|
||||||
|
K += $(error Fatal error: `proof-libs` is required.)
|
||||||
|
endif
|
||||||
|
|
||||||
|
.PHONY: all verify clean
|
||||||
|
|
||||||
|
all:
|
||||||
|
$(Q)rm -f .depend
|
||||||
|
$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) .depend hax.fst.config.json verify
|
||||||
|
|
||||||
|
all-keep-going:
|
||||||
|
$(Q)rm -f .depend
|
||||||
|
$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) --keep-going .depend hax.fst.config.json verify
|
||||||
|
|
||||||
|
# If $HACL_HOME doesn't exist, clone it
|
||||||
|
${HACL_HOME}:
|
||||||
|
$(Q)mkdir -p "${HACL_HOME}"
|
||||||
|
$(info Cloning Hacl* in ${HACL_HOME}...)
|
||||||
|
git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}"
|
||||||
|
$(info Cloning Hacl* in ${HACL_HOME}... done!)
|
||||||
|
|
||||||
|
# If no any F* file is detected, we run hax
|
||||||
|
ifeq "$(wildcard *.fst *fsti)" ""
|
||||||
|
$(shell cargo hax into fstar)
|
||||||
|
endif
|
||||||
|
|
||||||
|
# By default, we process all the files in the current directory
|
||||||
|
ROOTS ?= $(wildcard *.fst *fsti)
|
||||||
|
ADMIT_MODULES ?=
|
||||||
|
|
||||||
|
ADMIT_MODULE_FLAGS ?= --admit_smt_queries true
|
||||||
|
|
||||||
|
# Can be useful for debugging purposes
|
||||||
|
FINDLIBS.sh:
|
||||||
|
$(Q)echo '${FINDLIBS}' > FINDLIBS.sh
|
||||||
|
include-dirs:
|
||||||
|
$(Q)bash -c '${FINDLIBS}'
|
||||||
|
|
||||||
|
FSTAR_FLAGS = \
|
||||||
|
--warn_error -321-331-241-274-239-271 \
|
||||||
|
--ext context_pruning --z3version 4.13.3 --query_stats \
|
||||||
|
--cache_checked_modules --cache_dir $(CACHE_DIR) \
|
||||||
|
--already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \
|
||||||
|
$(addprefix --include ,$(FSTAR_INCLUDE_DIRS))
|
||||||
|
|
||||||
|
FSTAR := $(FSTAR_BIN) $(FSTAR_FLAGS)
|
||||||
|
|
||||||
|
.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(HACL_HOME)
|
||||||
|
@$(FSTAR) --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@
|
||||||
|
|
||||||
|
include .depend
|
||||||
|
|
||||||
|
$(HINT_DIR) $(CACHE_DIR):
|
||||||
|
$(Q)mkdir -p $@
|
||||||
|
|
||||||
|
define HELPMESSAGE
|
||||||
|
echo "hax' default Makefile for F*"
|
||||||
|
echo ""
|
||||||
|
echo "The available targets are:"
|
||||||
|
echo ""
|
||||||
|
function target() {
|
||||||
|
printf ' ${ANSI_COLOR_BLUE}%-20b${ANSI_COLOR_RESET} %s\n' "$$1" "$$2"
|
||||||
|
}
|
||||||
|
target "all" "Verify every F* files (stops whenever an F* fails first)"
|
||||||
|
target "all-keep-going" "Verify every F* files (tries as many F* module as possible)"
|
||||||
|
target "" ""
|
||||||
|
target "run/${ANSI_COLOR_TONE}<MyModule.fst> " 'Runs F* on `MyModule.fst` only'
|
||||||
|
target "" ""
|
||||||
|
target "vscode" 'Generates a `hax.fst.config.json` file'
|
||||||
|
target "${ANSI_COLOR_TONE}<MyModule.fst>${ANSI_COLOR_BLUE}-in " 'Useful for Emacs, outputs the F* prefix command to be used'
|
||||||
|
target "" ""
|
||||||
|
target "clean" 'Cleanup the target'
|
||||||
|
target "include-dirs" 'List the F* include directories'
|
||||||
|
target "" ""
|
||||||
|
target "describe" 'List the F* root modules, and describe the environment.'
|
||||||
|
echo ""
|
||||||
|
echo "Variables:"
|
||||||
|
target "NO_COLOR" "Set to anything to disable colors"
|
||||||
|
target "ADMIT_MODULES" "List of modules where F* will assume every SMT query"
|
||||||
|
target "FSTAR_INCLUDE_DIRS_EXTRA" "List of extra include F* dirs"
|
||||||
|
endef
|
||||||
|
export HELPMESSAGE
|
||||||
|
|
||||||
|
describe:
|
||||||
|
@printf '${ANSI_COLOR_BBLUE}F* roots:${ANSI_COLOR_RESET}\n'
|
||||||
|
@for root in ${ROOTS}; do \
|
||||||
|
filename=$$(basename -- "$$root") ;\
|
||||||
|
ext="$${filename##*.}" ;\
|
||||||
|
noext="$${filename%.*}" ;\
|
||||||
|
printf "${ANSI_COLOR_GRAY}$$(dirname -- "$$root")/${ANSI_COLOR_RESET}%s${ANSI_COLOR_GRAY}.${ANSI_COLOR_TONE}%s${ANSI_COLOR_RESET}%b\n" "$$noext" "$$ext" $$([[ "${ADMIT_MODULES}" =~ (^| )$$root($$| ) ]] && echo '${ANSI_COLOR_RED}\t[ADMITTED]${ANSI_COLOR_RESET}'); \
|
||||||
|
done
|
||||||
|
@printf '\n${ANSI_COLOR_BBLUE}Environment:${ANSI_COLOR_RESET}\n'
|
||||||
|
@printf ' - ${ANSI_COLOR_BLUE}HACL_HOME${ANSI_COLOR_RESET} = %s\n' '${HACL_HOME}'
|
||||||
|
@printf ' - ${ANSI_COLOR_BLUE}FSTAR_BIN${ANSI_COLOR_RESET} = %s\n' '${FSTAR_BIN}'
|
||||||
|
@printf ' - ${ANSI_COLOR_BLUE}GIT_ROOT_DIR${ANSI_COLOR_RESET} = %s\n' '${GIT_ROOT_DIR}'
|
||||||
|
@printf ' - ${ANSI_COLOR_BLUE}CACHE_DIR${ANSI_COLOR_RESET} = %s\n' '${CACHE_DIR}'
|
||||||
|
@printf ' - ${ANSI_COLOR_BLUE}HINT_DIR${ANSI_COLOR_RESET} = %s\n' '${HINT_DIR}'
|
||||||
|
@printf ' - ${ANSI_COLOR_BLUE}ADMIT_MODULE_FLAGS${ANSI_COLOR_RESET} = %s\n' '${ADMIT_MODULE_FLAGS}'
|
||||||
|
@printf ' - ${ANSI_COLOR_BLUE}FSTAR_INCLUDE_DIRS_EXTRA${ANSI_COLOR_RESET} = %s\n' '${FSTAR_INCLUDE_DIRS_EXTRA}'
|
||||||
|
|
||||||
|
help: ;@bash -c "$$HELPMESSAGE"
|
||||||
|
h: ;@bash -c "$$HELPMESSAGE"
|
||||||
|
|
||||||
|
HEADER = $(Q)printf '${ANSI_COLOR_BBLUE}[CHECK] %s ${ANSI_COLOR_RESET}\n' "$(basename $(notdir $@))"
|
||||||
|
|
||||||
|
run/%: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME)
|
||||||
|
${HEADER}
|
||||||
|
$(Q)$(FSTAR) $(OTHERFLAGS) $(@:run/%=%)
|
||||||
|
|
||||||
|
VERIFIED_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))
|
||||||
|
ADMIT_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ADMIT_MODULES)))
|
||||||
|
|
||||||
|
$(ADMIT_CHECKED):
|
||||||
|
$(Q)printf '${ANSI_COLOR_BBLUE}[${ANSI_COLOR_TONE}ADMIT${ANSI_COLOR_BBLUE}] %s ${ANSI_COLOR_RESET}\n' "$(basename $(notdir $@))"
|
||||||
|
$(Q)$(FSTAR) $(OTHERFLAGS) $(ADMIT_MODULE_FLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \
|
||||||
|
echo "" ; \
|
||||||
|
exit 1 ; \
|
||||||
|
}
|
||||||
|
$(Q)printf "\n\n"
|
||||||
|
|
||||||
|
$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME)
|
||||||
|
${HEADER}
|
||||||
|
$(Q)$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \
|
||||||
|
echo "" ; \
|
||||||
|
exit 1 ; \
|
||||||
|
}
|
||||||
|
touch $@
|
||||||
|
$(Q)printf "\n\n"
|
||||||
|
|
||||||
|
verify: $(VERIFIED_CHECKED) $(ADMIT_CHECKED)
|
||||||
|
|
||||||
|
# Targets for Emacs
|
||||||
|
%.fst-in:
|
||||||
|
$(info $(FSTAR_FLAGS) \
|
||||||
|
$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)
|
||||||
|
%.fsti-in:
|
||||||
|
$(info $(FSTAR_FLAGS) \
|
||||||
|
$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)
|
||||||
|
|
||||||
|
# Targets for VSCode
|
||||||
|
hax.fst.config.json: .depend
|
||||||
|
$(Q)echo "$(FSTAR_INCLUDE_DIRS)" | jq --arg fstar "$(FSTAR_BIN)" -R 'split(" ") | {fstar_exe: $$fstar | gsub("^\\s+|\\s+$$";""), include_dirs: .}' > $@
|
||||||
|
vscode:
|
||||||
|
$(Q)rm -f .depend
|
||||||
|
$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) hax.fst.config.json
|
||||||
|
|
||||||
|
SHELL=bash
|
||||||
|
|
||||||
|
# Clean target
|
||||||
|
clean:
|
||||||
|
rm -rf $(CACHE_DIR)/*
|
||||||
|
rm *.fst
|
||||||
293
proofs/fstar/models/BitVecEq.fsti
Normal file
293
proofs/fstar/models/BitVecEq.fsti
Normal file
|
|
@ -0,0 +1,293 @@
|
||||||
|
module BitVecEq
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 100"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
open MkSeq
|
||||||
|
open FStar.FunctionalExtensionality
|
||||||
|
|
||||||
|
val bit_vec_equal (#n: nat) (bv1 bv2: bit_vec n): Type0
|
||||||
|
val bit_vec_equal_intro (#n: nat) (bv1 bv2: bit_vec n)
|
||||||
|
: Lemma (requires forall i. bv1 i == bv2 i)
|
||||||
|
(ensures bit_vec_equal bv1 bv2)
|
||||||
|
val bit_vec_equal_elim (#n: nat) (bv1 bv2: bit_vec n)
|
||||||
|
: Lemma (requires bit_vec_equal #n bv1 bv2)
|
||||||
|
(ensures bv1 == bv2)
|
||||||
|
[SMTPat (bit_vec_equal #n bv1 bv2)]
|
||||||
|
|
||||||
|
let bit_vec_equal_intro_principle ()
|
||||||
|
: Lemma (forall n (bv1 bv2: bit_vec n). (forall i. bv1 i == bv2 i) ==> bit_vec_equal #n bv1 bv2)
|
||||||
|
= introduce forall n (bv1 bv2: bit_vec n). _
|
||||||
|
with introduce (forall i. bv1 i == bv2 i) ==> bit_vec_equal #n bv1 bv2
|
||||||
|
with _. bit_vec_equal_intro #n bv1 bv2
|
||||||
|
|
||||||
|
let bit_vec_equal_elim_principle ()
|
||||||
|
: Lemma (forall n (bv1 bv2: bit_vec n). bit_vec_equal #n bv1 bv2 ==> (forall i. bv1 i == bv2 i))
|
||||||
|
= introduce forall n (bv1 bv2: bit_vec n). _
|
||||||
|
with introduce bit_vec_equal #n bv1 bv2 ==> (forall i. bv1 i == bv2 i)
|
||||||
|
with _. bit_vec_equal_elim #n bv1 bv2
|
||||||
|
|
||||||
|
let bit_vec_equal_trivial (bv1 bv2: bit_vec 0): Lemma (bv1 == bv2)
|
||||||
|
[SMTPat (eq2 #(bit_vec 0) bv1 bv2)]
|
||||||
|
= bit_vec_equal_intro bv1 bv2
|
||||||
|
|
||||||
|
let bit_vec_sub #n (bv: bit_vec n) (start: nat) (len: nat {start + len <= n})
|
||||||
|
: bit_vec len
|
||||||
|
= on (i: nat {i < len})
|
||||||
|
(fun i -> bv (start + i))
|
||||||
|
|
||||||
|
let bit_vec_equal_trivial_sub_smtpat (bv1: bit_vec 'n)
|
||||||
|
: Lemma (forall (bv2: bit_vec 0). bit_vec_sub bv1 0 0 == bv2)
|
||||||
|
[SMTPat (bit_vec_sub bv1 0 0)]
|
||||||
|
= introduce forall (bv2: bit_vec 0). bit_vec_sub bv1 0 0 == bv2
|
||||||
|
with bit_vec_equal_trivial (bit_vec_sub bv1 0 0) bv2
|
||||||
|
|
||||||
|
unfold let retype #a #b (#_:unit{a == b})
|
||||||
|
(x: a): b
|
||||||
|
= x
|
||||||
|
|
||||||
|
let bit_vec_sub_all_lemma #n (bv: bit_vec n)
|
||||||
|
: Lemma (bit_vec_sub bv 0 n == bv)
|
||||||
|
[SMTPat (bit_vec_sub bv 0 n)]
|
||||||
|
= bit_vec_equal_intro (bit_vec_sub bv 0 n) bv
|
||||||
|
|
||||||
|
let int_t_array_bitwise_eq'
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(arr1: t_Array (int_t t1) n1) (d1: num_bits t1)
|
||||||
|
(arr2: t_Array (int_t t2) n2) (d2: num_bits t2 {v n1 * d1 == v n2 * d2})
|
||||||
|
= bit_vec_equal (bit_vec_of_int_t_array arr1 d1)
|
||||||
|
(retype (bit_vec_of_int_t_array arr2 d2))
|
||||||
|
|
||||||
|
let int_t_array_bitwise_eq
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(arr1: t_Array (int_t t1) n1) (d1: num_bits t1)
|
||||||
|
(arr2: t_Array (int_t t2) n2) (d2: num_bits t2 {v n1 * d1 == v n2 * d2})
|
||||||
|
= bit_vec_of_int_t_array arr1 d1 == bit_vec_of_int_t_array arr2 d2
|
||||||
|
|
||||||
|
// let get_bit_intro ()
|
||||||
|
// : Lemma (forall (#n: inttype) (x: int_t n) (nth: usize {v nth < bits n}).
|
||||||
|
// get_bit #n x nth == ( if v x >= 0 then get_bit_nat (v x) (v nth)
|
||||||
|
// else get_bit_nat (pow2 (bits n) + v x) (v nth)))
|
||||||
|
// = introduce forall (n: inttype) (x: int_t n) (nth: usize {v nth < bits n}).
|
||||||
|
// get_bit #n x nth == ( if v x >= 0 then get_bit_nat (v x) (v nth)
|
||||||
|
// else get_bit_nat (pow2 (bits n) + v x) (v nth))
|
||||||
|
// with get_bit_intro #n x nth
|
||||||
|
|
||||||
|
#push-options "--fuel 0 --ifuel 0 --z3rlimit 150"
|
||||||
|
/// Rewrite a `bit_vec_of_int_t_array (Seq.slice arr ...)` into a `bit_vec_sub ...`
|
||||||
|
let int_t_seq_slice_to_bv_sub_lemma #t #n
|
||||||
|
(arr: t_Array (int_t t) n)
|
||||||
|
(start: nat) (len: usize {start + v len <= v n})
|
||||||
|
(d: num_bits t)
|
||||||
|
: Lemma ( bit_vec_of_int_t_array (Seq.slice arr start (start + v len) <: t_Array _ len) d
|
||||||
|
`bit_vec_equal` bit_vec_sub (bit_vec_of_int_t_array arr d) (start * d) (v len * d))
|
||||||
|
[SMTPat (bit_vec_sub (bit_vec_of_int_t_array arr d) (start * d) (v len * d))]
|
||||||
|
= let bv1 = bit_vec_of_int_t_array #_ #len (Seq.slice arr start (start + v len)) d in
|
||||||
|
let bv2 = bit_vec_sub (bit_vec_of_int_t_array arr d) (start * d) (v len * d) in
|
||||||
|
introduce forall i. bv1 i == bv2 i
|
||||||
|
with ( Seq.lemma_index_slice arr start (start + v len) (i / d);
|
||||||
|
Math.Lemmas.lemma_div_plus i start d;
|
||||||
|
Math.Lemmas.lemma_mod_plus i start d);
|
||||||
|
bit_vec_equal_intro bv1 bv2
|
||||||
|
|
||||||
|
#push-options "--split_queries always"
|
||||||
|
let int_t_eq_seq_slice_bv_sub_lemma #t #n1 #n2
|
||||||
|
(arr1: t_Array (int_t t) n1) (arr2: t_Array (int_t t) n2) (d: num_bits t)
|
||||||
|
(start1 start2: nat) (len: nat {start1 + len <= v n1 /\ start2 + len <= v n2})
|
||||||
|
: Lemma (requires Seq.slice arr1 start1 (start1 + len) == Seq.slice arr2 start2 (start2 + len))
|
||||||
|
(ensures bit_vec_equal
|
||||||
|
(bit_vec_sub (bit_vec_of_int_t_array arr1 d) (start1 * d) (len * d))
|
||||||
|
(bit_vec_sub (bit_vec_of_int_t_array arr2 d) (start2 * d) (len * d)))
|
||||||
|
[SMTPat ((bit_vec_sub (bit_vec_of_int_t_array arr1 d) (start1 * d) (len * d)) ==
|
||||||
|
(bit_vec_sub (bit_vec_of_int_t_array arr2 d) (start2 * d) (len * d)))]
|
||||||
|
= let len = sz len in
|
||||||
|
int_t_seq_slice_to_bv_sub_lemma arr1 start1 len d;
|
||||||
|
int_t_seq_slice_to_bv_sub_lemma arr2 start2 len d;
|
||||||
|
// bit_vec_equal_elim_principle ();
|
||||||
|
bit_vec_equal_intro_principle ()
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
let bit_vec_equal_extend #n1 #n2
|
||||||
|
(bv1: bit_vec n1) (bv2: bit_vec n2) (start1 start2: nat)
|
||||||
|
(len1: nat)
|
||||||
|
(len2: nat { start1 + len1 + len2 <= n1 /\ start2 + len1 + len2 <= n2})
|
||||||
|
: Lemma
|
||||||
|
(requires
|
||||||
|
bit_vec_sub bv1 start1 len1 == bit_vec_sub bv2 start2 len1
|
||||||
|
/\ bit_vec_sub bv1 (start1 + len1) len2 == bit_vec_sub bv2 (start2 + len1) len2)
|
||||||
|
(ensures bit_vec_sub bv1 start1 (len1+len2) == bit_vec_sub bv2 start2 (len1+len2))
|
||||||
|
// [SMTPat (bit_vec_sub bv1 start1 len1 == bit_vec_sub bv2 start2 len1);
|
||||||
|
// SMTPat ()
|
||||||
|
// ]
|
||||||
|
// SMTPat (bit_vec_sub bv1 (start1 + len1) len2 == bit_vec_sub bv2 (start2 + len1) len2)]
|
||||||
|
= let left1 = bit_vec_sub bv1 start1 len1 in
|
||||||
|
let left2 = bit_vec_sub bv2 start2 len1 in
|
||||||
|
let right1 = bit_vec_sub bv1 (start1 + len1) len2 in
|
||||||
|
let right2 = bit_vec_sub bv2 (start2 + len1) len2 in
|
||||||
|
// ()
|
||||||
|
// bit_vec_equal_elim left1 left2 ;
|
||||||
|
// bit_vec_equal_elim right1 right2;
|
||||||
|
let entire1 = bit_vec_sub bv1 start1 (len1 + len2) in
|
||||||
|
let entire2 = bit_vec_sub bv2 start2 (len1 + len2) in
|
||||||
|
assert (forall (i:nat). i < len1 ==> left1 i == left2 i);
|
||||||
|
assert (forall (i:nat). i < len2 ==> right1 i == right2 i);
|
||||||
|
introduce forall (i:nat). i < len1 + len2 ==> entire1 i == entire2 i
|
||||||
|
with introduce i < len1 + len2 ==> entire1 i == entire2 i
|
||||||
|
with _. if i < len1 then assert (left1 i == left2 i)
|
||||||
|
else assert (entire1 i == right1 (i - len1));
|
||||||
|
bit_vec_equal_intro entire1 entire2
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
// let bit_vec_equal_trans (#n: nat) (bv1 bv2 bv3: bit_vec n)
|
||||||
|
// : Lemma (requires bv1 `bit_vec_equal` bv2 /\ bv2 `bit_vec_equal` bv3)
|
||||||
|
// (ensures bv1 `bit_vec_equal` bv3)
|
||||||
|
// = bit_vec_equal_elim_principle ();
|
||||||
|
// bit_vec_equal_intro_principle ()
|
||||||
|
|
||||||
|
(*
|
||||||
|
let int_arr_bitwise_eq_range
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t1 -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t1 {refinement1 x}) n1)
|
||||||
|
(d1: num_bits t1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement2: int_t t2 -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t2 {refinement2 x}) n2)
|
||||||
|
(d2: num_bits t2)
|
||||||
|
(offset1 offset2: nat)
|
||||||
|
(bits: nat {
|
||||||
|
offset1 + bits <= v n1 * d1
|
||||||
|
/\ offset2 + bits <= v n2 * d2
|
||||||
|
})
|
||||||
|
= bit_vec_equal #bits (fun i -> bit_vec_of_int_t_array arr1 d1 (i + offset1))
|
||||||
|
= forall (k: nat). k < bits ==>
|
||||||
|
bit_vec_of_int_t_array arr1 d1 (offset1 + k)
|
||||||
|
== bit_vec_of_int_t_array arr2 d2 (offset2 + k)
|
||||||
|
|
||||||
|
let int_arr_bitwise_eq_range_comm
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t1 -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t1 {refinement1 x}) n1)
|
||||||
|
(d1: num_bits t1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement2: int_t t2 -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t2 {refinement2 x}) n2)
|
||||||
|
(d2: num_bits t2)
|
||||||
|
(offset1 offset2: nat)
|
||||||
|
(bits: nat {
|
||||||
|
offset1 + bits <= v n1 * d1
|
||||||
|
/\ offset2 + bits <= v n2 * d2
|
||||||
|
})
|
||||||
|
: Lemma (requires int_arr_bitwise_eq_range arr1 d1 arr2 d2 offset1 offset2 bits)
|
||||||
|
(ensures int_arr_bitwise_eq_range arr2 d2 arr1 d1 offset2 offset1 bits)
|
||||||
|
= ()
|
||||||
|
|
||||||
|
// kill that function in favor of range
|
||||||
|
let int_arr_bitwise_eq_up_to
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t1 -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t1 {refinement1 x}) n1)
|
||||||
|
(d1: num_bits t1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement: int_t t2 -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t2 {refinement x}) n2)
|
||||||
|
(d2: num_bits t2 {v n1 * d1 == v n2 * d2})
|
||||||
|
(max: nat {max <= v n1 * d1})
|
||||||
|
|
||||||
|
= forall i. i < max
|
||||||
|
==> bit_vec_of_int_t_array arr1 d1 i == bit_vec_of_int_t_array arr2 d2 i
|
||||||
|
|
||||||
|
let int_arr_bitwise_eq_
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t1 -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t1 {refinement1 x}) n1)
|
||||||
|
(d1: num_bits t1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement: int_t t2 -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t2 {refinement x}) n2)
|
||||||
|
(d2: num_bits t2 {v n1 * d1 == v n2 * d2})
|
||||||
|
= int_arr_bitwise_eq_up_to arr1 d1 arr2 d2 (v n1 * d1)
|
||||||
|
|
||||||
|
// move to fsti
|
||||||
|
let bit_vec_equal #n (bv1 bv2: bit_vec n)
|
||||||
|
= forall i. i < n ==> bv1 i == bv2 i
|
||||||
|
|
||||||
|
let int_arr_bitwise_eq
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t1 -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t1 {refinement1 x}) n1)
|
||||||
|
(d1: num_bits t1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement: int_t t2 -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t2 {refinement x}) n2)
|
||||||
|
(d2: num_bits t2 {v n1 * d1 == v n2 * d2})
|
||||||
|
= forall i. i < v n1 * d1
|
||||||
|
==> bit_vec_of_int_t_array arr1 d1 i == bit_vec_of_int_t_array arr2 d2 i
|
||||||
|
|
||||||
|
let int_arr_bitwise_eq_range_transitivity
|
||||||
|
#t1 #t2 #t3 #n1 #n2 #n3
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t1 -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t1 {refinement1 x}) n1)
|
||||||
|
(d1: num_bits t1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement2: int_t t2 -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t2 {refinement2 x}) n2)
|
||||||
|
(d2: num_bits t2)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement3: int_t t3 -> Type0)
|
||||||
|
(arr3: t_Array (x: int_t t3 {refinement3 x}) n3)
|
||||||
|
(d3: num_bits t3)
|
||||||
|
(offset1 offset2 offset3: nat)
|
||||||
|
(bits: nat {
|
||||||
|
offset1 + bits <= v n1 * d1
|
||||||
|
/\ offset2 + bits <= v n2 * d2
|
||||||
|
/\ offset3 + bits <= v n3 * d3
|
||||||
|
})
|
||||||
|
: Lemma
|
||||||
|
(requires int_arr_bitwise_eq_range #t1 #t2 #n1 #n2 arr1 d1 arr2 d2 offset1 offset2 bits
|
||||||
|
/\ int_arr_bitwise_eq_range #t2 #t3 #n2 #n3 arr2 d2 arr3 d3 offset2 offset3 bits)
|
||||||
|
(ensures int_arr_bitwise_eq_range #t1 #t3 #n1 #n3 arr1 d1 arr3 d3 offset1 offset3 bits)
|
||||||
|
= ()
|
||||||
|
|
||||||
|
|
||||||
|
let int_arr_bitwise_eq_range_intro
|
||||||
|
#t1 #t2 #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t1 -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t1 {refinement1 x}) n1)
|
||||||
|
(d1: num_bits t1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement: int_t t2 -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t2 {refinement x}) n2)
|
||||||
|
(d2: num_bits t2 {v n1 * d1 == v n2 * d2})
|
||||||
|
: Lemma
|
||||||
|
(requires int_arr_bitwise_eq arr1 d1 arr2 d2)
|
||||||
|
(ensures int_arr_bitwise_eq_range arr1 d1 arr2 d2 0 0 (v n1 * d1))
|
||||||
|
= admit ()
|
||||||
|
|
||||||
|
let int_arr_bitwise_eq_range_intro_eq_slice
|
||||||
|
#t #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement: int_t t -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t {refinement x}) n1)
|
||||||
|
(arr2: t_Array (x: int_t t {refinement x}) n2)
|
||||||
|
(d: num_bits t)
|
||||||
|
(offset1 offset2: nat)
|
||||||
|
(n: nat {offset1 + n < v n1 /\ offset2 + n < v n2})
|
||||||
|
(bits: nat {
|
||||||
|
offset1 + bits <= v n1 * d
|
||||||
|
/\ offset2 + bits <= v n2 * d
|
||||||
|
/\ bits <= n * d
|
||||||
|
})
|
||||||
|
: Lemma (requires Seq.slice arr1 offset1 (offset1 + n) == Seq.slice arr2 offset2 (offset2 + n))
|
||||||
|
(ensures int_arr_bitwise_eq_range arr1 d arr2 d offset1 offset2 bits)
|
||||||
|
= admit ()
|
||||||
|
|
||||||
|
let int_arr_bitwise_eq_range_intro_eq
|
||||||
|
#t #n1 #n2
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement1: int_t t -> Type0)
|
||||||
|
(arr1: t_Array (x: int_t t {refinement1 x}) n1)
|
||||||
|
(#[FStar.Tactics.exact (`(fun _ -> True))]refinement2: int_t t -> Type0)
|
||||||
|
(arr2: t_Array (x: int_t t {refinement2 x}) n2)
|
||||||
|
(d: num_bits t)
|
||||||
|
(n_offset1 n_offset2: nat)
|
||||||
|
(n: nat {n_offset1 + n <= v n1 /\ n_offset2 + n <= v n2})
|
||||||
|
// (offset1 offset2: nat)
|
||||||
|
(bits: nat {
|
||||||
|
n_offset1 * d + bits <= v n1 * d
|
||||||
|
/\ n_offset2 * d + bits <= v n2 * d
|
||||||
|
/\ bits <= n * d
|
||||||
|
})
|
||||||
|
: Lemma (requires forall (i: nat). i < n ==> Seq.index arr1 (i + n_offset1) == Seq.index arr2 (i + n_offset2))
|
||||||
|
(ensures int_arr_bitwise_eq_range arr1 d arr2 d (n_offset1 * d) (n_offset2 * d) bits)
|
||||||
|
= admit ()
|
||||||
|
*)
|
||||||
258
proofs/fstar/models/Bytes.Buf.Buf_impl.fsti
Normal file
258
proofs/fstar/models/Bytes.Buf.Buf_impl.fsti
Normal file
|
|
@ -0,0 +1,258 @@
|
||||||
|
module Bytes.Buf.Buf_impl
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
val sign_extend (v_val: u64) (nbytes: usize) : Prims.Pure i64 Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
(* item error backend: (AndMutDefsite) The support in hax of function with one or more inputs of type `&mut _` is limited. Onlu trivial patterns are allowed there: `fn f(x: &mut (T, U)) ...` is allowed while `f((x, y): &mut (T, U))` is rejected.
|
||||||
|
Last available AST for this item:
|
||||||
|
|
||||||
|
/** Read bytes from a buffer.*//***//** A buffer stores bytes in memory such that read operations are infallible.*//** The underlying storage may or may not be in contiguous memory. A `Buf` value*//** is a cursor into the buffer. Reading from `Buf` advances the cursor*//** position. It can be thought of as an efficient `Iterator` for collections of*//** bytes.*//***//** The simplest `Buf` is a `&[u8]`.*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"hello world"[..];*//***//** assert_eq!(b'h', buf.get_u8());*//** assert_eq!(b'e', buf.get_u8());*//** assert_eq!(b'l', buf.get_u8());*//***//** let mut rest = [0; 8];*//** buf.copy_to_slice(&mut rest);*//***//** assert_eq!(&rest[..], &b"lo world"[..]);*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]trait t_Buf<Self_>{/** Returns the number of bytes between the current position and the end of*//** the buffer.*//***//** This value is greater than or equal to the length of the slice returned*//** by `chunk()`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"hello world"[..];*//***//** assert_eq!(buf.remaining(), 11);*//***//** buf.get_u8();*//***//** assert_eq!(buf.remaining(), 10);*//** ```*//***//** # Implementer notes*//***//** Implementations of `remaining` should ensure that the return value does*//** not change unless a call is made to `advance` or any other function that*//** is documented to change the `Buf`'s current position.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_remaining<Anonymous: 'unk>(_: &Self) -> int;
|
||||||
|
/** Returns a slice starting at the current position and of length between 0*//** and `Buf::remaining()`. Note that this *can* return a shorter slice (this*//** allows non-continuous internal representation).*//***//** This is a lower level function. Most operations are done with other*//** functions.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"hello world"[..];*//***//** assert_eq!(buf.chunk(), &b"hello world"[..]);*//***//** buf.advance(6);*//***//** assert_eq!(buf.chunk(), &b"world"[..]);*//** ```*//***//** # Implementer notes*//***//** This function should never panic. `chunk()` should return an empty*//** slice **if and only if** `remaining()` returns 0. In other words,*//** `chunk()` returning an empty slice implies that `remaining()` will*//** return 0 and `remaining()` returning 0 implies that `chunk()` will*//** return an empty slice.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_chunk<Anonymous: 'unk>(_: &Self) -> &[int];
|
||||||
|
/** Fills `dst` with potentially multiple slices starting at `self`'s*//** current position.*//***//** If the `Buf` is backed by disjoint slices of bytes, `chunk_vectored` enables*//** fetching more than one slice at once. `dst` is a slice of `IoSlice`*//** references, enabling the slice to be directly used with [`writev`]*//** without any further conversion. The sum of the lengths of all the*//** buffers written to `dst` will be less than or equal to `Buf::remaining()`.*//***//** The entries in `dst` will be overwritten, but the data **contained** by*//** the slices **will not** be modified. The return value is the number of*//** slices written to `dst`. If `Buf::remaining()` is non-zero, then this*//** writes at least one non-empty slice to `dst`.*//***//** This is a lower level function. Most operations are done with other*//** functions.*//***//** # Implementer notes*//***//** This function should never panic. Once the end of the buffer is reached,*//** i.e., `Buf::remaining` returns 0, calls to `chunk_vectored` must return 0*//** without mutating `dst`.*//***//** Implementations should also take care to properly handle being called*//** with `dst` being a zero length slice.*//***//** [`writev`]: http://man7.org/linux/man-pages/man2/readv.2.html*/#[cfg(feature = "std")]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_chunks_vectored<'a: 'unk, Anonymous: 'unk>((self: &Self,dst: &mut [std::io::t_IoSlice<lifetime!(something)>])) -> int{{let _: tuple0 = {(if core::slice::impl__is_empty::<std::io::t_IoSlice<lifetime!(something)>>(&(deref(dst))){rust_primitives::hax::never_to_any({(return 0)})})};{(if bytes::buf::buf_impl::f_has_remaining(&(deref(self))){{let _: tuple0 = {(deref(dst)[0] = std::io::impl_10__new::<lifetime!(something)>(&(deref(bytes::buf::buf_impl::f_chunk(&(deref(self)))))))};{1}}} else {{0}})}}}
|
||||||
|
/** Advance the internal cursor of the Buf*//***//** The next call to `chunk()` will return a slice starting `cnt` bytes*//** further into the underlying buffer.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"hello world"[..];*//***//** assert_eq!(buf.chunk(), &b"hello world"[..]);*//***//** buf.advance(6);*//***//** assert_eq!(buf.chunk(), &b"world"[..]);*//** ```*//***//** # Panics*//***//** This function **may** panic if `cnt > self.remaining()`.*//***//** # Implementer notes*//***//** It is recommended for implementations of `advance` to panic if `cnt >*//** self.remaining()`. If the implementation does not panic, the call must*//** behave as if `cnt == self.remaining()`.*//***//** A call with `cnt == 0` should never panic and be a no-op.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_advance<Anonymous: 'unk>(_: &mut Self,_: int) -> tuple0;
|
||||||
|
/** Returns true if there are any more bytes to consume*//***//** This is equivalent to `self.remaining() != 0`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"a"[..];*//***//** assert!(buf.has_remaining());*//***//** buf.get_u8();*//***//** assert!(!buf.has_remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_has_remaining<Anonymous: 'unk>((self: &Self)) -> bool{{core::cmp::PartialOrd::gt(bytes::buf::buf_impl::f_remaining(&(deref(self))),0)}}
|
||||||
|
/** Copies bytes from `self` into `dst`.*//***//** The cursor is advanced by the number of bytes copied. `self` must have*//** enough remaining bytes to fill `dst`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"hello world"[..];*//** let mut dst = [0; 5];*//***//** buf.copy_to_slice(&mut dst);*//** assert_eq!(&b"hello"[..], &dst);*//** assert_eq!(6, buf.remaining());*//** ```*//***//** # Panics*//***//** This function panics if `self.remaining() < dst.len()`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_copy_to_slice<Anonymous: 'unk, Anonymous: 'unk>((self: &mut Self,dst: &mut [int])) -> tuple0{{let _: tuple0 = {core::result::impl__unwrap_or_else::<tuple0,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> tuple0)>(bytes::buf::buf_impl::f_try_copy_to_slice(&mut (deref(self)),&mut (deref(dst))),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))}))};Tuple0}}
|
||||||
|
/** Gets an unsigned 8 bit integer from `self`.*//***//** The current position is advanced by 1.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08 hello"[..];*//** assert_eq!(8, buf.get_u8());*//** ```*//***//** # Panics*//***//** This function panics if there is no more remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u8<Anonymous: 'unk>((self: &mut Self)) -> int{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),1){{rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(bytes::TryGetError{f_requested:1,f_available:0,})))))}})};{let ret: int = {core::ops::index::Index::index(deref(bytes::buf::buf_impl::f_chunk(&(self))),0)};{let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),1)};{ret}}}}}
|
||||||
|
/** Gets a signed 8 bit integer from `self`.*//***//** The current position is advanced by 1.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08 hello"[..];*//** assert_eq!(8, buf.get_i8());*//** ```*//***//** # Panics*//***//** This function panics if there is no more remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i8<Anonymous: 'unk>((self: &mut Self)) -> int{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),1){rust_primitives::hax::never_to_any({bytes::panic_advance(&(deref(&(bytes::TryGetError{f_requested:1,f_available:0,}))))})})};{let ret: int = {cast(core::ops::index::Index::index(deref(bytes::buf::buf_impl::f_chunk(&(self))),0))};{let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),1)};{ret}}}}}
|
||||||
|
/** Gets an unsigned 16 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09 hello"[..];*//** assert_eq!(0x0809, buf.get_u16());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u16<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u16::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u16::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u16::v_SIZE,}),(|src| {unsafe {core::num::impl__u16__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u16::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u16__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
|
||||||
|
/** Gets an unsigned 16 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x09\x08 hello"[..];*//** assert_eq!(0x0809, buf.get_u16_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u16_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u16_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u16_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u16_le::v_SIZE,}),(|src| {unsafe {core::num::impl__u16__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u16_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u16__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
|
||||||
|
/** Gets an unsigned 16 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09 hello",*//** false => b"\x09\x08 hello",*//** };*//** assert_eq!(0x0809, buf.get_u16_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
|
||||||
|
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u16_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u16_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u16_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u16_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__u16__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u16_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u16__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 16 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09 hello"[..];*//** assert_eq!(0x0809, buf.get_i16());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i16<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i16::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i16::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i16::v_SIZE,}),(|src| {unsafe {core::num::impl__i16__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i16::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i16__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 16 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x09\x08 hello"[..];*//** assert_eq!(0x0809, buf.get_i16_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i16_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i16_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i16_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i16_le::v_SIZE,}),(|src| {unsafe {core::num::impl__i16__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i16_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i16__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
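The copy-to-stack fallback in these getters is only exercised when a value straddles non-contiguous chunks. One way to hit it with the real `bytes` API, assuming the crate is available as a dependency, is to chain two one-byte slices:

```rust
use bytes::Buf;

fn main() {
    // Two one-byte chunks: the i16 straddles them, so the getter takes the
    // copy_to_slice fallback instead of the contiguous chunk() fast path.
    let mut buf = (&b"\x09"[..]).chain(&b"\x08"[..]);
    assert_eq!(0x0809, buf.get_i16_le());
}
```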
/** Gets a signed 16 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09 hello",*//** false => b"\x09\x08 hello",*//** };*//** assert_eq!(0x0809, buf.get_i16_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i16_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i16_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i16_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i16_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__i16__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i16_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i16__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned 32 bit integer from `self` in the big-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09\xA0\xA1 hello"[..];*//** assert_eq!(0x0809A0A1, buf.get_u32());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u32<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u32::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u32::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u32::v_SIZE,}),(|src| {unsafe {core::num::impl__u32__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u32::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u32__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
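Each panicking getter above is a thin wrapper that evaluates the fallible body and routes a shortfall through `bytes::panic_advance`. A hedged sketch of that wrapper follows, redefining the illustrative `TryGetError` from the earlier sketch; the panic message here is made up, not the crate's exact wording.

```rust
// Same illustrative TryGetError as in the earlier sketch.
struct TryGetError {
    requested: usize,
    available: usize,
}

// Mirrors the `unwrap_or_else(.., |error| panic_advance(&error))` pattern in
// the extraction: a shortfall becomes a panic reporting requested/available.
fn unwrap_or_panic<T>(res: Result<T, TryGetError>) -> T {
    res.unwrap_or_else(|e| {
        panic!(
            "not enough bytes remaining: requested {}, available {}",
            e.requested, e.available
        )
    })
}
```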
/** Gets an unsigned 32 bit integer from `self` in the little-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\xA1\xA0\x09\x08 hello"[..];*//** assert_eq!(0x0809A0A1, buf.get_u32_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u32_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u32_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u32_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u32_le::v_SIZE,}),(|src| {unsafe {core::num::impl__u32__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u32_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u32__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned 32 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09\xA0\xA1 hello",*//** false => b"\xA1\xA0\x09\x08 hello",*//** };*//** assert_eq!(0x0809A0A1, buf.get_u32_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u32_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u32_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u32_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u32_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__u32__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u32_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u32__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 32 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09\xA0\xA1 hello"[..];*//** assert_eq!(0x0809A0A1, buf.get_i32());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i32<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i32::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i32::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i32::v_SIZE,}),(|src| {unsafe {core::num::impl__i32__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i32::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i32__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 32 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\xA1\xA0\x09\x08 hello"[..];*//** assert_eq!(0x0809A0A1, buf.get_i32_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i32_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i32_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i32_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i32_le::v_SIZE,}),(|src| {unsafe {core::num::impl__i32__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i32_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i32__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 32 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09\xA0\xA1 hello",*//** false => b"\xA1\xA0\x09\x08 hello",*//** };*//** assert_eq!(0x0809A0A1, buf.get_i32_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i32_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i32_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i32_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i32_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__i32__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i32_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i32__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned 64 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08 hello"[..];*//** assert_eq!(0x0102030405060708, buf.get_u64());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u64<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u64::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u64::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u64::v_SIZE,}),(|src| {unsafe {core::num::impl__u64__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u64::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u64__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned 64 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(0x0102030405060708, buf.get_u64_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u64_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u64_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u64_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u64_le::v_SIZE,}),(|src| {unsafe {core::num::impl__u64__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u64_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u64__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned 64 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08 hello",*//** false => b"\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(0x0102030405060708, buf.get_u64_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u64_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u64_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u64_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u64_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__u64__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u64_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u64__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 64 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08 hello"[..];*//** assert_eq!(0x0102030405060708, buf.get_i64());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i64<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i64::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i64::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i64::v_SIZE,}),(|src| {unsafe {core::num::impl__i64__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i64::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i64__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 64 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(0x0102030405060708, buf.get_i64_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i64_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i64_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i64_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i64_le::v_SIZE,}),(|src| {unsafe {core::num::impl__i64__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i64_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i64__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 64 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08 hello",*//** false => b"\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(0x0102030405060708, buf.get_i64_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i64_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i64_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i64_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i64_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__i64__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i64_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i64__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned 128 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello"[..];*//** assert_eq!(0x01020304050607080910111213141516, buf.get_u128());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u128<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u128::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u128::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u128::v_SIZE,}),(|src| {unsafe {core::num::impl__u128__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u128::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u128__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
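The 128-bit doc example above is easy to misread because every escape in the byte string happens to look like a pair of hex digits. Spelled out, the sixteen bytes 0x01..0x08, 0x09, 0x10..0x16 concatenate (big-endian) into the constant in the assertion; a quick self-contained check:

```rust
fn main() {
    // The escapes \x09, \x10, ..., \x16 are the bytes 0x09, 0x10, ..., 0x16,
    // which is why the big-endian constant reads ...0910111213141516.
    let raw: [u8; 16] = *b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16";
    assert_eq!(
        0x01020304050607080910111213141516u128,
        u128::from_be_bytes(raw)
    );
}
```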
/** Gets an unsigned 128 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(0x01020304050607080910111213141516, buf.get_u128_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u128_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u128_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u128_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u128_le::v_SIZE,}),(|src| {unsafe {core::num::impl__u128__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u128_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u128__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned 128 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello",*//** false => b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(0x01020304050607080910111213141516, buf.get_u128_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_u128_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_u128_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_u128_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_u128_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__u128__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_u128_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u128__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 128 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello"[..];*//** assert_eq!(0x01020304050607080910111213141516, buf.get_i128());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i128<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i128::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i128::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i128::v_SIZE,}),(|src| {unsafe {core::num::impl__i128__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i128::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i128__from_be_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 128 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(0x01020304050607080910111213141516, buf.get_i128_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i128_le<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i128_le::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i128_le::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i128_le::v_SIZE,}),(|src| {unsafe {core::num::impl__i128__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i128_le::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i128__from_le_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets a signed 128 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello",*//** false => b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(0x01020304050607080910111213141516, buf.get_i128_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_i128_ne<Anonymous: 'unk>((self: &mut Self)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::get_i128_ne::v_SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::get_i128_ne::v_SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::get_i128_ne::v_SIZE,}),(|src| {unsafe {core::num::impl__i128__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::get_i128_ne::v_SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i128__from_ne_bytes(buf)))}}})}})}}}})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned n-byte integer from `self` in big-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03 hello"[..];*//** assert_eq!(0x010203, buf.get_uint(3));*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`, or*//** if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_uint<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {rust_primitives::hax::never_to_any({let slice_at: int = {(match (core::num::impl__usize__checked_sub(bytes::buf::buf_impl::Buf::get_uint::v_SIZE,nbytes)) {core::option::Option_Some(slice_at) => {slice_at},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(bytes::buf::buf_impl::Buf::get_uint::v_SIZE,nbytes))}})};{let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {(match (bytes::buf::buf_impl::f_try_copy_to_slice(&mut (deref(self)),&mut (deref(&mut (deref(core::ops::index::f_index_mut(&mut (buf),core::ops::range::RangeFrom{f_start:slice_at,}))))))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})};{(return core::result::Result_Ok(core::num::impl__u64__from_be_bytes(buf)))}}}})})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
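`get_uint` reads an `nbytes`-wide big-endian integer by copying the input into the tail of an 8-byte buffer, so `u64::from_be_bytes` sees it right-aligned, and the `checked_sub` is what turns `nbytes > 8` into the documented panic. A standalone sketch of that arithmetic (hypothetical helper, not the crate's code):

```rust
// Hypothetical helper: read an nbytes-wide big-endian integer by
// right-aligning the input in an 8-byte buffer.
fn get_uint_be_sketch(src: &[u8], nbytes: usize) -> u64 {
    const SIZE: usize = 8; // size_of::<u64>()
    // checked_sub is what turns nbytes > 8 into a panic
    // (panic_does_not_fit in the extraction; a plain expect here).
    let slice_at = SIZE
        .checked_sub(nbytes)
        .expect("cannot fit more than 8 bytes into a u64");
    let mut buf = [0u8; SIZE];
    // The real implementation fills this region via Buf::try_copy_to_slice.
    buf[slice_at..].copy_from_slice(&src[..nbytes]);
    u64::from_be_bytes(buf)
}

// e.g. get_uint_be_sketch(b"\x01\x02\x03", 3) == 0x010203
```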
/** Gets an unsigned n-byte integer from `self` in little-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x03\x02\x01 hello"[..];*//** assert_eq!(0x010203, buf.get_uint_le(3));*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`, or*//** if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_uint_le<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> int{rust_primitives::hax::never_to_any({{(return core::result::impl__unwrap_or_else::<int,bytes::t_TryGetError,arrow!(bytes::t_TryGetError -> int)>(core::ops::function::f_call_mut(&mut ((|_| {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let subslice: &mut [int] = {(match (core::slice::impl__get_mut::<int,core::ops::range::t_RangeTo<int>>(rust_primitives::unsize(&mut (buf)),core::ops::range::RangeTo{f_end:nbytes,})) {core::option::Option_Some(subslice) => {subslice},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(bytes::buf::buf_impl::Buf::get_uint_le::v_SIZE,nbytes))}})};{let _: tuple0 = {(match (bytes::buf::buf_impl::f_try_copy_to_slice(&mut (deref(self)),&mut (deref(subslice)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})};{(return core::result::Result_Ok(core::num::impl__u64__from_le_bytes(buf)))}}}})})),Tuple0()),(|error| {rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(error)))))})))}})}
/** Gets an unsigned n-byte integer from `self` in native-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03 hello",*//** false => b"\x03\x02\x01 hello",*//** };*//** assert_eq!(0x010203, buf.get_uint_ne(3));*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`, or*//** if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_uint_ne<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> int{{(if false{{bytes::buf::buf_impl::f_get_uint(&mut (deref(self)),nbytes)}} else {{bytes::buf::buf_impl::f_get_uint_le(&mut (deref(self)),nbytes)}})}}
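The `if false { f_get_uint(..) } else { f_get_uint_le(..) }` body above is the cfg!(target_endian = "big") check from the doc example expanded on a little-endian extraction host; in source form the method simply dispatches on the target's endianness. A self-contained sketch of that native-endian selection (helper names are illustrative):

// Native-endian n-byte unsigned read: pick the big- or little-endian layout
// according to the compilation target, mirroring get_uint_ne above.
fn read_uint_ne(src: &[u8], nbytes: usize) -> u64 {
    assert!((1..=8).contains(&nbytes) && src.len() >= nbytes);
    let mut buf = [0u8; 8];
    if cfg!(target_endian = "big") {
        buf[8 - nbytes..].copy_from_slice(&src[..nbytes]);
        u64::from_be_bytes(buf)
    } else {
        buf[..nbytes].copy_from_slice(&src[..nbytes]);
        u64::from_le_bytes(buf)
    }
}

fn main() {
    // Matches the doc example above for get_uint_ne(3).
    let be = [0x01u8, 0x02, 0x03];
    let le = [0x03u8, 0x02, 0x01];
    let src: &[u8] = if cfg!(target_endian = "big") { &be[..] } else { &le[..] };
    assert_eq!(read_uint_ne(src, 3), 0x010203);
}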
/** Gets a signed n-byte integer from `self` in big-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03 hello"[..];*//** assert_eq!(0x010203, buf.get_int(3));*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`, or*//** if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_int<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> int{{bytes::buf::buf_impl::sign_extend(bytes::buf::buf_impl::f_get_uint(&mut (deref(self)),nbytes),nbytes)}}
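f_get_int above is just the unsigned read followed by sign_extend: the n-byte value is shifted to the top of a 64-bit word and arithmetically shifted back so its high bit becomes the sign. One common way to write that sign_extend step, as a sketch for 1 <= nbytes <= 8 (the crate's actual implementation may differ):

// Sign-extend an n-byte unsigned value into an i64, as used by get_int/get_int_le above.
fn sign_extend(val: u64, nbytes: usize) -> i64 {
    assert!((1..=8).contains(&nbytes));
    let shift = (8 - nbytes) * 8;
    // Move the value's top byte into bit 63, then arithmetic-shift back down.
    ((val << shift) as i64) >> shift
}

fn main() {
    // 0xFFFFFE read as a 3-byte signed big-endian integer is -2.
    assert_eq!(sign_extend(0x00FF_FFFE, 3), -2);
    // Matches the doc example above: get_int(3) on b"\x01\x02\x03 hello" is 0x010203.
    assert_eq!(sign_extend(0x0001_0203, 3), 0x010203);
}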
/** Gets a signed n-byte integer from `self` in little-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x03\x02\x01 hello"[..];*//** assert_eq!(0x010203, buf.get_int_le(3));*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`, or*//** if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_int_le<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> int{{bytes::buf::buf_impl::sign_extend(bytes::buf::buf_impl::f_get_uint_le(&mut (deref(self)),nbytes),nbytes)}}
/** Gets a signed n-byte integer from `self` in native-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03 hello",*//** false => b"\x03\x02\x01 hello",*//** };*//** assert_eq!(0x010203, buf.get_int_ne(3));*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`, or*//** if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_int_ne<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> int{{(if false{{bytes::buf::buf_impl::f_get_int(&mut (deref(self)),nbytes)}} else {{bytes::buf::buf_impl::f_get_int_le(&mut (deref(self)),nbytes)}})}}
/** Gets an IEEE754 single-precision (4 bytes) floating point number from*//** `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x3F\x99\x99\x9A hello"[..];*//** assert_eq!(1.2f32, buf.get_f32());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_f32<Anonymous: 'unk>((self: &mut Self)) -> float{{core::f32::impl__f32__from_bits(bytes::buf::buf_impl::f_get_u32(&mut (deref(self))))}}
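As the body above shows, get_f32 is nothing more than the big-endian get_u32 read with the bits reinterpreted as an IEEE 754 single. A standalone check of the bytes from the doc example:

fn main() {
    // 0x3F99999A are the IEEE 754 bits of 1.2f32, as in the get_f32 doc example above.
    let bits = u32::from_be_bytes([0x3F, 0x99, 0x99, 0x9A]);
    assert_eq!(f32::from_bits(bits), 1.2f32);
}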
/** Gets an IEEE754 single-precision (4 bytes) floating point number from*//** `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x9A\x99\x99\x3F hello"[..];*//** assert_eq!(1.2f32, buf.get_f32_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_f32_le<Anonymous: 'unk>((self: &mut Self)) -> float{{core::f32::impl__f32__from_bits(bytes::buf::buf_impl::f_get_u32_le(&mut (deref(self))))}}
/** Gets an IEEE754 single-precision (4 bytes) floating point number from*//** `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x3F\x99\x99\x9A hello",*//** false => b"\x9A\x99\x99\x3F hello",*//** };*//** assert_eq!(1.2f32, buf.get_f32_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_f32_ne<Anonymous: 'unk>((self: &mut Self)) -> float{{core::f32::impl__f32__from_bits(bytes::buf::buf_impl::f_get_u32_ne(&mut (deref(self))))}}
/** Gets an IEEE754 double-precision (8 bytes) floating point number from*//** `self` in big-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x3F\xF3\x33\x33\x33\x33\x33\x33 hello"[..];*//** assert_eq!(1.2f64, buf.get_f64());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_f64<Anonymous: 'unk>((self: &mut Self)) -> float{{core::f64::impl__f64__from_bits(bytes::buf::buf_impl::f_get_u64(&mut (deref(self))))}}
/** Gets an IEEE754 double-precision (8 bytes) floating point number from*//** `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x33\x33\x33\x33\x33\x33\xF3\x3F hello"[..];*//** assert_eq!(1.2f64, buf.get_f64_le());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_f64_le<Anonymous: 'unk>((self: &mut Self)) -> float{{core::f64::impl__f64__from_bits(bytes::buf::buf_impl::f_get_u64_le(&mut (deref(self))))}}
/** Gets an IEEE754 double-precision (8 bytes) floating point number from*//** `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x3F\xF3\x33\x33\x33\x33\x33\x33 hello",*//** false => b"\x33\x33\x33\x33\x33\x33\xF3\x3F hello",*//** };*//** assert_eq!(1.2f64, buf.get_f64_ne());*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining data in `self`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_get_f64_ne<Anonymous: 'unk>((self: &mut Self)) -> float{{core::f64::impl__f64__from_bits(bytes::buf::buf_impl::f_get_u64_ne(&mut (deref(self))))}}
/** Copies bytes from `self` into `dst`.*//***//** The cursor is advanced by the number of bytes copied. `self` must have*//** enough remaining bytes to fill `dst`.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"hello world"[..];*//** let mut dst = [0; 5];*//***//** assert_eq!(Ok(()), buf.try_copy_to_slice(&mut dst));*//** assert_eq!(&b"hello"[..], &dst);*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"hello world"[..];*//** let mut dst = [0; 12];*//***//** assert_eq!(Err(TryGetError{requested: 12, available: 11}), buf.try_copy_to_slice(&mut dst));*//** assert_eq!(11, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_copy_to_slice<Anonymous: 'unk, Anonymous: 'unk>((self: &mut Self,mut dst: &mut [int])) -> core::result::t_Result<tuple0, bytes::t_TryGetError>{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),core::slice::impl__len::<int>(&(deref(dst)))){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:core::slice::impl__len::<int>(&(deref(dst))),f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let _: tuple0 = {{loop { {(if core::ops::bit::Not::not(core::slice::impl__is_empty::<int>(&(deref(dst)))){{let src: &[int] = {bytes::buf::buf_impl::f_chunk(&(self))};{let cnt: int = {core::cmp::f_min(core::slice::impl__len::<int>(&(deref(src))),core::slice::impl__len::<int>(&(deref(dst))))};{let _: tuple0 = {core::slice::impl__copy_from_slice::<int>(&mut (deref(core::ops::index::f_index_mut(&mut (deref(dst)),core::ops::range::RangeTo{f_end:cnt,}))),&(deref(&(deref(core::ops::index::f_index(&(deref(src)),core::ops::range::RangeTo{f_end:cnt,}))))))};{let _: tuple0 = {(dst = &mut (deref(&mut (deref(core::ops::index::f_index_mut(&mut (deref(dst)),core::ops::range::RangeFrom{f_start:cnt,}))))))};{let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),cnt)};Tuple0}}}}}} else {rust_primitives::hax::never_to_any({rust_primitives::hax::never_to_any((break (Tuple0)))})})} }}};{core::result::Result_Ok(Tuple0())}}}}
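f_try_copy_to_slice above first checks remaining() against dst.len(), then loops: take the current chunk, copy min(chunk.len(), dst.len()) bytes, shrink dst, and advance. For a contiguous byte-slice buffer the loop collapses to a single copy; a minimal sketch specialised to that case (the (requested, available) tuple is an illustrative stand-in for TryGetError):

// Copy dst.len() bytes from a slice cursor, advancing it, or report
// (requested, available) when the source is too short.
fn try_copy_to_slice(src: &mut &[u8], dst: &mut [u8]) -> Result<(), (usize, usize)> {
    if src.len() < dst.len() {
        return Err((dst.len(), src.len()));
    }
    let (head, tail) = src.split_at(dst.len());
    dst.copy_from_slice(head);
    *src = tail; // advance the cursor past the copied bytes
    Ok(())
}

fn main() {
    // Mirrors the doc example above.
    let mut buf: &[u8] = b"hello world";
    let mut dst = [0u8; 5];
    assert_eq!(Ok(()), try_copy_to_slice(&mut buf, &mut dst));
    assert_eq!(&dst, b"hello");
    assert_eq!(6, buf.len());
}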
/** Gets an unsigned 8 bit integer from `self`.*//***//** The current position is advanced by 1.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08 hello"[..];*//** assert_eq!(Ok(0x08_u8), buf.try_get_u8());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b""[..];*//** assert_eq!(Err(TryGetError{requested: 1, available: 0}), buf.try_get_u8());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u8<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),1){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:1,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: int = {core::ops::index::Index::index(deref(bytes::buf::buf_impl::f_chunk(&(self))),0)};{let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),1)};{core::result::Result_Ok(ret)}}}}}
/** Gets a signed 8 bit integer from `self`.*//***//** The current position is advanced by 1.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08 hello"[..];*//** assert_eq!(Ok(0x08_i8), buf.try_get_i8());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b""[..];*//** assert_eq!(Err(TryGetError{requested: 1, available: 0}), buf.try_get_i8());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i8<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),1){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:1,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: int = {cast(core::ops::index::Index::index(deref(bytes::buf::buf_impl::f_chunk(&(self))),0))};{let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),1)};{core::result::Result_Ok(ret)}}}}}
/** Gets an unsigned 16 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 2.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09 hello"[..];*//** assert_eq!(Ok(0x0809_u16), buf.try_get_u16());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08"[..];*//** assert_eq!(Err(TryGetError{requested: 2, available: 1}), buf.try_get_u16());*//** assert_eq!(1, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u16<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u16__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u16__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u16__SIZE,}),(|src| {unsafe {core::num::impl__u16__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u16__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u16__from_be_bytes(buf)))}}})}})}}}}}
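All of the try_get_* methods below follow the same shape visible in f_try_get_u16 above: check remaining() against the value's size, decode the leading bytes, then advance by that size. A self-contained sketch of the pattern over a slice cursor, using the doc example's input (the error tuple stands in for TryGetError { requested, available }; the helper name is illustrative):

fn try_get_u16_be(src: &mut &[u8]) -> Result<u16, (usize, usize)> {
    const SIZE: usize = 2;
    if src.len() < SIZE {
        return Err((SIZE, src.len()));
    }
    let (head, tail) = src.split_at(SIZE);
    let val = u16::from_be_bytes(head.try_into().expect("exactly SIZE bytes"));
    *src = tail; // advance past the consumed bytes
    Ok(val)
}

fn main() {
    let mut buf: &[u8] = b"\x08\x09 hello";
    assert_eq!(Ok(0x0809), try_get_u16_be(&mut buf));
    assert_eq!(6, buf.len());

    let mut short: &[u8] = b"\x08";
    assert_eq!(Err((2, 1)), try_get_u16_be(&mut short));
}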
/** Gets an unsigned 16 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 2.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x09\x08 hello"[..];*//** assert_eq!(Ok(0x0809_u16), buf.try_get_u16_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08"[..];*//** assert_eq!(Err(TryGetError{requested: 2, available: 1}), buf.try_get_u16_le());*//** assert_eq!(1, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u16_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u16_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u16_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u16_le__SIZE,}),(|src| {unsafe {core::num::impl__u16__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u16_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u16__from_le_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 16 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 2.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09 hello",*//** false => b"\x09\x08 hello",*//** };*//** assert_eq!(Ok(0x0809_u16), buf.try_get_u16_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08"[..];*//** assert_eq!(Err(TryGetError{requested: 2, available: 1}), buf.try_get_u16_ne());*//** assert_eq!(1, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u16_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u16_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u16_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u16_ne__SIZE,}),(|src| {unsafe {core::num::impl__u16__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u16_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u16__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets a signed 16 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 2.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09 hello"[..];*//** assert_eq!(Ok(0x0809_i16), buf.try_get_i16());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08"[..];*//** assert_eq!(Err(TryGetError{requested: 2, available: 1}), buf.try_get_i16());*//** assert_eq!(1, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i16<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i16__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i16__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i16__SIZE,}),(|src| {unsafe {core::num::impl__i16__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i16__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i16__from_be_bytes(buf)))}}})}})}}}}}
/** Gets a signed 16 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 2.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x09\x08 hello"[..];*//** assert_eq!(Ok(0x0809_i16), buf.try_get_i16_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08"[..];*//** assert_eq!(Err(TryGetError{requested: 2, available: 1}), buf.try_get_i16_le());*//** assert_eq!(1, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i16_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i16_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i16_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i16_le__SIZE,}),(|src| {unsafe {core::num::impl__i16__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i16_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i16__from_le_bytes(buf)))}}})}})}}}}}
/** Gets a signed 16 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 2.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09 hello",*//** false => b"\x09\x08 hello",*//** };*//** assert_eq!(Ok(0x0809_i16), buf.try_get_i16_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08"[..];*//** assert_eq!(Err(TryGetError{requested: 2, available: 1}), buf.try_get_i16_ne());*//** assert_eq!(1, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i16_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i16_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i16_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i16_ne__SIZE,}),(|src| {unsafe {core::num::impl__i16__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i16_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;2] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,2))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i16__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 32 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09\xA0\xA1 hello"[..];*//** assert_eq!(Ok(0x0809A0A1), buf.try_get_u32());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_u32());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u32<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u32__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u32__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u32__SIZE,}),(|src| {unsafe {core::num::impl__u32__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u32__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u32__from_be_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 32 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\xA1\xA0\x09\x08 hello"[..];*//** assert_eq!(Ok(0x0809A0A1_u32), buf.try_get_u32_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08\x09\xA0"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_u32_le());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u32_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u32_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u32_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u32_le__SIZE,}),(|src| {unsafe {core::num::impl__u32__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u32_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u32__from_le_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 32 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09\xA0\xA1 hello",*//** false => b"\xA1\xA0\x09\x08 hello",*//** };*//** assert_eq!(Ok(0x0809A0A1_u32), buf.try_get_u32_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08\x09\xA0"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_u32_ne());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u32_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u32_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u32_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u32_ne__SIZE,}),(|src| {unsafe {core::num::impl__u32__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u32_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u32__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets a signed 32 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x09\xA0\xA1 hello"[..];*//** assert_eq!(Ok(0x0809A0A1_i32), buf.try_get_i32());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_i32());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i32<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i32__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i32__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i32__SIZE,}),(|src| {unsafe {core::num::impl__i32__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i32__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i32__from_be_bytes(buf)))}}})}})}}}}}
/** Gets a signed 32 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\xA1\xA0\x09\x08 hello"[..];*//** assert_eq!(Ok(0x0809A0A1_i32), buf.try_get_i32_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08\x09\xA0"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_i32_le());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i32_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i32_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i32_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i32_le__SIZE,}),(|src| {unsafe {core::num::impl__i32__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i32_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i32__from_le_bytes(buf)))}}})}})}}}}}
/** Gets a signed 32 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x08\x09\xA0\xA1 hello",*//** false => b"\xA1\xA0\x09\x08 hello",*//** };*//** assert_eq!(Ok(0x0809A0A1_i32), buf.try_get_i32_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08\x09\xA0"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_i32_ne());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i32_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i32_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i32_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i32_ne__SIZE,}),(|src| {unsafe {core::num::impl__i32__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i32_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;4] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,4))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i32__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 64 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08 hello"[..];*//** assert_eq!(Ok(0x0102030405060708_u64), buf.try_get_u64());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_u64());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u64<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u64__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u64__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u64__SIZE,}),(|src| {unsafe {core::num::impl__u64__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u64__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u64__from_be_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 64 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(Ok(0x0102030405060708_u64), buf.try_get_u64_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08\x07\x06\x05\x04\x03\x02"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_u64_le());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u64_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u64_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u64_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u64_le__SIZE,}),(|src| {unsafe {core::num::impl__u64__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u64_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u64__from_le_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 64 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08 hello",*//** false => b"\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(Ok(0x0102030405060708_u64), buf.try_get_u64_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_u64_ne());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u64_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u64_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u64_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u64_ne__SIZE,}),(|src| {unsafe {core::num::impl__u64__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u64_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u64__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets a signed 64 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08 hello"[..];*//** assert_eq!(Ok(0x0102030405060708_i64), buf.try_get_i64());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_i64());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i64<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i64__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i64__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i64__SIZE,}),(|src| {unsafe {core::num::impl__i64__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i64__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i64__from_be_bytes(buf)))}}})}})}}}}}
/** Gets a signed 64 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(Ok(0x0102030405060708_i64), buf.try_get_i64_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x08\x07\x06\x05\x04\x03\x02"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_i64_le());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i64_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i64_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i64_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i64_le__SIZE,}),(|src| {unsafe {core::num::impl__i64__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i64_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i64__from_le_bytes(buf)))}}})}})}}}}}
/** Gets a signed 64 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08 hello",*//** false => b"\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(Ok(0x0102030405060708_i64), buf.try_get_i64_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_i64_ne());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i64_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i64_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i64_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i64_ne__SIZE,}),(|src| {unsafe {core::num::impl__i64__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i64_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i64__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 128 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 16.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello"[..];*//** assert_eq!(Ok(0x01020304050607080910111213141516_u128), buf.try_get_u128());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15"[..];*//** assert_eq!(Err(TryGetError{requested: 16, available: 15}), buf.try_get_u128());*//** assert_eq!(15, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u128<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u128__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u128__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u128__SIZE,}),(|src| {unsafe {core::num::impl__u128__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u128__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u128__from_be_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 128 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 16.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(Ok(0x01020304050607080910111213141516_u128), buf.try_get_u128_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02"[..];*//** assert_eq!(Err(TryGetError{requested: 16, available: 15}), buf.try_get_u128_le());*//** assert_eq!(15, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u128_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u128_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u128_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u128_le__SIZE,}),(|src| {unsafe {core::num::impl__u128__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u128_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u128__from_le_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned 128 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 16.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello",*//** false => b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(Ok(0x01020304050607080910111213141516_u128), buf.try_get_u128_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15"[..];*//** assert_eq!(Err(TryGetError{requested: 16, available: 15}), buf.try_get_u128_ne());*//** assert_eq!(15, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_u128_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_u128_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_u128_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_u128_ne__SIZE,}),(|src| {unsafe {core::num::impl__u128__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_u128_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__u128__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets a signed 128 bit integer from `self` in big-endian byte order.*//***//** The current position is advanced by 16.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello"[..];*//** assert_eq!(Ok(0x01020304050607080910111213141516_i128), buf.try_get_i128());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15"[..];*//** assert_eq!(Err(TryGetError{requested: 16, available: 15}), buf.try_get_i128());*//** assert_eq!(15, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i128<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i128__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i128__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i128__SIZE,}),(|src| {unsafe {core::num::impl__i128__from_be_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i128__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i128__from_be_bytes(buf)))}}})}})}}}}}
/** Gets a signed 128 bit integer from `self` in little-endian byte order.*//***//** The current position is advanced by 16.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello"[..];*//** assert_eq!(Ok(0x01020304050607080910111213141516_i128), buf.try_get_i128_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02"[..];*//** assert_eq!(Err(TryGetError{requested: 16, available: 15}), buf.try_get_i128_le());*//** assert_eq!(15, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i128_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i128_le__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i128_le__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i128_le__SIZE,}),(|src| {unsafe {core::num::impl__i128__from_le_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i128_le__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i128__from_le_bytes(buf)))}}})}})}}}}}
/** Gets a signed 128 bit integer from `self` in native-endian byte order.*//***//** The current position is advanced by 16.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16 hello",*//** false => b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01 hello",*//** };*//** assert_eq!(Ok(0x01020304050607080910111213141516_i128), buf.try_get_i128_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15"[..];*//** assert_eq!(Err(TryGetError{requested: 16, available: 15}), buf.try_get_i128_ne());*//** assert_eq!(15, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_i128_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<int, bytes::t_TryGetError>{{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),bytes::buf::buf_impl::Buf::try_get_i128_ne__SIZE){rust_primitives::hax::never_to_any({(return core::result::Result_Err(bytes::TryGetError{f_requested:bytes::buf::buf_impl::Buf::try_get_i128_ne__SIZE,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))})})};{let ret: core::option::t_Option<int> = {core::option::impl__map::<&[int],int,arrow!(&[int] -> int)>(core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(&(deref(bytes::buf::buf_impl::f_chunk(&(self)))),core::ops::range::RangeTo{f_end:bytes::buf::buf_impl::Buf::try_get_i128_ne__SIZE,}),(|src| {unsafe {core::num::impl__i128__from_ne_bytes(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","deref")(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","cast(address_of)")))}}))};{(match (ret) {core::option::Option_Some(ret) => {rust_primitives::hax::never_to_any({let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (deref(self)),bytes::buf::buf_impl::Buf::try_get_i128_ne__SIZE)};{(return core::result::Result_Ok(ret))}})},_ => {rust_primitives::hax::never_to_any({let mut buf: [int;16] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,16))};{let _: tuple0 = {bytes::buf::buf_impl::f_copy_to_slice(&mut (deref(self)),rust_primitives::unsize(&mut (deref(&mut (buf)))))};{(return core::result::Result_Ok(core::num::impl__i128__from_ne_bytes(buf)))}}})}})}}}}}
/** Gets an unsigned n-byte integer from `self` in big-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03 hello"[..];*//** assert_eq!(Ok(0x010203_u64), buf.try_get_uint(3));*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_uint(4));*//** assert_eq!(3, buf.remaining());*//** ```*//***//** # Panics*//***//** This function panics if `nbytes` > 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_uint<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> core::result::t_Result<int, bytes::t_TryGetError>{rust_primitives::hax::never_to_any({{let slice_at: int = {(match (core::num::impl__usize__checked_sub(bytes::buf::buf_impl::Buf::try_get_uint__SIZE,nbytes)) {core::option::Option_Some(slice_at) => {slice_at},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(bytes::buf::buf_impl::Buf::try_get_uint__SIZE,nbytes))}})};{let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {(match (bytes::buf::buf_impl::f_try_copy_to_slice(&mut (deref(self)),&mut (deref(&mut (deref(core::ops::index::f_index_mut(&mut (buf),core::ops::range::RangeFrom{f_start:slice_at,}))))))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})};{(return core::result::Result_Ok(core::num::impl__u64__from_be_bytes(buf)))}}}}})}
/** Gets an unsigned n-byte integer from `self` in little-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x03\x02\x01 hello"[..];*//** assert_eq!(Ok(0x010203_u64), buf.try_get_uint_le(3));*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_uint_le(4));*//** assert_eq!(3, buf.remaining());*//** ```*//***//** # Panics*//***//** This function panics if `nbytes` > 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_uint_le<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> core::result::t_Result<int, bytes::t_TryGetError>{rust_primitives::hax::never_to_any({{let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let subslice: &mut [int] = {(match (core::slice::impl__get_mut::<int,core::ops::range::t_RangeTo<int>>(rust_primitives::unsize(&mut (buf)),core::ops::range::RangeTo{f_end:nbytes,})) {core::option::Option_Some(subslice) => {subslice},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(bytes::buf::buf_impl::Buf::try_get_uint_le__SIZE,nbytes))}})};{let _: tuple0 = {(match (bytes::buf::buf_impl::f_try_copy_to_slice(&mut (deref(self)),&mut (deref(subslice)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})};{(return core::result::Result_Ok(core::num::impl__u64__from_le_bytes(buf)))}}}}})}
/** Gets an unsigned n-byte integer from `self` in native-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03 hello",*//** false => b"\x03\x02\x01 hello",*//** };*//** assert_eq!(Ok(0x010203_u64), buf.try_get_uint_ne(3));*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03",*//** false => b"\x03\x02\x01",*//** };*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_uint_ne(4));*//** assert_eq!(3, buf.remaining());*//** ```*//***//** # Panics*//***//** This function panics if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_uint_ne<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> core::result::t_Result<int, bytes::t_TryGetError>{{(if false{{bytes::buf::buf_impl::f_try_get_uint(&mut (deref(self)),nbytes)}} else {{bytes::buf::buf_impl::f_try_get_uint_le(&mut (deref(self)),nbytes)}})}}
/** Gets a signed n-byte integer from `self` in big-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x01\x02\x03 hello"[..];*//** assert_eq!(Ok(0x010203_i64), buf.try_get_int(3));*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_int(4));*//** assert_eq!(3, buf.remaining());*//** ```*//***//** # Panics*//***//** This function panics if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_int<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> core::result::t_Result<int, bytes::t_TryGetError>{rust_primitives::hax::never_to_any({{let slice_at: int = {(match (core::num::impl__usize__checked_sub(bytes::buf::buf_impl::Buf::try_get_int__SIZE,nbytes)) {core::option::Option_Some(slice_at) => {slice_at},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(bytes::buf::buf_impl::Buf::try_get_int__SIZE,nbytes))}})};{let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let _: tuple0 = {(match (bytes::buf::buf_impl::f_try_copy_to_slice(&mut (deref(self)),&mut (deref(&mut (deref(core::ops::index::f_index_mut(&mut (buf),core::ops::range::RangeFrom{f_start:slice_at,}))))))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})};{(return core::result::Result_Ok(core::num::impl__i64__from_be_bytes(buf)))}}}}})}
/** Gets a signed n-byte integer from `self` in little-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x03\x02\x01 hello"[..];*//** assert_eq!(Ok(0x010203_i64), buf.try_get_int_le(3));*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x01\x02\x03"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_int_le(4));*//** assert_eq!(3, buf.remaining());*//** ```*//***//** # Panics*//***//** This function panics if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_int_le<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> core::result::t_Result<int, bytes::t_TryGetError>{rust_primitives::hax::never_to_any({{let mut buf: [int;8] = {alloc::boxed::impl__new(rust_primitives::hax::repeat(0,8))};{let subslice: &mut [int] = {(match (core::slice::impl__get_mut::<int,core::ops::range::t_RangeTo<int>>(rust_primitives::unsize(&mut (buf)),core::ops::range::RangeTo{f_end:nbytes,})) {core::option::Option_Some(subslice) => {subslice},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(bytes::buf::buf_impl::Buf::try_get_int_le__SIZE,nbytes))}})};{let _: tuple0 = {(match (bytes::buf::buf_impl::f_try_copy_to_slice(&mut (deref(self)),&mut (deref(subslice)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})};{(return core::result::Result_Ok(core::num::impl__i64__from_le_bytes(buf)))}}}}})}
/** Gets a signed n-byte integer from `self` in native-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03 hello",*//** false => b"\x03\x02\x01 hello",*//** };*//** assert_eq!(Ok(0x010203_i64), buf.try_get_int_ne(3));*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x01\x02\x03",*//** false => b"\x03\x02\x01",*//** };*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_int_ne(4));*//** assert_eq!(3, buf.remaining());*//** ```*//***//** # Panics*//***//** This function panics if `nbytes` is greater than 8.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_int_ne<Anonymous: 'unk>((self: &mut Self,nbytes: int)) -> core::result::t_Result<int, bytes::t_TryGetError>{{(if false{{bytes::buf::buf_impl::f_try_get_int(&mut (deref(self)),nbytes)}} else {{bytes::buf::buf_impl::f_try_get_int_le(&mut (deref(self)),nbytes)}})}}
/** Gets an IEEE754 single-precision (4 bytes) floating point number from*//** `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x3F\x99\x99\x9A hello"[..];*//** assert_eq!(1.2f32, buf.get_f32());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x3F\x99\x99"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_f32());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_f32<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<float, bytes::t_TryGetError>{{core::result::Result_Ok(core::f32::impl__f32__from_bits((match (bytes::buf::buf_impl::f_try_get_u32(&mut (deref(self)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})))}}
/** Gets an IEEE754 single-precision (4 bytes) floating point number from*//** `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x9A\x99\x99\x3F hello"[..];*//** assert_eq!(1.2f32, buf.get_f32_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x3F\x99\x99"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_f32_le());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_f32_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<float, bytes::t_TryGetError>{{core::result::Result_Ok(core::f32::impl__f32__from_bits((match (bytes::buf::buf_impl::f_try_get_u32_le(&mut (deref(self)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})))}}
/** Gets an IEEE754 single-precision (4 bytes) floating point number from*//** `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x3F\x99\x99\x9A hello",*//** false => b"\x9A\x99\x99\x3F hello",*//** };*//** assert_eq!(1.2f32, buf.get_f32_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x3F\x99\x99"[..];*//** assert_eq!(Err(TryGetError{requested: 4, available: 3}), buf.try_get_f32_ne());*//** assert_eq!(3, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_f32_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<float, bytes::t_TryGetError>{{core::result::Result_Ok(core::f32::impl__f32__from_bits((match (bytes::buf::buf_impl::f_try_get_u32_ne(&mut (deref(self)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})))}}
/** Gets an IEEE754 double-precision (8 bytes) floating point number from*//** `self` in big-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x3F\xF3\x33\x33\x33\x33\x33\x33 hello"[..];*//** assert_eq!(1.2f64, buf.get_f64());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x3F\xF3\x33\x33\x33\x33\x33"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_f64());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_f64<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<float, bytes::t_TryGetError>{{core::result::Result_Ok(core::f64::impl__f64__from_bits((match (bytes::buf::buf_impl::f_try_get_u64(&mut (deref(self)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})))}}
/** Gets an IEEE754 double-precision (8 bytes) floating point number from*//** `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf = &b"\x33\x33\x33\x33\x33\x33\xF3\x3F hello"[..];*//** assert_eq!(1.2f64, buf.get_f64_le());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x3F\xF3\x33\x33\x33\x33\x33"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_f64_le());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_f64_le<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<float, bytes::t_TryGetError>{{core::result::Result_Ok(core::f64::impl__f64__from_bits((match (bytes::buf::buf_impl::f_try_get_u64_le(&mut (deref(self)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})))}}
/** Gets an IEEE754 double-precision (8 bytes) floating point number from*//** `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** Returns `Err(TryGetError)` when there are not enough*//** remaining bytes to read the value.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut buf: &[u8] = match cfg!(target_endian = "big") {*//** true => b"\x3F\xF3\x33\x33\x33\x33\x33\x33 hello",*//** false => b"\x33\x33\x33\x33\x33\x33\xF3\x3F hello",*//** };*//** assert_eq!(1.2f64, buf.get_f64_ne());*//** assert_eq!(6, buf.remaining());*//** ```*//***//** ```*//** use bytes::{Buf, TryGetError};*//***//** let mut buf = &b"\x3F\xF3\x33\x33\x33\x33\x33"[..];*//** assert_eq!(Err(TryGetError{requested: 8, available: 7}), buf.try_get_f64_ne());*//** assert_eq!(7, buf.remaining());*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_try_get_f64_ne<Anonymous: 'unk>((self: &mut Self)) -> core::result::t_Result<float, bytes::t_TryGetError>{{core::result::Result_Ok(core::f64::impl__f64__from_bits((match (bytes::buf::buf_impl::f_try_get_u64_ne(&mut (deref(self)))) {core::result::Result_Ok(ok) => {ok},core::result::Result_Err(err) => {(return core::result::Result_Err(err))}})))}}
/** Consumes `len` bytes inside self and returns new instance of `Bytes`*//** with this data.*//***//** This function may be optimized by the underlying type to avoid actual*//** copies. For example, `Bytes` implementation will do a shallow copy*//** (ref-count increment).*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let bytes = (&b"hello world"[..]).copy_to_bytes(5);*//** assert_eq!(&bytes[..], &b"hello"[..]);*//** ```*//***//** # Panics*//***//** This function panics if `len > self.remaining()`.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_copy_to_bytes<Anonymous: 'unk>((self: &mut Self,len: int)) -> bytes::bytes::t_Bytes{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_impl::f_remaining(&(self)),len){rust_primitives::hax::never_to_any({bytes::panic_advance(&(deref(&(bytes::TryGetError{f_requested:len,f_available:bytes::buf::buf_impl::f_remaining(&(self)),}))))})})};{let mut ret: bytes::bytes_mut::t_BytesMut = {bytes::bytes_mut::impl__BytesMut__with_capacity(len)};{let _: tuple0 = {bytes::buf::buf_mut::f_put::<bytes::buf::take::t_Take<&mut Self>>(&mut (ret),bytes::buf::buf_impl::f_take(&mut (deref(self)),len))};{bytes::bytes_mut::impl__BytesMut__freeze(ret)}}}}}
/** Creates an adaptor which will read at most `limit` bytes from `self`.*//***//** This function returns a new instance of `Buf` which will read at most*//** `limit` bytes.*//***//** # Examples*//***//** ```*//** use bytes::{Buf, BufMut};*//***//** let mut buf = b"hello world"[..].take(5);*//** let mut dst = vec![];*//***//** dst.put(&mut buf);*//** assert_eq!(dst, b"hello");*//***//** let mut buf = buf.into_inner();*//** dst.clear();*//** dst.put(&mut buf);*//** assert_eq!(dst, b" world");*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_take((self: Self,limit: int)) -> bytes::buf::take::t_Take<Self>{{bytes::buf::take::new::<Self>(self,limit)}}
/** Creates an adaptor which will chain this buffer with another.*//***//** The returned `Buf` instance will first consume all bytes from `self`.*//** Afterwards the output is equivalent to the output of next.*//***//** # Examples*//***//** ```*//** use bytes::Buf;*//***//** let mut chain = b"hello "[..].chain(&b"world"[..]);*//***//** let full = chain.copy_to_bytes(11);*//** assert_eq!(full.chunk(), b"hello world");*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_chain<U>((self: Self,next: U)) -> bytes::buf::chain::t_Chain<Self, U> where _: bytes::buf::buf_impl::t_Buf<U>{{bytes::buf::chain::impl__new::<Self,U>(self,next)}}
/** Creates an adaptor which implements the `Read` trait for `self`.*//***//** This function returns a new value which implements `Read` by adapting*//** the `Read` trait functions to the `Buf` trait functions. Given that*//** `Buf` operations are infallible, none of the `Read` functions will*//** return with `Err`.*//***//** # Examples*//***//** ```*//** use bytes::{Bytes, Buf};*//** use std::io::Read;*//***//** let buf = Bytes::from("hello world");*//***//** let mut reader = buf.reader();*//** let mut dst = [0; 1024];*//***//** let num = reader.read(&mut dst).unwrap();*//***//** assert_eq!(11, num);*//** assert_eq!(&dst[..11], &b"hello world"[..]);*//** ```*/#[cfg(feature = "std")]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_reader((self: Self)) -> bytes::buf::reader::t_Reader<Self>{{bytes::buf::reader::new::<Self>(self)}}}
Last AST:

/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
      { Concrete_ident.Imported.krate = "bytes";
        path =
        [{ Concrete_ident.Imported.data = (Concrete_ident.Imported.TypeNs "buf");
           disambiguator = 0 };
         { Concrete_ident.Imported.data =
           (Concrete_ident.Imported.TypeNs "buf_impl"); disambiguator = 0 };
         { Concrete_ident.Imported.data = (Concrete_ident.Imported.TypeNs "Buf");
           disambiguator = 0 }
        ]
      };
    kind = Concrete_ident.Kind.Value }) */

const _: () = ();
*)
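Every `try_get_*` item in the erased `Buf` dump above follows the same shape: compare the field size against `remaining()` and report `TryGetError { requested, available }` on a shortfall, read the value out of the current `chunk()` and `advance()`, and otherwise fall back to `copy_to_slice` into a small stack buffer. A minimal plain-Rust sketch of that shape, written as a free function with an invented name (`try_get_u64_be`) rather than the trait method itself:

```rust
use bytes::{Buf, TryGetError};

// Minimal sketch of the pattern shared by the `try_get_*` items above;
// the real methods are provided directly by the `Buf` trait.
fn try_get_u64_be<B: Buf>(buf: &mut B) -> Result<u64, TryGetError> {
    const SIZE: usize = 8;
    // 1. Bounds check against `remaining()`, reporting requested/available.
    if buf.remaining() < SIZE {
        return Err(TryGetError {
            requested: SIZE,
            available: buf.remaining(),
        });
    }
    // 2. Fast path: the current chunk already holds all eight bytes.
    if let Some(src) = buf.chunk().get(..SIZE) {
        let mut word = [0u8; SIZE];
        word.copy_from_slice(src);
        buf.advance(SIZE);
        return Ok(u64::from_be_bytes(word));
    }
    // 3. Slow path: the bytes straddle chunk boundaries, so copy them out
    //    (copy_to_slice advances the cursor as it copies).
    let mut word = [0u8; SIZE];
    buf.copy_to_slice(&mut word);
    Ok(u64::from_be_bytes(word))
}
```

The `_le`/`_ne` variants in the dump differ only in the `from_*_bytes` conversion, and the narrower widths only in the size of the stack buffer.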
(* item error backend: (DirectAndMut) The mutation of this &mut is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]
#[doc(
    test(
        no_crate_inject,
        attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))
    )
)]
#[no_std()]
#[feature(register_tool)]
#[register_tool(_hax)]
impl<T, Anonymous: 'unk> bytes::buf::buf_impl::t_Buf<&mut T> for &mut T
where
    _: bytes::buf::buf_impl::t_Buf<T>,
{
    fn dropped_body(_: tuple0) -> tuple0 {
        Tuple0
    }
}

Last AST:

/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
      { Concrete_ident.Imported.krate = "bytes";
        path =
        [{ Concrete_ident.Imported.data = (Concrete_ident.Imported.TypeNs "buf");
           disambiguator = 0 };
         { Concrete_ident.Imported.data =
           (Concrete_ident.Imported.TypeNs "buf_impl"); disambiguator = 0 };
         { Concrete_ident.Imported.data = Concrete_ident.Imported.Impl;
           disambiguator = 0 }
        ]
      };
    kind = Concrete_ident.Kind.Value }) */

const _: () = ();
*)
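The item erased just above is the blanket forwarding impl that makes `&mut T` a `Buf` whenever `T` is; hax's DirectAndMut phase rejects it because every method mutates through a second layer of `&mut`. A sketch of that forwarding pattern on a local stand-in trait, so it compiles outside the `bytes` crate (the real impl is roughly `impl<T: Buf + ?Sized> Buf for &mut T`):

```rust
// Local stand-in trait; the real case is `bytes::Buf`.
trait ByteSource {
    fn remaining(&self) -> usize;
    fn chunk(&self) -> &[u8];
    fn advance(&mut self, cnt: usize);
}

// Every method re-borrows `**self`: it is this mutation through a nested
// `&mut` that the DirectAndMut rejection above refers to.
impl<T: ByteSource + ?Sized> ByteSource for &mut T {
    fn remaining(&self) -> usize {
        (**self).remaining()
    }
    fn chunk(&self) -> &[u8] {
        (**self).chunk()
    }
    fn advance(&mut self, cnt: usize) {
        (**self).advance(cnt)
    }
}
```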
class t_Buf (v_T: Type0) = {
  dummy_field: Type0
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_1 (#v_T: Type0) {| i0: t_Buf v_T |} : t_Buf (Alloc.Boxed.t_Box v_T Alloc.Alloc.t_Global)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_2: t_Buf (t_Slice u8)
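`impl_2` models the byte-slice instance of the class; on the Rust side this is the standard `Buf` impl for `&[u8]`, where reading simply narrows the slice in place. A small usage sketch (the literal values here are illustrative):

```rust
use bytes::Buf;

fn main() {
    // A plain byte slice is the simplest `Buf`.
    let mut buf: &[u8] = b"\x01\x02\x03";
    assert_eq!(buf.get_u8(), 0x01);
    assert_eq!(buf.remaining(), 2);
    assert_eq!(buf.chunk(), &b"\x02\x03"[..]);
}
```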
(* [@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_3 (#v_T: Type0) {| i1: Core.Convert.t_AsRef v_T (t_Slice u8) |}
  : t_Buf (Std.Io.Cursor.t_Cursor v_T)

val v__assert_trait_object (v__b: dyn 1 (fun z -> t_Buf z))
  : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) *)
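The commented-out `impl_3` would model the `std`-only instance of `Buf` for `std::io::Cursor<T>` with `T: AsRef<[u8]>`. A hedged usage sketch of that instance:

```rust
use bytes::Buf;
use std::io::Cursor;

fn main() {
    // Cursor over an owned Vec<u8>; the Buf impl tracks the cursor position.
    let mut buf = Cursor::new(vec![0x01, 0x02, 0x03, 0x04]);
    assert_eq!(buf.get_u16(), 0x0102); // big-endian read of two bytes
    assert_eq!(buf.remaining(), 2);
}
```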
251
proofs/fstar/models/Bytes.Buf.Buf_mut.fsti
Normal file

@@ -0,0 +1,251 @@
module Bytes.Buf.Buf_mut
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

val t_BufMut: Type0 -> Type0

(* item error backend: (DirectAndMut) The mutation of this &mut is not allowed here.

Last available AST for this item:

/** A trait for values that provide sequential write access to bytes.*//***//** Write bytes to a buffer*//***//** A buffer stores bytes in memory such that write operations are infallible.*//** The underlying storage may or may not be in contiguous memory. A `BufMut`*//** value is a cursor into the buffer. Writing to `BufMut` advances the cursor*//** position.*//***//** The simplest `BufMut` is a `Vec<u8>`.*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//***//** buf.put(&b"hello world"[..]);*//***//** assert_eq!(buf, b"hello world");*//** ```*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]unsafe trait t_BufMut<Self_>{/** Returns the number of bytes that can be written from the current*//** position until the end of the buffer is reached.*//***//** This value is greater than or equal to the length of the slice returned*//** by `chunk_mut()`.*//***//** Writing to a `BufMut` may involve allocating more memory on the fly.*//** Implementations may fail before reaching the number of bytes indicated*//** by this method if they encounter an allocation failure.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut dst = [0; 10];*//** let mut buf = &mut dst[..];*//***//** let original_remaining = buf.remaining_mut();*//** buf.put(&b"hello"[..]);*//***//** assert_eq!(original_remaining - 5, buf.remaining_mut());*//** ```*//***//** # Implementer notes*//***//** Implementations of `remaining_mut` should ensure that the return value*//** does not change unless a call is made to `advance_mut` or any other*//** function that is documented to change the `BufMut`'s current position.*//***//** # Note*//***//** `remaining_mut` may return value smaller than actual available space.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_remaining_mut<Anonymous: 'unk>(_: &Self) -> int;
/** Advance the internal cursor of the BufMut*//***//** The next call to `chunk_mut` will return a slice starting `cnt` bytes*//** further into the underlying buffer.*//***//** # Safety*//***//** The caller must ensure that the next `cnt` bytes of `chunk` are*//** initialized.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = Vec::with_capacity(16);*//***//** // Write some data*//** buf.chunk_mut()[0..2].copy_from_slice(b"he");*//** unsafe { buf.advance_mut(2) };*//***//** // write more bytes*//** buf.chunk_mut()[0..3].copy_from_slice(b"llo");*//***//** unsafe { buf.advance_mut(3); }*//***//** assert_eq!(5, buf.len());*//** assert_eq!(buf, b"hello");*//** ```*//***//** # Panics*//***//** This function **may** panic if `cnt > self.remaining_mut()`.*//***//** # Implementer notes*//***//** It is recommended for implementations of `advance_mut` to panic if*//** `cnt > self.remaining_mut()`. If the implementation does not panic,*//** the call must behave as if `cnt == self.remaining_mut()`.*//***//** A call with `cnt == 0` should never panic and be a no-op.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_advance_mut<Anonymous: 'unk>(_: Self,_: int) -> Self;
/** Returns true if there is space in `self` for more bytes.*//***//** This is equivalent to `self.remaining_mut() != 0`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut dst = [0; 5];*//** let mut buf = &mut dst[..];*//***//** assert!(buf.has_remaining_mut());*//***//** buf.put(&b"hello"[..]);*//***//** assert!(!buf.has_remaining_mut());*//** ```*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_has_remaining_mut<Anonymous: 'unk>((self: &Self)) -> bool{{core::cmp::PartialOrd::gt(bytes::buf::buf_mut::f_remaining_mut(&(deref(self))),0)}}
/** Returns a mutable slice starting at the current BufMut position and of*//** length between 0 and `BufMut::remaining_mut()`. Note that this *can* be shorter than the*//** whole remainder of the buffer (this allows non-continuous implementation).*//***//** This is a lower level function. Most operations are done with other*//** functions.*//***//** The returned byte slice may represent uninitialized memory.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = Vec::with_capacity(16);*//***//** unsafe {*//** // MaybeUninit::as_mut_ptr*//** buf.chunk_mut()[0..].as_mut_ptr().write(b'h');*//** buf.chunk_mut()[1..].as_mut_ptr().write(b'e');*//***//** buf.advance_mut(2);*//***//** buf.chunk_mut()[0..].as_mut_ptr().write(b'l');*//** buf.chunk_mut()[1..].as_mut_ptr().write(b'l');*//** buf.chunk_mut()[2..].as_mut_ptr().write(b'o');*//***//** buf.advance_mut(3);*//** }*//***//** assert_eq!(5, buf.len());*//** assert_eq!(buf, b"hello");*//** ```*//***//** # Implementer notes*//***//** This function should never panic. `chunk_mut()` should return an empty*//** slice **if and only if** `remaining_mut()` returns 0. In other words,*//** `chunk_mut()` returning an empty slice implies that `remaining_mut()` will*//** return 0 and `remaining_mut()` returning 0 implies that `chunk_mut()` will*//** return an empty slice.*//***//** This function may trigger an out-of-memory abort if it tries to allocate*//** memory and fails to do so.*/#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_chunk_mut<Anonymous: 'unk>(_: Self) -> tuple2<Self, &mut bytes::buf::uninit_slice::t_UninitSlice>;
/** Transfer bytes into `self` from `src` and advance the cursor by the*//** number of bytes written.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//***//** buf.put_u8(b'h');*//** buf.put(&b"ello"[..]);*//** buf.put(&b" world"[..]);*//***//** assert_eq!(buf, b"hello world");*//** ```*//***//** # Panics*//***//** Panics if `self` does not have enough capacity to contain `src`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put<T, Anonymous: 'unk>((mut self: Self,mut src: T)) -> tuple0 where _: bytes::buf::buf_impl::t_Buf<T>{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_mut::f_remaining_mut(&(self)),bytes::buf::buf_impl::f_remaining(&(src))){rust_primitives::hax::never_to_any({bytes::panic_advance(&(deref(&(bytes::TryGetError{f_requested:bytes::buf::buf_impl::f_remaining(&(src)),f_available:bytes::buf::buf_mut::f_remaining_mut(&(self)),}))))})})};{let _: tuple0 = {{{while bytes::buf::buf_impl::f_has_remaining(&(src)) { {let s: &[int] = {bytes::buf::buf_impl::f_chunk(&(src))};{let d: &mut bytes::buf::uninit_slice::t_UninitSlice = {bytes::buf::buf_mut::f_chunk_mut(&mut (self))};{let cnt: int = {core::cmp::f_min(core::slice::impl__len::<int>(&(deref(s))),bytes::buf::uninit_slice::impl__UninitSlice__len(&(deref(d))))};{let _: tuple0 = {bytes::buf::uninit_slice::impl__UninitSlice__copy_from_slice(&mut (deref(core::ops::index::f_index_mut(&mut (deref(d)),core::ops::range::RangeTo{f_end:cnt,}))),&(deref(core::ops::index::f_index(&(deref(s)),core::ops::range::RangeTo{f_end:cnt,}))))};{let _: tuple0 = {unsafe {bytes::buf::buf_mut::f_advance_mut(&mut (self),cnt)}};{let _: tuple0 = {bytes::buf::buf_impl::f_advance(&mut (src),cnt)};Tuple0}}}}}} }}}};self}}}
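The extracted `f_put` above is hard to read in hax's flattened form; the logic it encodes is the `bytes` crate's default chunk-copy loop. A minimal readable sketch (the helper name `put_sketch` and the capacity `assert!` are illustrative, not part of the crate):

```
use bytes::{Buf, BufMut};

// Sketch of the default `BufMut::put` logic shown above: repeatedly copy
// min(src-chunk, dst-chunk) bytes and advance both cursors.
fn put_sketch<B: BufMut, S: Buf>(dst: &mut B, mut src: S) {
    assert!(dst.remaining_mut() >= src.remaining(), "not enough capacity");
    while src.has_remaining() {
        let s = src.chunk();
        let d = dst.chunk_mut();
        let cnt = usize::min(s.len(), d.len());
        d[..cnt].copy_from_slice(&s[..cnt]);
        // SAFETY: the first `cnt` bytes of the chunk were just initialized.
        unsafe { dst.advance_mut(cnt) };
        src.advance(cnt);
    }
}

fn main() {
    let mut out: Vec<u8> = Vec::with_capacity(16);
    put_sketch(&mut out, &b"hello"[..]);
    assert_eq!(out, b"hello");
}
```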
/** Transfer bytes into `self` from `src` and advance the cursor by the*//** number of bytes written.*//***//** `self` must have enough remaining capacity to contain all of `src`.*//***//** ```*//** use bytes::BufMut;*//***//** let mut dst = [0; 6];*//***//** {*//** let mut buf = &mut dst[..];*//** buf.put_slice(b"hello");*//***//** assert_eq!(1, buf.remaining_mut());*//** }*//***//** assert_eq!(b"hello\0", &dst);*//** ```*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_slice<Anonymous: 'unk, Anonymous: 'unk>((mut self: Self,mut src: &[int])) -> tuple0{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_mut::f_remaining_mut(&(self)),core::slice::impl__len::<int>(&(deref(src)))){rust_primitives::hax::never_to_any({bytes::panic_advance(&(deref(&(bytes::TryGetError{f_requested:core::slice::impl__len::<int>(&(deref(src))),f_available:bytes::buf::buf_mut::f_remaining_mut(&(self)),}))))})})};{let _: tuple0 = {{{while core::ops::bit::Not::not(core::slice::impl__is_empty::<int>(&(deref(src)))) { {let dst: &mut bytes::buf::uninit_slice::t_UninitSlice = {bytes::buf::buf_mut::f_chunk_mut(&mut (self))};{let cnt: int = {core::cmp::f_min(core::slice::impl__len::<int>(&(deref(src))),bytes::buf::uninit_slice::impl__UninitSlice__len(&(deref(dst))))};{let _: tuple0 = {bytes::buf::uninit_slice::impl__UninitSlice__copy_from_slice(&mut (deref(core::ops::index::f_index_mut(&mut (deref(dst)),core::ops::range::RangeTo{f_end:cnt,}))),&(deref(core::ops::index::f_index(&(deref(src)),core::ops::range::RangeTo{f_end:cnt,}))))};{let _: tuple0 = {(src = &(deref(core::ops::index::f_index(&(deref(src)),core::ops::range::RangeFrom{f_start:cnt,}))))};{let _: tuple0 = {unsafe {bytes::buf::buf_mut::f_advance_mut(&mut (self),cnt)}};Tuple0}}}}} }}}};self}}}
/** Put `cnt` bytes `val` into `self`.*//***//** Logically equivalent to calling `self.put_u8(val)` `cnt` times, but may work faster.*//***//** `self` must have at least `cnt` remaining capacity.*//***//** ```*//** use bytes::BufMut;*//***//** let mut dst = [0; 6];*//***//** {*//** let mut buf = &mut dst[..];*//** buf.put_bytes(b'a', 4);*//***//** assert_eq!(2, buf.remaining_mut());*//** }*//***//** assert_eq!(b"aaaa\0\0", &dst);*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_bytes<Anonymous: 'unk>((mut self: Self,val: int,mut cnt: int)) -> tuple0{{let _: tuple0 = {(if core::cmp::PartialOrd::lt(bytes::buf::buf_mut::f_remaining_mut(&(self)),cnt){{rust_primitives::hax::never_to_any(bytes::panic_advance(&(deref(&(bytes::TryGetError{f_requested:cnt,f_available:bytes::buf::buf_mut::f_remaining_mut(&(self)),})))))}})};{let _: tuple0 = {{{while core::cmp::PartialOrd::gt(cnt,0) { {let dst: &mut bytes::buf::uninit_slice::t_UninitSlice = {bytes::buf::buf_mut::f_chunk_mut(&mut (self))};{let dst_len: int = {core::cmp::f_min(bytes::buf::uninit_slice::impl__UninitSlice__len(&(deref(dst))),cnt)};{let _: tuple0 = {unsafe {rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","core::intrinsics::write_bytes")::<int>(rust_primitives::hax::failure("(reject_RawOrMutPointer) ExplicitRejection { reason: \"a node of kind [Raw_pointer] have been found in the AST\" }","bytes::buf::uninit_slice::impl__UninitSlice__as_mut_ptr(&mut (deref(dst)))"),val,dst_len)}};{let _: tuple0 = {unsafe {bytes::buf::buf_mut::f_advance_mut(&mut (self),dst_len)}};{let _: tuple0 = {(cnt = core::ops::arith::Sub::sub(cnt,dst_len))};Tuple0}}}}} }}}};self}}}
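Likewise, `f_put_bytes` above is the fill loop of the default implementation; the raw-pointer `write_bytes` call is exactly the part hax rejects (`reject_RawOrMutPointer`). A readable sketch, with the helper name and the `assert!` being illustrative:

```
use bytes::BufMut;

// Fill whole writable chunks with `val` until `cnt` bytes have been written.
fn put_bytes_sketch<B: BufMut>(buf: &mut B, val: u8, mut cnt: usize) {
    assert!(buf.remaining_mut() >= cnt, "not enough capacity");
    while cnt > 0 {
        let dst = buf.chunk_mut();
        let dst_len = usize::min(dst.len(), cnt);
        unsafe {
            // Initialize `dst_len` bytes of the current chunk to `val`.
            core::ptr::write_bytes(dst.as_mut_ptr(), val, dst_len);
            buf.advance_mut(dst_len);
        }
        cnt -= dst_len;
    }
}

fn main() {
    let mut out: Vec<u8> = Vec::with_capacity(8);
    put_bytes_sketch(&mut out, b'a', 4);
    assert_eq!(out, b"aaaa");
}
```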
/** Writes an unsigned 8 bit integer to `self`.*//***//** The current position is advanced by 1.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u8(0x01);*//** assert_eq!(buf, b"\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u8<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let src: [int;1] = {[n]};{let _: tuple0 = {bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(src)))))};self}}}
/** Writes a signed 8 bit integer to `self`.*//***//** The current position is advanced by 1.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i8(0x01);*//** assert_eq!(buf, b"\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i8<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let src: [int;1] = {[cast(n)]};{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(src)))))}};self}}}
/** Writes an unsigned 16 bit integer to `self` in big-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u16(0x0809);*//** assert_eq!(buf, b"\x08\x09");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u16<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u16__to_be_bytes(n))))))}};self}}
/** Writes an unsigned 16 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u16_le(0x0809);*//** assert_eq!(buf, b"\x09\x08");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u16_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u16__to_le_bytes(n))))))}};self}}
/** Writes an unsigned 16 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u16_ne(0x0809);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x08\x09");*//** } else {*//** assert_eq!(buf, b"\x09\x08");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u16_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u16__to_ne_bytes(n))))))}};self}}
/** Writes a signed 16 bit integer to `self` in big-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i16(0x0809);*//** assert_eq!(buf, b"\x08\x09");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i16<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i16__to_be_bytes(n))))))}};self}}
/** Writes a signed 16 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i16_le(0x0809);*//** assert_eq!(buf, b"\x09\x08");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i16_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i16__to_le_bytes(n))))))}};self}}
/** Writes a signed 16 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 2.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i16_ne(0x0809);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x08\x09");*//** } else {*//** assert_eq!(buf, b"\x09\x08");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i16_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i16__to_ne_bytes(n))))))}};self}}
/** Writes an unsigned 32 bit integer to `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u32(0x0809A0A1);*//** assert_eq!(buf, b"\x08\x09\xA0\xA1");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u32<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u32__to_be_bytes(n))))))}};self}}
/** Writes an unsigned 32 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u32_le(0x0809A0A1);*//** assert_eq!(buf, b"\xA1\xA0\x09\x08");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u32_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u32__to_le_bytes(n))))))}};self}}
/** Writes an unsigned 32 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u32_ne(0x0809A0A1);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x08\x09\xA0\xA1");*//** } else {*//** assert_eq!(buf, b"\xA1\xA0\x09\x08");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u32_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u32__to_ne_bytes(n))))))}};self}}
/** Writes a signed 32 bit integer to `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i32(0x0809A0A1);*//** assert_eq!(buf, b"\x08\x09\xA0\xA1");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i32<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i32__to_be_bytes(n))))))}};self}}
/** Writes a signed 32 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i32_le(0x0809A0A1);*//** assert_eq!(buf, b"\xA1\xA0\x09\x08");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i32_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i32__to_le_bytes(n))))))}};self}}
/** Writes a signed 32 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i32_ne(0x0809A0A1);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x08\x09\xA0\xA1");*//** } else {*//** assert_eq!(buf, b"\xA1\xA0\x09\x08");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i32_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i32__to_ne_bytes(n))))))}};self}}
/** Writes an unsigned 64 bit integer to `self` in the big-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u64(0x0102030405060708);*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u64<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u64__to_be_bytes(n))))))}};self}}
/** Writes an unsigned 64 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u64_le(0x0102030405060708);*//** assert_eq!(buf, b"\x08\x07\x06\x05\x04\x03\x02\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u64_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u64__to_le_bytes(n))))))}};self}}
/** Writes an unsigned 64 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u64_ne(0x0102030405060708);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08");*//** } else {*//** assert_eq!(buf, b"\x08\x07\x06\x05\x04\x03\x02\x01");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u64_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u64__to_ne_bytes(n))))))}};self}}
/** Writes a signed 64 bit integer to `self` in the big-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i64(0x0102030405060708);*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i64<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i64__to_be_bytes(n))))))}};self}}
/** Writes a signed 64 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i64_le(0x0102030405060708);*//** assert_eq!(buf, b"\x08\x07\x06\x05\x04\x03\x02\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i64_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i64__to_le_bytes(n))))))}};self}}
/** Writes a signed 64 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i64_ne(0x0102030405060708);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08");*//** } else {*//** assert_eq!(buf, b"\x08\x07\x06\x05\x04\x03\x02\x01");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i64_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i64__to_ne_bytes(n))))))}};self}}
/** Writes an unsigned 128 bit integer to `self` in the big-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u128(0x01020304050607080910111213141516);*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u128<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u128__to_be_bytes(n))))))}};self}}
/** Writes an unsigned 128 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u128_le(0x01020304050607080910111213141516);*//** assert_eq!(buf, b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u128_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u128__to_le_bytes(n))))))}};self}}
/** Writes an unsigned 128 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_u128_ne(0x01020304050607080910111213141516);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16");*//** } else {*//** assert_eq!(buf, b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_u128_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__u128__to_ne_bytes(n))))))}};self}}
/** Writes a signed 128 bit integer to `self` in the big-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i128(0x01020304050607080910111213141516);*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i128<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i128__to_be_bytes(n))))))}};self}}
/** Writes a signed 128 bit integer to `self` in little-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i128_le(0x01020304050607080910111213141516);*//** assert_eq!(buf, b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i128_le<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i128__to_le_bytes(n))))))}};self}}
/** Writes a signed 128 bit integer to `self` in native-endian byte order.*//***//** The current position is advanced by 16.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_i128_ne(0x01020304050607080910111213141516);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16");*//** } else {*//** assert_eq!(buf, b"\x16\x15\x14\x13\x12\x11\x10\x09\x08\x07\x06\x05\x04\x03\x02\x01");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_i128_ne<Anonymous: 'unk>((mut self: Self,n: int)) -> tuple0{{let _: tuple0 = {{bytes::buf::buf_mut::f_put_slice(&mut (self),rust_primitives::unsize(&(deref(&(core::num::impl__i128__to_ne_bytes(n))))))}};self}}
/** Writes an unsigned n-byte integer to `self` in big-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_uint(0x010203, 3);*//** assert_eq!(buf, b"\x01\x02\x03");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self` or if `nbytes` is greater than 8.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_uint<Anonymous: 'unk>((mut self: Self,n: int,nbytes: int)) -> tuple0{{let start: int = {(match (core::num::impl__usize__checked_sub(core::mem::size_of_val::<int>(&(deref(&(n)))),nbytes)) {core::option::Option_Some(start) => {start},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(nbytes,core::mem::size_of_val::<int>(&(deref(&(n))))))}})};{let _: tuple0 = {bytes::buf::buf_mut::f_put_slice(&mut (self),&(deref(&(deref(core::ops::index::f_index(&(core::num::impl__u64__to_be_bytes(n)),core::ops::range::RangeFrom{f_start:start,}))))))};self}}}
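The `f_put_uint` body above encodes a simple slicing trick: a `u64` is 8 big-endian bytes, so writing the low `nbytes` bytes means skipping the first `8 - nbytes` bytes of `to_be_bytes`. A sketch (the helper name is illustrative):

```
use bytes::BufMut;

fn put_uint_sketch<B: BufMut>(buf: &mut B, n: u64, nbytes: usize) {
    // `checked_sub` is what makes nbytes > 8 panic instead of wrapping.
    let start = core::mem::size_of::<u64>()
        .checked_sub(nbytes)
        .expect("nbytes must be at most 8");
    buf.put_slice(&n.to_be_bytes()[start..]);
}

fn main() {
    let mut buf: Vec<u8> = vec![];
    put_uint_sketch(&mut buf, 0x010203, 3);
    assert_eq!(buf, b"\x01\x02\x03");
}
```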
/** Writes an unsigned n-byte integer to `self` in the little-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_uint_le(0x010203, 3);*//** assert_eq!(buf, b"\x03\x02\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self` or if `nbytes` is greater than 8.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_uint_le<Anonymous: 'unk>((mut self: Self,n: int,nbytes: int)) -> tuple0{{let slice: [int;8] = {core::num::impl__u64__to_le_bytes(n)};{let slice: &[int] = {(match (core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(rust_primitives::unsize(&(slice)),core::ops::range::RangeTo{f_end:nbytes,})) {core::option::Option_Some(slice) => {slice},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(nbytes,core::slice::impl__len::<int>(rust_primitives::unsize(&(slice)))))}})};{let _: tuple0 = {bytes::buf::buf_mut::f_put_slice(&mut (self),&(deref(slice)))};self}}}}
/** Writes an unsigned n-byte integer to `self` in the native-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_uint_ne(0x010203, 3);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x01\x02\x03");*//** } else {*//** assert_eq!(buf, b"\x03\x02\x01");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self` or if `nbytes` is greater than 8.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_uint_ne<Anonymous: 'unk>((mut self: Self,n: int,nbytes: int)) -> tuple0{{let _: tuple0 = {{(if false{{bytes::buf::buf_mut::f_put_uint(&mut (self),n,nbytes)}} else {{bytes::buf::buf_mut::f_put_uint_le(&mut (self),n,nbytes)}})}};self}}
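The `if false` in `f_put_uint_ne` above (and in `f_put_int_ne` below) is not dead code in the Rust source: it is `cfg!(target_endian = "big")` after constant folding, so the extraction was evidently produced for a little-endian configuration and the native-endian variants collapse to their little-endian counterparts. The pre-folding shape, sketched as a free function:

```
use bytes::BufMut;

// `cfg!` is a compile-time constant, which is why the extraction shows
// `if false` on a little-endian target.
fn put_uint_ne_sketch<B: BufMut>(buf: &mut B, n: u64, nbytes: usize) {
    if cfg!(target_endian = "big") {
        buf.put_uint(n, nbytes);
    } else {
        buf.put_uint_le(n, nbytes);
    }
}

fn main() {
    let mut buf: Vec<u8> = vec![];
    put_uint_ne_sketch(&mut buf, 0x010203, 3);
    if cfg!(target_endian = "big") {
        assert_eq!(buf, b"\x01\x02\x03");
    } else {
        assert_eq!(buf, b"\x03\x02\x01");
    }
}
```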
/** Writes low `nbytes` of a signed integer to `self` in big-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_int(0x0504010203, 3);*//** assert_eq!(buf, b"\x01\x02\x03");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self` or if `nbytes` is greater than 8.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_int<Anonymous: 'unk>((mut self: Self,n: int,nbytes: int)) -> tuple0{{let start: int = {(match (core::num::impl__usize__checked_sub(core::mem::size_of_val::<int>(&(deref(&(n)))),nbytes)) {core::option::Option_Some(start) => {start},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(nbytes,core::mem::size_of_val::<int>(&(deref(&(n))))))}})};{let _: tuple0 = {bytes::buf::buf_mut::f_put_slice(&mut (self),&(deref(&(deref(core::ops::index::f_index(&(core::num::impl__i64__to_be_bytes(n)),core::ops::range::RangeFrom{f_start:start,}))))))};self}}}
/** Writes low `nbytes` of a signed integer to `self` in little-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_int_le(0x0504010203, 3);*//** assert_eq!(buf, b"\x03\x02\x01");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self` or if `nbytes` is greater than 8.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_int_le<Anonymous: 'unk>((mut self: Self,n: int,nbytes: int)) -> tuple0{{let slice: [int;8] = {core::num::impl__i64__to_le_bytes(n)};{let slice: &[int] = {(match (core::slice::impl__get::<int,core::ops::range::t_RangeTo<int>>(rust_primitives::unsize(&(slice)),core::ops::range::RangeTo{f_end:nbytes,})) {core::option::Option_Some(slice) => {slice},core::option::Option_None => {rust_primitives::hax::never_to_any(bytes::panic_does_not_fit(nbytes,core::slice::impl__len::<int>(rust_primitives::unsize(&(slice)))))}})};{let _: tuple0 = {bytes::buf::buf_mut::f_put_slice(&mut (self),&(deref(slice)))};self}}}}
/** Writes low `nbytes` of a signed integer to `self` in native-endian byte order.*//***//** The current position is advanced by `nbytes`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_int_ne(0x010203, 3);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x01\x02\x03");*//** } else {*//** assert_eq!(buf, b"\x03\x02\x01");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self` or if `nbytes` is greater than 8.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_int_ne<Anonymous: 'unk>((mut self: Self,n: int,nbytes: int)) -> tuple0{{let _: tuple0 = {{(if false{{bytes::buf::buf_mut::f_put_int(&mut (self),n,nbytes)}} else {{bytes::buf::buf_mut::f_put_int_le(&mut (self),n,nbytes)}})}};self}}
/** Writes an IEEE754 single-precision (4 bytes) floating point number to*//** `self` in big-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_f32(1.2f32);*//** assert_eq!(buf, b"\x3F\x99\x99\x9A");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_f32<Anonymous: 'unk>((mut self: Self,n: float)) -> tuple0{{let _: tuple0 = {bytes::buf::buf_mut::f_put_u32(&mut (self),core::f32::impl__f32__to_bits(n))};self}}
/** Writes an IEEE754 single-precision (4 bytes) floating point number to*//** `self` in little-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_f32_le(1.2f32);*//** assert_eq!(buf, b"\x9A\x99\x99\x3F");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_f32_le<Anonymous: 'unk>((mut self: Self,n: float)) -> tuple0{{let _: tuple0 = {bytes::buf::buf_mut::f_put_u32_le(&mut (self),core::f32::impl__f32__to_bits(n))};self}}
/** Writes an IEEE754 single-precision (4 bytes) floating point number to*//** `self` in native-endian byte order.*//***//** The current position is advanced by 4.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_f32_ne(1.2f32);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x3F\x99\x99\x9A");*//** } else {*//** assert_eq!(buf, b"\x9A\x99\x99\x3F");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_f32_ne<Anonymous: 'unk>((mut self: Self,n: float)) -> tuple0{{let _: tuple0 = {bytes::buf::buf_mut::f_put_u32_ne(&mut (self),core::f32::impl__f32__to_bits(n))};self}}
/** Writes an IEEE754 double-precision (8 bytes) floating point number to*//** `self` in big-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_f64(1.2f64);*//** assert_eq!(buf, b"\x3F\xF3\x33\x33\x33\x33\x33\x33");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_f64<Anonymous: 'unk>((mut self: Self,n: float)) -> tuple0{{let _: tuple0 = {bytes::buf::buf_mut::f_put_u64(&mut (self),core::f64::impl__f64__to_bits(n))};self}}
/** Writes an IEEE754 double-precision (8 bytes) floating point number to*//** `self` in little-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_f64_le(1.2f64);*//** assert_eq!(buf, b"\x33\x33\x33\x33\x33\x33\xF3\x3F");*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_f64_le<Anonymous: 'unk>((mut self: Self,n: float)) -> tuple0{{let _: tuple0 = {bytes::buf::buf_mut::f_put_u64_le(&mut (self),core::f64::impl__f64__to_bits(n))};self}}
/** Writes an IEEE754 double-precision (8 bytes) floating point number to*//** `self` in native-endian byte order.*//***//** The current position is advanced by 8.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut buf = vec![];*//** buf.put_f64_ne(1.2f64);*//** if cfg!(target_endian = "big") {*//** assert_eq!(buf, b"\x3F\xF3\x33\x33\x33\x33\x33\x33");*//** } else {*//** assert_eq!(buf, b"\x33\x33\x33\x33\x33\x33\xF3\x3F");*//** }*//** ```*//***//** # Panics*//***//** This function panics if there is not enough remaining capacity in*//** `self`.*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_put_f64_ne<Anonymous: 'unk>((mut self: Self,n: float)) -> tuple0{{let _: tuple0 = {bytes::buf::buf_mut::f_put_u64_ne(&mut (self),core::f64::impl__f64__to_bits(n))};self}}
/** Creates an adaptor which can write at most `limit` bytes to `self`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let arr = &mut [0u8; 128][..];*//** assert_eq!(arr.remaining_mut(), 128);*//***//** let dst = arr.limit(10);*//** assert_eq!(dst.remaining_mut(), 10);*//** ```*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_limit((self: Self,limit: int)) -> bytes::buf::limit::t_Limit<Self>{{bytes::buf::limit::new::<Self>(self,limit)}}
/** Creates an adaptor which implements the `Write` trait for `self`.*//***//** This function returns a new value which implements `Write` by adapting*//** the `Write` trait functions to the `BufMut` trait functions. Given that*//** `BufMut` operations are infallible, none of the `Write` functions will*//** return with `Err`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//** use std::io::Write;*//***//** let mut buf = vec![].writer();*//***//** let num = buf.write(&b"hello world"[..]).unwrap();*//** assert_eq!(11, num);*//***//** let buf = buf.into_inner();*//***//** assert_eq!(*buf, b"hello world"[..]);*//** ```*/#[cfg(feature = "std")]#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_writer((self: Self)) -> bytes::buf::writer::t_Writer<Self>{{bytes::buf::writer::new::<Self>(self)}}
/** Creates an adapter which will chain this buffer with another.*//***//** The returned `BufMut` instance will first write to all bytes from*//** `self`. Afterwards, it will write to `next`.*//***//** # Examples*//***//** ```*//** use bytes::BufMut;*//***//** let mut a = [0u8; 5];*//** let mut b = [0u8; 6];*//***//** let mut chain = (&mut a[..]).chain_mut(&mut b[..]);*//***//** chain.put_slice(b"hello world");*//***//** assert_eq!(&a[..], b"hello");*//** assert_eq!(&b[..], b" world");*//** ```*/#[inline()]#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]#[doc(test(no_crate_inject,
attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))))]#[no_std()]#[feature(register_tool)]#[register_tool(_hax)]fn f_chain_mut<U>((self: Self,next: U)) -> bytes::buf::chain::t_Chain<Self, U> where _: bytes::buf::buf_mut::t_BufMut<U>{{bytes::buf::chain::impl__new::<Self,U>(self,next)}}}
Last AST:

/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Concrete_ident.Imported.krate = "bytes";
    path =
    [{ Concrete_ident.Imported.data = (Concrete_ident.Imported.TypeNs "buf");
       disambiguator = 0 };
     { Concrete_ident.Imported.data =
       (Concrete_ident.Imported.TypeNs "buf_mut"); disambiguator = 0 };
     { Concrete_ident.Imported.data =
       (Concrete_ident.Imported.TypeNs "BufMut"); disambiguator = 0 }
     ]
  };
  kind = Concrete_ident.Kind.Value }) */
const _: () = ();
*)

(* item error backend: (DirectAndMut) The mutation of this `&mut` is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]
#[doc(
    test(
        no_crate_inject,
        attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))
    )
)]
#[no_std()]
#[feature(register_tool)]
#[register_tool(_hax)]
unsafe impl<T, Anonymous: 'unk> bytes::buf::buf_mut::t_BufMut<&mut T> for &mut T
where
    _: bytes::buf::buf_mut::t_BufMut<T>,
{
    fn dropped_body(_: tuple0) -> tuple0 {
        Tuple0
    }
}

Last AST:

/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Concrete_ident.Imported.krate = "bytes";
    path =
    [{ Concrete_ident.Imported.data = (Concrete_ident.Imported.TypeNs "buf");
       disambiguator = 0 };
     { Concrete_ident.Imported.data =
       (Concrete_ident.Imported.TypeNs "buf_mut"); disambiguator = 0 };
     { Concrete_ident.Imported.data = Concrete_ident.Imported.Impl;
       disambiguator = 0 }
     ]
  };
  kind = Concrete_ident.Kind.Value }) */
const _: () = ();
*)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_1 (#v_T: Type0) {| i0: t_BufMut v_T |}
    : t_BufMut (Alloc.Boxed.t_Box v_T Alloc.Alloc.t_Global)

(* item error backend: (DirectAndMut) The mutation of this `&mut` is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]
#[doc(
    test(
        no_crate_inject,
        attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))
    )
)]
#[no_std()]
#[feature(register_tool)]
#[register_tool(_hax)]
unsafe impl<Anonymous: 'unk> bytes::buf::buf_mut::t_BufMut<&mut [int]> for &mut [int] {
    fn dropped_body(_: tuple0) -> tuple0 {
        Tuple0
    }
}

Last AST:

/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Concrete_ident.Imported.krate = "bytes";
    path =
    [{ Concrete_ident.Imported.data = (Concrete_ident.Imported.TypeNs "buf");
       disambiguator = 0 };
     { Concrete_ident.Imported.data =
       (Concrete_ident.Imported.TypeNs "buf_mut"); disambiguator = 0 };
     { Concrete_ident.Imported.data = Concrete_ident.Imported.Impl;
       disambiguator = 2 }
     ]
  };
  kind = Concrete_ident.Kind.Value }) */
const _: () = ();
*)

(* item error backend: (DirectAndMut) The mutation of this `&mut` is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
#[warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]
#[doc(
    test(
        no_crate_inject,
        attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables))
    )
)]
#[no_std()]
#[feature(register_tool)]
#[register_tool(_hax)]
unsafe impl<
    Anonymous: 'unk,
> bytes::buf::buf_mut::t_BufMut<&mut [core::mem::maybe_uninit::t_MaybeUninit<int>]>
    for &mut [core::mem::maybe_uninit::t_MaybeUninit<int>] {
    fn dropped_body(_: tuple0) -> tuple0 {
        Tuple0
    }
}

Last AST:

/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Concrete_ident.Imported.krate = "bytes";
    path =
    [{ Concrete_ident.Imported.data = (Concrete_ident.Imported.TypeNs "buf");
       disambiguator = 0 };
     { Concrete_ident.Imported.data =
       (Concrete_ident.Imported.TypeNs "buf_mut"); disambiguator = 0 };
     { Concrete_ident.Imported.data = Concrete_ident.Imported.Impl;
       disambiguator = 3 }
     ]
  };
  kind = Concrete_ident.Kind.Value }) */
const _: () = ();
*)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_4:t_BufMut (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)

val v__assert_trait_object (v__b: dyn 1 (fun z -> t_BufMut z))
    : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)
96
proofs/fstar/models/Libcrux_hmac.fsti
Normal file

@ -0,0 +1,96 @@
module Libcrux_hmac
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

/// The HMAC algorithm defining the used hash function.
type t_Algorithm =
  | Algorithm_Sha1 : t_Algorithm
  | Algorithm_Sha256 : t_Algorithm
  | Algorithm_Sha384 : t_Algorithm
  | Algorithm_Sha512 : t_Algorithm

val t_Algorithm_cast_to_repr (x: t_Algorithm)
    : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_1:Core.Clone.t_Clone t_Algorithm

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl:Core.Marker.t_Copy t_Algorithm

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_2:Core.Fmt.t_Debug t_Algorithm

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_3:Core.Marker.t_StructuralPartialEq t_Algorithm

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_4:Core.Cmp.t_PartialEq t_Algorithm t_Algorithm

/// Get the tag size for a given algorithm.
val tag_size (alg: t_Algorithm) : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)

/// Compute the HMAC value with the given `alg` and `key` on `data` with an
/// output tag length of `tag_length`.
/// Returns a vector of length `tag_length`.
/// Panics if either `key` or `data` are longer than `u32::MAX`.
val hmac (alg: t_Algorithm) (key data: t_Slice u8) (tag_length: Core.Option.t_Option usize)
    : Prims.Pure (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)
      Prims.l_True
      (ensures
        fun result ->
          let result:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = result in
          let native_tag_length:usize =
            match alg <: t_Algorithm with
            | Algorithm_Sha1 -> mk_usize 20
            | Algorithm_Sha256 -> mk_usize 32
            | Algorithm_Sha384 -> mk_usize 48
            | Algorithm_Sha512 -> mk_usize 64
          in
          match
            (match tag_length <: Core.Option.t_Option usize with
              | Core.Option.Option_Some l ->
                (match l <=. native_tag_length <: bool with
                  | true ->
                    Core.Option.Option_Some
                      ((Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global result <: usize) =. l)
                    <:
                    Core.Option.t_Option bool
                  | _ -> Core.Option.Option_None <: Core.Option.t_Option bool)
              | _ -> Core.Option.Option_None <: Core.Option.t_Option bool)
            <:
            Core.Option.t_Option bool
          with
          | Core.Option.Option_Some x -> x
          | Core.Option.Option_None ->
            (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global result <: usize) =. native_tag_length)
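The `ensures` clause above pins down the returned tag length: with `Some l` and `l` no larger than the native digest size the result has length `l`, otherwise it has the native digest length. Assuming the Rust-side API mirrors this val (`libcrux_hmac::hmac(alg, key, data, tag_length) -> Vec<u8>`; the import path is an assumption taken from the krate name in the error block below), a usage sketch:

```
use libcrux_hmac::{hmac, Algorithm};

fn main() {
    let key = b"key";
    let data = b"message";

    // No explicit tag length: the full native digest (32 bytes for SHA-256).
    let full = hmac(Algorithm::Sha256, key, data, None);
    assert_eq!(full.len(), 32);

    // Truncated tag: the post-condition above forces `len == 16` here,
    // because 16 <= the native tag length of 32.
    let truncated = hmac(Algorithm::Sha256, key, data, Some(16));
    assert_eq!(truncated.len(), 16);
}
```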
(* item error backend: (DirectAndMut) The mutation of this `&mut` is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
#[inline(always)]
#[no_std()]
#[feature(register_tool)]
#[register_tool(_hax)]
fn wrap_bufalloc<const N: int, F>(f: F) -> alloc::vec::t_Vec<int, alloc::alloc::t_Global>
where
    _: core::ops::function::t_Fn<F, tuple1<&mut [int; N]>>,
    F: core::ops::function::t_FnOnce<f_Output = tuple0>,
{
    rust_primitives::hax::dropped_body
}

Last AST:

/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Concrete_ident.Imported.krate = "libcrux_hmac";
    path =
    [{ Concrete_ident.Imported.data =
       (Concrete_ident.Imported.ValueNs "wrap_bufalloc"); disambiguator = 0 }
     ]
  };
  kind = Concrete_ident.Kind.Value }) */
const _: () = ();
*)

39
proofs/fstar/models/Libcrux_ml_kem.Constants.fst
Normal file

@ -0,0 +1,39 @@
module Libcrux_ml_kem.Constants
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
open Core
open FStar.Mul

/// Each field element needs floor(log_2(FIELD_MODULUS)) + 1 = 12 bits to represent
let v_BITS_PER_COEFFICIENT: usize = mk_usize 12

/// Coefficients per ring element
let v_COEFFICIENTS_IN_RING_ELEMENT: usize = mk_usize 256

/// Bits required per (uncompressed) ring element
let v_BITS_PER_RING_ELEMENT: usize = v_COEFFICIENTS_IN_RING_ELEMENT *! mk_usize 12

/// Bytes required per (uncompressed) ring element
let v_BYTES_PER_RING_ELEMENT: usize = v_BITS_PER_RING_ELEMENT /! mk_usize 8

/// The size of an ML-KEM shared secret.
let v_SHARED_SECRET_SIZE: usize = mk_usize 32

let v_CPA_PKE_KEY_GENERATION_SEED_SIZE: usize = mk_usize 32

/// SHA3 256 digest size
let v_H_DIGEST_SIZE: usize = mk_usize 32

/// SHA3 512 digest size
let v_G_DIGEST_SIZE: usize = mk_usize 64

/// K * BITS_PER_RING_ELEMENT / 8
/// [eurydice] Note that we can\'t use const generics here because that breaks
/// C extraction with eurydice.
let ranked_bytes_per_ring_element (rank: usize)
    : Prims.Pure usize
      (requires rank <=. mk_usize 4)
      (ensures
        fun result ->
          let result:usize = result in
          result =. ((rank *! v_BITS_PER_RING_ELEMENT <: usize) /! mk_usize 8 <: usize)) =
  (rank *! v_BITS_PER_RING_ELEMENT <: usize) /! mk_usize 8
|
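The constants above are plain size arithmetic; a small Rust check (constants restated here for illustration) makes the derived values concrete.

// Sketch of the size arithmetic defined in Libcrux_ml_kem.Constants.
const BITS_PER_COEFFICIENT: usize = 12;
const COEFFICIENTS_IN_RING_ELEMENT: usize = 256;
const BITS_PER_RING_ELEMENT: usize = COEFFICIENTS_IN_RING_ELEMENT * BITS_PER_COEFFICIENT; // 3072
const BYTES_PER_RING_ELEMENT: usize = BITS_PER_RING_ELEMENT / 8; // 384

// Mirrors `ranked_bytes_per_ring_element rank` above, for rank <= 4.
const fn ranked_bytes_per_ring_element(rank: usize) -> usize {
    rank * BITS_PER_RING_ELEMENT / 8
}

fn main() {
    assert_eq!(BYTES_PER_RING_ELEMENT, 384);
    // ML-KEM-768 has rank 3, giving 1152 bytes for the encoded vector t.
    assert_eq!(ranked_bytes_per_ring_element(3), 1152);
}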
@@ -0,0 +1,57 @@
module Libcrux_ml_kem.Hash_functions.Portable
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
open Core
open FStar.Mul

/// The state.
/// It's only used for SHAKE128.
/// All other functions don't actually use any members.
val t_PortableHash (v_K: usize) : eqtype

val v_G (input: t_Slice u8)
    : Prims.Pure (t_Array u8 (mk_usize 64))
      Prims.l_True
      (ensures
        fun result ->
          let result:t_Array u8 (mk_usize 64) = result in
          result == Spec.Utils.v_G input)

val v_H (input: t_Slice u8)
    : Prims.Pure (t_Array u8 (mk_usize 32))
      Prims.l_True
      (ensures
        fun result ->
          let result:t_Array u8 (mk_usize 32) = result in
          result == Spec.Utils.v_H input)

val v_PRF (v_LEN: usize) (input: t_Slice u8)
    : Prims.Pure (t_Array u8 v_LEN)
      (requires v v_LEN < pow2 32)
      (ensures
        fun result ->
          let result:t_Array u8 v_LEN = result in
          result == Spec.Utils.v_PRF v_LEN input)

val v_PRFxN (v_K v_LEN: usize) (input: t_Array (t_Array u8 (mk_usize 33)) v_K)
    : Prims.Pure (t_Array (t_Array u8 v_LEN) v_K)
      (requires v v_LEN < pow2 32 /\ (v v_K == 2 \/ v v_K == 3 \/ v v_K == 4))
      (ensures
        fun result ->
          let result:t_Array (t_Array u8 v_LEN) v_K = result in
          result == Spec.Utils.v_PRFxN v_K v_LEN input)

val shake128_init_absorb_final (v_K: usize) (input: t_Array (t_Array u8 (mk_usize 34)) v_K)
    : Prims.Pure (t_PortableHash v_K) Prims.l_True (fun _ -> Prims.l_True)

val shake128_squeeze_first_three_blocks (v_K: usize) (st: t_PortableHash v_K)
    : Prims.Pure (t_PortableHash v_K & t_Array (t_Array u8 (mk_usize 504)) v_K)
      Prims.l_True
      (fun _ -> Prims.l_True)

val shake128_squeeze_next_block (v_K: usize) (st: t_PortableHash v_K)
    : Prims.Pure (t_PortableHash v_K & t_Array (t_Array u8 (mk_usize 168)) v_K)
      Prims.l_True
      (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl (v_K: usize) : Libcrux_ml_kem.Hash_functions.t_Hash (t_PortableHash v_K) v_K
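The fixed output sizes in the SHAKE128 signatures above follow from the 168-byte SHAKE128 rate; a short Rust check (constants named here only for illustration) spells this out.

// SHAKE128 absorbs and squeezes in blocks of its rate, 168 bytes.
const BLOCK_SIZE: usize = 168;
// `shake128_squeeze_first_three_blocks` returns three such blocks per row.
const THREE_BLOCKS: usize = 3 * BLOCK_SIZE;

fn main() {
    assert_eq!(THREE_BLOCKS, 504); // matches `mk_usize 504` in the val above
    assert_eq!(BLOCK_SIZE, 168);   // matches the `shake128_squeeze_next_block` output size
}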
72
proofs/fstar/models/Libcrux_ml_kem.Hash_functions.fsti
Normal file
@@ -0,0 +1,72 @@
|
||||||
|
module Libcrux_ml_kem.Hash_functions
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
/// The SHA3 block size.
|
||||||
|
let v_BLOCK_SIZE: usize = mk_usize 168
|
||||||
|
|
||||||
|
/// The size of 3 SHA3 blocks.
|
||||||
|
let v_THREE_BLOCKS: usize = v_BLOCK_SIZE *! mk_usize 3
|
||||||
|
|
||||||
|
/// Abstraction for the hashing, to pick the fastest version depending on the
|
||||||
|
/// platform features available.
|
||||||
|
/// There are 3 instantiations of this trait right now, using the libcrux-sha3 crate.
|
||||||
|
/// - AVX2
|
||||||
|
/// - NEON
|
||||||
|
/// - Portable
|
||||||
|
class t_Hash (v_Self: Type0) (v_K: usize) = {
|
||||||
|
f_G_pre:input: t_Slice u8 -> pred: Type0{true ==> pred};
|
||||||
|
f_G_post:input: t_Slice u8 -> result: t_Array u8 (mk_usize 64)
|
||||||
|
-> pred: Type0{pred ==> result == Spec.Utils.v_G input};
|
||||||
|
f_G:x0: t_Slice u8
|
||||||
|
-> Prims.Pure (t_Array u8 (mk_usize 64)) (f_G_pre x0) (fun result -> f_G_post x0 result);
|
||||||
|
f_H_pre:input: t_Slice u8 -> pred: Type0{true ==> pred};
|
||||||
|
f_H_post:input: t_Slice u8 -> result: t_Array u8 (mk_usize 32)
|
||||||
|
-> pred: Type0{pred ==> result == Spec.Utils.v_H input};
|
||||||
|
f_H:x0: t_Slice u8
|
||||||
|
-> Prims.Pure (t_Array u8 (mk_usize 32)) (f_H_pre x0) (fun result -> f_H_post x0 result);
|
||||||
|
f_PRF_pre:v_LEN: usize -> input: t_Slice u8 -> pred: Type0{v v_LEN < pow2 32 ==> pred};
|
||||||
|
f_PRF_post:v_LEN: usize -> input: t_Slice u8 -> result: t_Array u8 v_LEN
|
||||||
|
-> pred: Type0{pred ==> v v_LEN < pow2 32 ==> result == Spec.Utils.v_PRF v_LEN input};
|
||||||
|
f_PRF:v_LEN: usize -> x0: t_Slice u8
|
||||||
|
-> Prims.Pure (t_Array u8 v_LEN) (f_PRF_pre v_LEN x0) (fun result -> f_PRF_post v_LEN x0 result);
|
||||||
|
f_PRFxN_pre:v_LEN: usize -> input: t_Array (t_Array u8 (mk_usize 33)) v_K
|
||||||
|
-> pred: Type0{v v_LEN < pow2 32 /\ (v v_K == 2 \/ v v_K == 3 \/ v v_K == 4) ==> pred};
|
||||||
|
f_PRFxN_post:
|
||||||
|
v_LEN: usize ->
|
||||||
|
input: t_Array (t_Array u8 (mk_usize 33)) v_K ->
|
||||||
|
result: t_Array (t_Array u8 v_LEN) v_K
|
||||||
|
-> pred:
|
||||||
|
Type0
|
||||||
|
{ pred ==>
|
||||||
|
(v v_LEN < pow2 32 /\ (v v_K == 2 \/ v v_K == 3 \/ v v_K == 4)) ==>
|
||||||
|
result == Spec.Utils.v_PRFxN v_K v_LEN input };
|
||||||
|
f_PRFxN:v_LEN: usize -> x0: t_Array (t_Array u8 (mk_usize 33)) v_K
|
||||||
|
-> Prims.Pure (t_Array (t_Array u8 v_LEN) v_K)
|
||||||
|
(f_PRFxN_pre v_LEN x0)
|
||||||
|
(fun result -> f_PRFxN_post v_LEN x0 result);
|
||||||
|
f_shake128_init_absorb_final_pre:input: t_Array (t_Array u8 (mk_usize 34)) v_K
|
||||||
|
-> pred: Type0{true ==> pred};
|
||||||
|
f_shake128_init_absorb_final_post:t_Array (t_Array u8 (mk_usize 34)) v_K -> v_Self -> Type0;
|
||||||
|
f_shake128_init_absorb_final:x0: t_Array (t_Array u8 (mk_usize 34)) v_K
|
||||||
|
-> Prims.Pure v_Self
|
||||||
|
(f_shake128_init_absorb_final_pre x0)
|
||||||
|
(fun result -> f_shake128_init_absorb_final_post x0 result);
|
||||||
|
f_shake128_squeeze_first_three_blocks_pre:self_: v_Self -> pred: Type0{true ==> pred};
|
||||||
|
f_shake128_squeeze_first_three_blocks_post:
|
||||||
|
v_Self ->
|
||||||
|
(v_Self & t_Array (t_Array u8 (mk_usize 504)) v_K)
|
||||||
|
-> Type0;
|
||||||
|
f_shake128_squeeze_first_three_blocks:x0: v_Self
|
||||||
|
-> Prims.Pure (v_Self & t_Array (t_Array u8 (mk_usize 504)) v_K)
|
||||||
|
(f_shake128_squeeze_first_three_blocks_pre x0)
|
||||||
|
(fun result -> f_shake128_squeeze_first_three_blocks_post x0 result);
|
||||||
|
f_shake128_squeeze_next_block_pre:self_: v_Self -> pred: Type0{true ==> pred};
|
||||||
|
f_shake128_squeeze_next_block_post:v_Self -> (v_Self & t_Array (t_Array u8 (mk_usize 168)) v_K)
|
||||||
|
-> Type0;
|
||||||
|
f_shake128_squeeze_next_block:x0: v_Self
|
||||||
|
-> Prims.Pure (v_Self & t_Array (t_Array u8 (mk_usize 168)) v_K)
|
||||||
|
(f_shake128_squeeze_next_block_pre x0)
|
||||||
|
(fun result -> f_shake128_squeeze_next_block_post x0 result)
|
||||||
|
}
|
||||||
|
|
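The `t_Hash` typeclass above corresponds to a trait-style abstraction on the Rust side: one interface, several backends (AVX2, NEON, portable), so generic code is written once and specialized per platform. The sketch below is hypothetical and self-contained; it only illustrates that pattern and is not the crate's actual trait.

// Hypothetical sketch of a hash-backend abstraction: one trait, several
// implementations, generic callers. Not the libcrux trait definition.
trait Hash {
    /// SHA3-256 over `input` ("H" in ML-KEM terms).
    fn h(input: &[u8]) -> [u8; 32];
}

struct Portable;
impl Hash for Portable {
    fn h(input: &[u8]) -> [u8; 32] {
        // Placeholder mixing only; a real backend would run SHA3-256 here.
        let mut out = [0u8; 32];
        for (i, b) in input.iter().enumerate() {
            out[i % 32] ^= *b;
        }
        out
    }
}

// Generic code is written once against the trait and monomorphized per backend.
fn public_key_hash<H: Hash>(ek: &[u8]) -> [u8; 32] {
    H::h(ek)
}

fn main() {
    let digest = public_key_hash::<Portable>(b"example encapsulation key bytes");
    assert_eq!(digest.len(), 32);
}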
@ -0,0 +1,346 @@
|
||||||
|
module Libcrux_ml_kem.Ind_cca.Incremental.Types
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
let _ =
|
||||||
|
(* This module has implicit dependencies, here we make them explicit. *)
|
||||||
|
(* The implicit dependencies arise from typeclasses instances. *)
|
||||||
|
let open Libcrux_ml_kem.Ind_cpa.Unpacked in
|
||||||
|
let open Libcrux_ml_kem.Vector.Traits in
|
||||||
|
()
|
||||||
|
|
||||||
|
/// Errors
|
||||||
|
type t_Error =
|
||||||
|
| Error_InvalidInputLength : t_Error
|
||||||
|
| Error_InvalidOutputLength : t_Error
|
||||||
|
| Error_InvalidPublicKey : t_Error
|
||||||
|
| Error_InsufficientRandomness : t_Error
|
||||||
|
|
||||||
|
val t_Error_cast_to_repr (x: t_Error) : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_15:Core.Fmt.t_Debug t_Error
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_16:Core.Clone.t_Clone t_Error
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_17:Core.Marker.t_Copy t_Error
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_18:Core.Marker.t_StructuralPartialEq t_Error
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_19:Core.Cmp.t_PartialEq t_Error t_Error
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_20:Core.Cmp.t_Eq t_Error
|
||||||
|
|
||||||
|
/// Incremental trait for unpacked key pairs.
|
||||||
|
class t_IncrementalKeyPair (v_Self: Type0) = {
|
||||||
|
f_pk1_bytes_pre:v_Self -> t_Slice u8 -> Type0;
|
||||||
|
f_pk1_bytes_post:v_Self -> t_Slice u8 -> (t_Slice u8 & Core.Result.t_Result Prims.unit t_Error)
|
||||||
|
-> Type0;
|
||||||
|
f_pk1_bytes:x0: v_Self -> x1: t_Slice u8
|
||||||
|
-> Prims.Pure (t_Slice u8 & Core.Result.t_Result Prims.unit t_Error)
|
||||||
|
(f_pk1_bytes_pre x0 x1)
|
||||||
|
(fun result -> f_pk1_bytes_post x0 x1 result);
|
||||||
|
f_pk2_bytes_pre:v_Self -> t_Slice u8 -> Type0;
|
||||||
|
f_pk2_bytes_post:v_Self -> t_Slice u8 -> t_Slice u8 -> Type0;
|
||||||
|
f_pk2_bytes:x0: v_Self -> x1: t_Slice u8
|
||||||
|
-> Prims.Pure (t_Slice u8) (f_pk2_bytes_pre x0 x1) (fun result -> f_pk2_bytes_post x0 x1 result)
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: t_IncrementalKeyPair (Libcrux_ml_kem.Ind_cca.Unpacked.t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
|
||||||
|
/// The incremental public key that allows generating [`Ciphertext1`].
|
||||||
|
type t_PublicKey1 = {
|
||||||
|
f_seed:t_Array u8 (mk_usize 32);
|
||||||
|
f_hash:t_Array u8 (mk_usize 32)
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_21:Core.Default.t_Default t_PublicKey1
|
||||||
|
|
||||||
|
/// Get the size of the first public key in bytes.
|
||||||
|
val impl_PublicKey1__len: Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_2: Core.Convert.t_TryFrom t_PublicKey1 (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_Error = t_Error;
|
||||||
|
f_try_from_pre = (fun (value: t_Slice u8) -> true);
|
||||||
|
f_try_from_post
|
||||||
|
=
|
||||||
|
(fun (value: t_Slice u8) (out: Core.Result.t_Result t_PublicKey1 t_Error) -> true);
|
||||||
|
f_try_from
|
||||||
|
=
|
||||||
|
fun (value: t_Slice u8) ->
|
||||||
|
if (Core.Slice.impl__len #u8 value <: usize) <. mk_usize 64
|
||||||
|
then
|
||||||
|
Core.Result.Result_Err (Error_InvalidInputLength <: t_Error)
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result t_PublicKey1 t_Error
|
||||||
|
else
|
||||||
|
let seed:t_Array u8 (mk_usize 32) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 32) in
|
||||||
|
let seed:t_Array u8 (mk_usize 32) =
|
||||||
|
Core.Slice.impl__copy_from_slice #u8
|
||||||
|
seed
|
||||||
|
(value.[ { Core.Ops.Range.f_start = mk_usize 0; Core.Ops.Range.f_end = mk_usize 32 }
|
||||||
|
<:
|
||||||
|
Core.Ops.Range.t_Range usize ]
|
||||||
|
<:
|
||||||
|
t_Slice u8)
|
||||||
|
in
|
||||||
|
let hash:t_Array u8 (mk_usize 32) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 32) in
|
||||||
|
let hash:t_Array u8 (mk_usize 32) =
|
||||||
|
Core.Slice.impl__copy_from_slice #u8
|
||||||
|
hash
|
||||||
|
(value.[ { Core.Ops.Range.f_start = mk_usize 32; Core.Ops.Range.f_end = mk_usize 64 }
|
||||||
|
<:
|
||||||
|
Core.Ops.Range.t_Range usize ]
|
||||||
|
<:
|
||||||
|
t_Slice u8)
|
||||||
|
in
|
||||||
|
Core.Result.Result_Ok ({ f_seed = seed; f_hash = hash } <: t_PublicKey1)
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result t_PublicKey1 t_Error
|
||||||
|
}
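The `TryFrom` instance above reads a `PublicKey1` from at least 64 bytes as a 32-byte seed followed by a 32-byte hash, rejecting shorter inputs. A hedged Rust sketch of the same split (type and helper names are illustrative, not the crate's API):

// Illustrative only: mirrors the split performed by the TryFrom instance above.
struct PublicKey1 {
    seed: [u8; 32],
    hash: [u8; 32],
}

#[derive(Debug)]
enum Error {
    InvalidInputLength,
}

fn public_key1_from_bytes(value: &[u8]) -> Result<PublicKey1, Error> {
    if value.len() < 64 {
        return Err(Error::InvalidInputLength);
    }
    let mut seed = [0u8; 32];
    let mut hash = [0u8; 32];
    seed.copy_from_slice(&value[0..32]);  // first 32 bytes: seed
    hash.copy_from_slice(&value[32..64]); // next 32 bytes: hash of the public key
    Ok(PublicKey1 { seed, hash })
}

fn main() {
    assert!(public_key1_from_bytes(&[0u8; 63]).is_err());
    let pk = public_key1_from_bytes(&[7u8; 64]).unwrap();
    assert_eq!(pk.seed, [7u8; 32]);
    assert_eq!(pk.hash, [7u8; 32]);
}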
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_3:Core.Convert.t_From t_PublicKey1 (t_Array u8 (mk_usize 64))
|
||||||
|
|
||||||
|
/// The incremental public key that allows generating [`Ciphertext2`].
|
||||||
|
/// This public key is serialized to safe bytes on the wire.
|
||||||
|
type t_PublicKey2 (v_LEN: usize) = { f_tt_as_ntt:t_Array u8 v_LEN }
|
||||||
|
|
||||||
|
/// Get the size of the second public key in bytes.
|
||||||
|
val impl_4__len: v_LEN: usize -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Deserialize the public key.
|
||||||
|
val impl_4__deserialize
|
||||||
|
(v_LEN v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_PublicKey2 v_LEN)
|
||||||
|
: Prims.Pure (t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// The partial ciphertext c1 - first part.
|
||||||
|
type t_Ciphertext1 (v_LEN: usize) = { f_value:t_Array u8 v_LEN }
|
||||||
|
|
||||||
|
/// The size of the ciphertext.
|
||||||
|
val impl_5__len: v_LEN: usize -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// The partial ciphertext c2 - second part.
|
||||||
|
type t_Ciphertext2 (v_LEN: usize) = { f_value:t_Array u8 v_LEN }
|
||||||
|
|
||||||
|
/// The size of the ciphertext.
|
||||||
|
val impl_6__len: v_LEN: usize -> Prims.unit -> Prims.Pure usize Prims.l_True
|
||||||
|
(ensures fun res -> let res:usize = res in res =. v_LEN)
|
||||||
|
|
||||||
|
/// The incremental state for encapsulate.
|
||||||
|
type t_EncapsState
|
||||||
|
(v_K: usize) (v_Vector: Type0) {| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= {
|
||||||
|
f_r_as_ntt:t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K;
|
||||||
|
f_error2:Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector;
|
||||||
|
f_randomness:t_Array u8 (mk_usize 32)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the number of bytes, required for the state.
|
||||||
|
val impl_7__num_bytes:
|
||||||
|
v_K: usize ->
|
||||||
|
#v_Vector: Type0 ->
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |} ->
|
||||||
|
Prims.unit
|
||||||
|
-> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the state as bytes
|
||||||
|
val impl_7__to_bytes
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_EncapsState v_K v_Vector)
|
||||||
|
(state: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8 & Core.Result.t_Result Prims.unit t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Build a state from bytes
|
||||||
|
val impl_7__try_from_bytes
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(bytes: t_Slice u8)
|
||||||
|
: Prims.Pure (Core.Result.t_Result (t_EncapsState v_K v_Vector) t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Build a state from bytes
|
||||||
|
val impl_7__from_bytes
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_STATE_LEN: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(bytes: t_Array u8 v_STATE_LEN)
|
||||||
|
: Prims.Pure (t_EncapsState v_K v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Convert [`MlKemPublicKeyUnpacked`] to a [`PublicKey1`]
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_8
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Convert.t_From t_PublicKey1
|
||||||
|
(Libcrux_ml_kem.Ind_cca.Unpacked.t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
|
||||||
|
/// Convert [`MlKemPublicKeyUnpacked`] to a [`PublicKey2`].
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_9
|
||||||
|
(v_K v_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Convert.t_From (t_PublicKey2 v_LEN)
|
||||||
|
(Libcrux_ml_kem.Ind_cca.Unpacked.t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
|
||||||
|
/// Convert a byte slice `&[u8]` to a [`PublicKey2`].
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_10 (v_LEN: usize) : Core.Convert.t_TryFrom (t_PublicKey2 v_LEN) (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_Error = t_Error;
|
||||||
|
f_try_from_pre = (fun (value: t_Slice u8) -> true);
|
||||||
|
f_try_from_post
|
||||||
|
=
|
||||||
|
(fun (value: t_Slice u8) (out: Core.Result.t_Result (t_PublicKey2 v_LEN) t_Error) -> true);
|
||||||
|
f_try_from
|
||||||
|
=
|
||||||
|
fun (value: t_Slice u8) ->
|
||||||
|
if (Core.Slice.impl__len #u8 value <: usize) <. v_LEN
|
||||||
|
then
|
||||||
|
Core.Result.Result_Err (Error_InvalidInputLength <: t_Error)
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_PublicKey2 v_LEN) t_Error
|
||||||
|
else
|
||||||
|
let tt_as_ntt:t_Array u8 v_LEN = Rust_primitives.Hax.repeat (mk_u8 0) v_LEN in
|
||||||
|
let tt_as_ntt:t_Array u8 v_LEN =
|
||||||
|
Core.Slice.impl__copy_from_slice #u8
|
||||||
|
tt_as_ntt
|
||||||
|
(value.[ { Core.Ops.Range.f_start = mk_usize 0; Core.Ops.Range.f_end = v_LEN }
|
||||||
|
<:
|
||||||
|
Core.Ops.Range.t_Range usize ]
|
||||||
|
<:
|
||||||
|
t_Slice u8)
|
||||||
|
in
|
||||||
|
Core.Result.Result_Ok ({ f_tt_as_ntt = tt_as_ntt } <: t_PublicKey2 v_LEN)
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_PublicKey2 v_LEN) t_Error
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert bytes `&[u8; LEN]` to a [`PublicKey2`].
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_11 (v_LEN: usize) : Core.Convert.t_From (t_PublicKey2 v_LEN) (t_Array u8 v_LEN)
|
||||||
|
|
||||||
|
type t_KeyPair
|
||||||
|
(v_K: usize) (v_PK2_LEN: usize) (v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= {
|
||||||
|
f_pk1:t_PublicKey1;
|
||||||
|
f_pk2:t_PublicKey2 v_PK2_LEN;
|
||||||
|
f_sk:Libcrux_ml_kem.Ind_cca.Unpacked.t_MlKemPrivateKeyUnpacked v_K v_Vector;
|
||||||
|
f_matrix:t_Array (t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K) v_K
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_12
|
||||||
|
(v_K v_PK2_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Convert.t_From (t_KeyPair v_K v_PK2_LEN v_Vector)
|
||||||
|
(Libcrux_ml_kem.Ind_cca.Unpacked.t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_13
|
||||||
|
(v_K v_PK2_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Convert.t_From (Libcrux_ml_kem.Ind_cca.Unpacked.t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
(t_KeyPair v_K v_PK2_LEN v_Vector)
|
||||||
|
|
||||||
|
/// Write `value` into `out` at `offset`.
|
||||||
|
val write (out value: t_Slice u8) (offset: usize)
|
||||||
|
: Prims.Pure (t_Slice u8 & usize) Prims.l_True (fun _ -> Prims.l_True)
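A plausible reading of `write` is that it copies `value` into `out` at `offset` and hands back the updated buffer together with the next free offset; the Rust sketch below assumes that return convention, which is not spelled out here.

// Assumed behaviour of the `write` helper above, for illustration only.
fn write(out: &mut [u8], value: &[u8], offset: usize) -> usize {
    out[offset..offset + value.len()].copy_from_slice(value);
    offset + value.len() // next offset to write at
}

fn main() {
    let mut buf = [0u8; 8];
    let next = write(&mut buf, &[1, 2, 3], 2);
    assert_eq!(next, 5);
    assert_eq!(buf, [0, 0, 1, 2, 3, 0, 0, 0]);
}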
|
||||||
|
|
||||||
|
/// Get [`PublicKey1`] as bytes.
|
||||||
|
val impl_14__pk1_bytes
|
||||||
|
(v_K v_PK2_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_KeyPair v_K v_PK2_LEN v_Vector)
|
||||||
|
(pk1: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8 & Core.Result.t_Result Prims.unit t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get [`PublicKey2`] as bytes.
|
||||||
|
val impl_14__pk2_bytes
|
||||||
|
(v_K v_PK2_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_KeyPair v_K v_PK2_LEN v_Vector)
|
||||||
|
(pk2: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8 & Core.Result.t_Result Prims.unit t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// The byte size of this key pair.
|
||||||
|
val impl_14__num_bytes:
|
||||||
|
v_K: usize ->
|
||||||
|
v_PK2_LEN: usize ->
|
||||||
|
#v_Vector: Type0 ->
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |} ->
|
||||||
|
Prims.unit
|
||||||
|
-> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Write this key pair into the `key` bytes.
|
||||||
|
/// `key` must be at least of length `num_bytes()`
|
||||||
|
val impl_14__to_bytes
|
||||||
|
(v_K v_PK2_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_KeyPair v_K v_PK2_LEN v_Vector)
|
||||||
|
(key: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8 & Core.Result.t_Result Prims.unit t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Write this key pair into the `key` bytes.
|
||||||
|
/// This is the compressed private key.
|
||||||
|
/// `key` must be at least of length secret key size
|
||||||
|
/// Layout: dk | ek | H(ek) | z
|
||||||
|
val impl_14__to_bytes_compressed
|
||||||
|
(v_K v_PK2_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_KEY_SIZE v_VEC_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_KeyPair v_K v_PK2_LEN v_Vector)
|
||||||
|
(key: t_Array u8 v_KEY_SIZE)
|
||||||
|
: Prims.Pure (t_Array u8 v_KEY_SIZE) Prims.l_True (fun _ -> Prims.l_True)
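For ML-KEM-768 the `dk | ek | H(ek) | z` layout named above adds up to the 2400-byte compressed key used elsewhere in this change; a quick Rust check of the byte budget (constants restated for illustration):

// Byte budget of the compressed key layout `dk | ek | H(ek) | z` for ML-KEM-768.
const DK_PKE: usize = 3 * 384;  // IND-CPA decryption key: 3 ring elements, 384 bytes each
const EK: usize = 3 * 384 + 32; // IND-CPA encryption key: encoded t plus the 32-byte seed
const H_EK: usize = 32;         // SHA3-256 hash of the encryption key
const Z: usize = 32;            // implicit-rejection secret

fn main() {
    assert_eq!(DK_PKE + EK + H_EK + Z, 2400); // matches the 2400-byte compressed key pair
}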
|
||||||
|
|
||||||
|
/// Read a key pair from the `key` bytes.
|
||||||
|
/// `key` must be at least of length `num_bytes()`
|
||||||
|
val impl_14__from_bytes
|
||||||
|
(v_K v_PK2_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(key: t_Slice u8)
|
||||||
|
: Prims.Pure (Core.Result.t_Result (t_KeyPair v_K v_PK2_LEN v_Vector) t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
446
proofs/fstar/models/Libcrux_ml_kem.Ind_cca.Unpacked.fsti
Normal file
@@ -0,0 +1,446 @@
|
||||||
|
module Libcrux_ml_kem.Ind_cca.Unpacked
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
let _ =
|
||||||
|
(* This module has implicit dependencies, here we make them explicit. *)
|
||||||
|
(* The implicit dependencies arise from typeclasses instances. *)
|
||||||
|
let open Libcrux_ml_kem.Hash_functions in
|
||||||
|
let open Libcrux_ml_kem.Hash_functions.Portable in
|
||||||
|
let open Libcrux_ml_kem.Ind_cpa.Unpacked in
|
||||||
|
let open Libcrux_ml_kem.Polynomial in
|
||||||
|
let open Libcrux_ml_kem.Types in
|
||||||
|
let open Libcrux_ml_kem.Variant in
|
||||||
|
let open Libcrux_ml_kem.Vector.Traits in
|
||||||
|
()
|
||||||
|
|
||||||
|
/// An unpacked ML-KEM IND-CCA Private Key
|
||||||
|
type t_MlKemPrivateKeyUnpacked
|
||||||
|
(v_K: usize) (v_Vector: Type0) {| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= {
|
||||||
|
f_ind_cpa_private_key:Libcrux_ml_kem.Ind_cpa.Unpacked.t_IndCpaPrivateKeyUnpacked v_K v_Vector;
|
||||||
|
f_implicit_rejection_value:t_Array u8 (mk_usize 32)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An unpacked ML-KEM IND-CCA Private Key
|
||||||
|
type t_MlKemPublicKeyUnpacked
|
||||||
|
(v_K: usize) (v_Vector: Type0) {| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= {
|
||||||
|
f_ind_cpa_public_key:Libcrux_ml_kem.Ind_cpa.Unpacked.t_IndCpaPublicKeyUnpacked v_K v_Vector;
|
||||||
|
f_public_key_hash:t_Array u8 (mk_usize 32)
|
||||||
|
}
|
||||||
|
|
||||||
|
let impl_2
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Clone.t_Clone v_Vector)
|
||||||
|
(#[FStar.Tactics.Typeclasses.tcresolve ()]
|
||||||
|
i2:
|
||||||
|
Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector)
|
||||||
|
: Core.Clone.t_Clone (t_MlKemPublicKeyUnpacked v_K v_Vector) = { f_clone = (fun x -> x) }
|
||||||
|
|
||||||
|
/// An unpacked ML-KEM KeyPair
|
||||||
|
type t_MlKemKeyPairUnpacked
|
||||||
|
(v_K: usize) (v_Vector: Type0) {| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= {
|
||||||
|
f_private_key:t_MlKemPrivateKeyUnpacked v_K v_Vector;
|
||||||
|
f_public_key:t_MlKemPublicKeyUnpacked v_K v_Vector
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate an unpacked key from a serialized key.
|
||||||
|
val unpack_public_key
|
||||||
|
(v_K v_T_AS_NTT_ENCODED_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(#v_Hasher #v_Vector: Type0)
|
||||||
|
{| i2: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |}
|
||||||
|
{| i3: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(public_key: Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
(unpacked_public_key: t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K /\
|
||||||
|
v_T_AS_NTT_ENCODED_SIZE == Spec.MLKEM.v_T_AS_NTT_ENCODED_SIZE v_K)
|
||||||
|
(ensures
|
||||||
|
fun unpacked_public_key_future ->
|
||||||
|
let unpacked_public_key_future:t_MlKemPublicKeyUnpacked v_K v_Vector =
|
||||||
|
unpacked_public_key_future
|
||||||
|
in
|
||||||
|
let unpacked_public_key_future:t_MlKemPublicKeyUnpacked v_K v_Vector =
|
||||||
|
unpacked_public_key_future
|
||||||
|
in
|
||||||
|
let public_key_hash, (seed, (deserialized_pk, (matrix_A, valid))) =
|
||||||
|
Spec.MLKEM.ind_cca_unpack_public_key v_K public_key.f_value
|
||||||
|
in
|
||||||
|
(valid ==>
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_matrix_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
unpacked_public_key_future.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_A ==
|
||||||
|
matrix_A) /\
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
unpacked_public_key_future.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt ==
|
||||||
|
deserialized_pk /\
|
||||||
|
unpacked_public_key_future.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_seed_for_A ==
|
||||||
|
seed /\ unpacked_public_key_future.f_public_key_hash == public_key_hash)
|
||||||
|
|
||||||
|
/// Get the serialized public key.
|
||||||
|
val impl_3__serialized_mut
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
(serialized: Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
(requires
|
||||||
|
(let self = self in
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K /\
|
||||||
|
(forall (i: nat).
|
||||||
|
i < v v_K ==>
|
||||||
|
Libcrux_ml_kem.Serialize.coefficients_field_modulus_range (Seq.index self
|
||||||
|
.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt
|
||||||
|
i))))
|
||||||
|
(ensures
|
||||||
|
fun serialized_future ->
|
||||||
|
let serialized_future:Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE =
|
||||||
|
serialized_future
|
||||||
|
in
|
||||||
|
let self = self in
|
||||||
|
serialized_future.f_value ==
|
||||||
|
Seq.append (Spec.MLKEM.vector_encode_12 #v_K
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
self.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt))
|
||||||
|
self.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_seed_for_A)
|
||||||
|
|
||||||
|
/// Get the serialized public key.
|
||||||
|
val impl_3__serialized
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
(requires
|
||||||
|
(let self = self in
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K /\
|
||||||
|
(forall (i: nat).
|
||||||
|
i < v v_K ==>
|
||||||
|
Libcrux_ml_kem.Serialize.coefficients_field_modulus_range (Seq.index self
|
||||||
|
.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt
|
||||||
|
i))))
|
||||||
|
(ensures
|
||||||
|
fun res ->
|
||||||
|
let res:Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE = res in
|
||||||
|
let self = self in
|
||||||
|
res.Libcrux_ml_kem.Types.f_value ==
|
||||||
|
Seq.append (Spec.MLKEM.vector_encode_12 #v_K
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
self.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt))
|
||||||
|
self.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_seed_for_A)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Default.t_Default (t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
|
||||||
|
/// Take a serialized private key and generate an unpacked key pair from it.
|
||||||
|
val keys_from_private_key
|
||||||
|
(v_K v_SECRET_KEY_SIZE v_CPA_SECRET_KEY_SIZE v_PUBLIC_KEY_SIZE v_T_AS_NTT_ENCODED_SIZE: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(private_key: Libcrux_ml_kem.Types.t_MlKemPrivateKey v_SECRET_KEY_SIZE)
|
||||||
|
(key_pair: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_SECRET_KEY_SIZE == Spec.MLKEM.v_CCA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_CPA_SECRET_KEY_SIZE == Spec.MLKEM.v_CPA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K /\
|
||||||
|
v_T_AS_NTT_ENCODED_SIZE == Spec.MLKEM.v_T_AS_NTT_ENCODED_SIZE v_K)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the serialized public key.
|
||||||
|
val impl_4__public_key
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (t_MlKemPublicKeyUnpacked v_K v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the serialized public key.
|
||||||
|
val impl_4__private_key
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (t_MlKemPrivateKeyUnpacked v_K v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the serialized public key.
|
||||||
|
val impl_4__serialized_public_key_mut
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
(serialized: Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
(requires
|
||||||
|
(let self = self in
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K /\
|
||||||
|
(forall (i: nat).
|
||||||
|
i < v v_K ==>
|
||||||
|
Libcrux_ml_kem.Serialize.coefficients_field_modulus_range (Seq.index self.f_public_key
|
||||||
|
.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt
|
||||||
|
i))))
|
||||||
|
(ensures
|
||||||
|
fun serialized_future ->
|
||||||
|
let serialized_future:Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE =
|
||||||
|
serialized_future
|
||||||
|
in
|
||||||
|
let self = self in
|
||||||
|
serialized_future.f_value ==
|
||||||
|
Seq.append (Spec.MLKEM.vector_encode_12 #v_K
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
self.f_public_key.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt))
|
||||||
|
self.f_public_key.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_seed_for_A)
|
||||||
|
|
||||||
|
/// Get the serialized public key.
|
||||||
|
val impl_4__serialized_public_key
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
(requires
|
||||||
|
(let self = self in
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K /\
|
||||||
|
(forall (i: nat).
|
||||||
|
i < v v_K ==>
|
||||||
|
Libcrux_ml_kem.Serialize.coefficients_field_modulus_range (Seq.index self.f_public_key
|
||||||
|
.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt
|
||||||
|
i))))
|
||||||
|
(ensures
|
||||||
|
fun res ->
|
||||||
|
let res:Libcrux_ml_kem.Types.t_MlKemPublicKey v_PUBLIC_KEY_SIZE = res in
|
||||||
|
let self = self in
|
||||||
|
res.f_value ==
|
||||||
|
Seq.append (Spec.MLKEM.vector_encode_12 #v_K
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
self.f_public_key.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt))
|
||||||
|
self.f_public_key.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_seed_for_A)
|
||||||
|
|
||||||
|
/// Get the serialized private key.
|
||||||
|
val impl_4__serialized_private_key_mut
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_CPA_PRIVATE_KEY_SIZE v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
(serialized: Libcrux_ml_kem.Types.t_MlKemPrivateKey v_PRIVATE_KEY_SIZE)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemPrivateKey v_PRIVATE_KEY_SIZE)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_PRIVATE_KEY_SIZE == Spec.MLKEM.v_CCA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_CPA_PRIVATE_KEY_SIZE == Spec.MLKEM.v_CPA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the serialized private key.
|
||||||
|
val impl_4__serialized_private_key
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_CPA_PRIVATE_KEY_SIZE v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemPrivateKey v_PRIVATE_KEY_SIZE)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_PRIVATE_KEY_SIZE == Spec.MLKEM.v_CCA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_CPA_PRIVATE_KEY_SIZE == Spec.MLKEM.v_CPA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_1
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Default.t_Default (t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
|
||||||
|
/// Create a new empty unpacked key pair.
|
||||||
|
val impl_4__new:
|
||||||
|
v_K: usize ->
|
||||||
|
#v_Vector: Type0 ->
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |} ->
|
||||||
|
Prims.unit
|
||||||
|
-> Prims.Pure (t_MlKemKeyPairUnpacked v_K v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Take a serialized private key and generate an unpacked key pair from it.
|
||||||
|
val impl_4__from_private_key
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_SECRET_KEY_SIZE v_CPA_SECRET_KEY_SIZE v_PUBLIC_KEY_SIZE v_T_AS_NTT_ENCODED_SIZE: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(private_key: Libcrux_ml_kem.Types.t_MlKemPrivateKey v_SECRET_KEY_SIZE)
|
||||||
|
: Prims.Pure (t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_SECRET_KEY_SIZE == Spec.MLKEM.v_CCA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_CPA_SECRET_KEY_SIZE == Spec.MLKEM.v_CPA_PRIVATE_KEY_SIZE v_K /\
|
||||||
|
v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K /\
|
||||||
|
v_T_AS_NTT_ENCODED_SIZE == Spec.MLKEM.v_T_AS_NTT_ENCODED_SIZE v_K)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val transpose_a
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(ind_cpa_a:
|
||||||
|
t_Array (t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K) v_K)
|
||||||
|
: Prims.Pure
|
||||||
|
(t_Array (t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K) v_K)
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array
|
||||||
|
(t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K) v_K =
|
||||||
|
result
|
||||||
|
in
|
||||||
|
forall (i: nat).
|
||||||
|
i < v v_K ==>
|
||||||
|
(forall (j: nat).
|
||||||
|
j < v v_K ==>
|
||||||
|
Seq.index (Seq.index result i) j == Seq.index (Seq.index ind_cpa_a j) i))
|
||||||
|
|
||||||
|
/// Generate Unpacked Keys
|
||||||
|
val generate_keypair
|
||||||
|
(v_K v_CPA_PRIVATE_KEY_SIZE v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE v_ETA1 v_ETA1_RANDOMNESS_SIZE:
|
||||||
|
usize)
|
||||||
|
(#v_Vector #v_Hasher #v_Scheme: Type0)
|
||||||
|
{| i3: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
{| i4: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |}
|
||||||
|
{| i5: Libcrux_ml_kem.Variant.t_Variant v_Scheme |}
|
||||||
|
(randomness: t_Array u8 (mk_usize 64))
|
||||||
|
(out: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
: Prims.Pure (t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_ETA1_RANDOMNESS_SIZE == Spec.MLKEM.v_ETA1_RANDOMNESS_SIZE v_K /\
|
||||||
|
v_ETA1 == Spec.MLKEM.v_ETA1 v_K /\ v_PUBLIC_KEY_SIZE == Spec.MLKEM.v_CPA_PUBLIC_KEY_SIZE v_K
|
||||||
|
)
|
||||||
|
(ensures
|
||||||
|
fun out_future ->
|
||||||
|
let out_future:t_MlKemKeyPairUnpacked v_K v_Vector = out_future in
|
||||||
|
let ((m_A, public_key_hash), implicit_rejection_value), valid =
|
||||||
|
Spec.MLKEM.ind_cca_unpack_generate_keypair v_K randomness
|
||||||
|
in
|
||||||
|
valid ==>
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_matrix_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
out_future.f_public_key.f_ind_cpa_public_key.f_A ==
|
||||||
|
m_A /\ out_future.f_public_key.f_public_key_hash == public_key_hash /\
|
||||||
|
out_future.f_private_key.f_implicit_rejection_value == implicit_rejection_value)
|
||||||
|
|
||||||
|
val encaps_prepare
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Hasher: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |}
|
||||||
|
(randomness pk_hash: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 64))
|
||||||
|
(requires
|
||||||
|
(Core.Slice.impl__len #u8 randomness <: usize) =. mk_usize 32 &&
|
||||||
|
(Core.Slice.impl__len #u8 pk_hash <: usize) =. mk_usize 32)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 (mk_usize 64) = result in
|
||||||
|
result == Spec.Utils.v_G (concat randomness pk_hash))
|
||||||
|
|
||||||
|
val encapsulate
|
||||||
|
(v_K v_CIPHERTEXT_SIZE v_PUBLIC_KEY_SIZE v_T_AS_NTT_ENCODED_SIZE v_C1_SIZE v_C2_SIZE v_VECTOR_U_COMPRESSION_FACTOR v_VECTOR_V_COMPRESSION_FACTOR v_VECTOR_U_BLOCK_LEN v_ETA1 v_ETA1_RANDOMNESS_SIZE v_ETA2 v_ETA2_RANDOMNESS_SIZE:
|
||||||
|
usize)
|
||||||
|
(#v_Vector #v_Hasher: Type0)
|
||||||
|
{| i2: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |}
|
||||||
|
(public_key: t_MlKemPublicKeyUnpacked v_K v_Vector)
|
||||||
|
(randomness: t_Array u8 (mk_usize 32))
|
||||||
|
: Prims.Pure
|
||||||
|
(Libcrux_ml_kem.Types.t_MlKemCiphertext v_CIPHERTEXT_SIZE & t_Array u8 (mk_usize 32))
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_ETA1 == Spec.MLKEM.v_ETA1 v_K /\
|
||||||
|
v_ETA1_RANDOMNESS_SIZE == Spec.MLKEM.v_ETA1_RANDOMNESS_SIZE v_K /\
|
||||||
|
v_ETA2 == Spec.MLKEM.v_ETA2 v_K /\
|
||||||
|
v_ETA2_RANDOMNESS_SIZE == Spec.MLKEM.v_ETA2_RANDOMNESS_SIZE v_K /\
|
||||||
|
v_C1_SIZE == Spec.MLKEM.v_C1_SIZE v_K /\ v_C2_SIZE == Spec.MLKEM.v_C2_SIZE v_K /\
|
||||||
|
v_VECTOR_U_COMPRESSION_FACTOR == Spec.MLKEM.v_VECTOR_U_COMPRESSION_FACTOR v_K /\
|
||||||
|
v_VECTOR_V_COMPRESSION_FACTOR == Spec.MLKEM.v_VECTOR_V_COMPRESSION_FACTOR v_K /\
|
||||||
|
v_VECTOR_U_BLOCK_LEN == Spec.MLKEM.v_C1_BLOCK_SIZE v_K /\
|
||||||
|
v_CIPHERTEXT_SIZE == Spec.MLKEM.v_CPA_CIPHERTEXT_SIZE v_K)
|
||||||
|
(ensures
|
||||||
|
fun temp_0_ ->
|
||||||
|
let ciphertext_result, shared_secret_array:(Libcrux_ml_kem.Types.t_MlKemCiphertext
|
||||||
|
v_CIPHERTEXT_SIZE &
|
||||||
|
t_Array u8 (mk_usize 32)) =
|
||||||
|
temp_0_
|
||||||
|
in
|
||||||
|
let ciphertext, shared_secret =
|
||||||
|
Spec.MLKEM.ind_cca_unpack_encapsulate v_K
|
||||||
|
public_key.f_public_key_hash
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
public_key.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt)
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_matrix_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
public_key.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_A)
|
||||||
|
randomness
|
||||||
|
in
|
||||||
|
ciphertext_result.f_value == ciphertext /\ shared_secret_array == shared_secret)
|
||||||
|
|
||||||
|
val decapsulate
|
||||||
|
(v_K v_SECRET_KEY_SIZE v_CPA_SECRET_KEY_SIZE v_PUBLIC_KEY_SIZE v_CIPHERTEXT_SIZE v_T_AS_NTT_ENCODED_SIZE v_C1_SIZE v_C2_SIZE v_VECTOR_U_COMPRESSION_FACTOR v_VECTOR_V_COMPRESSION_FACTOR v_C1_BLOCK_SIZE v_ETA1 v_ETA1_RANDOMNESS_SIZE v_ETA2 v_ETA2_RANDOMNESS_SIZE v_IMPLICIT_REJECTION_HASH_INPUT_SIZE:
|
||||||
|
usize)
|
||||||
|
(#v_Vector #v_Hasher: Type0)
|
||||||
|
{| i2: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |}
|
||||||
|
(key_pair: t_MlKemKeyPairUnpacked v_K v_Vector)
|
||||||
|
(ciphertext: Libcrux_ml_kem.Types.t_MlKemCiphertext v_CIPHERTEXT_SIZE)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 32))
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\ v_ETA1 == Spec.MLKEM.v_ETA1 v_K /\
|
||||||
|
v_ETA1_RANDOMNESS_SIZE == Spec.MLKEM.v_ETA1_RANDOMNESS_SIZE v_K /\
|
||||||
|
v_ETA2 == Spec.MLKEM.v_ETA2 v_K /\
|
||||||
|
v_ETA2_RANDOMNESS_SIZE == Spec.MLKEM.v_ETA2_RANDOMNESS_SIZE v_K /\
|
||||||
|
v_C1_SIZE == Spec.MLKEM.v_C1_SIZE v_K /\ v_C2_SIZE == Spec.MLKEM.v_C2_SIZE v_K /\
|
||||||
|
v_VECTOR_U_COMPRESSION_FACTOR == Spec.MLKEM.v_VECTOR_U_COMPRESSION_FACTOR v_K /\
|
||||||
|
v_VECTOR_V_COMPRESSION_FACTOR == Spec.MLKEM.v_VECTOR_V_COMPRESSION_FACTOR v_K /\
|
||||||
|
v_C1_BLOCK_SIZE == Spec.MLKEM.v_C1_BLOCK_SIZE v_K /\
|
||||||
|
v_CIPHERTEXT_SIZE == Spec.MLKEM.v_CPA_CIPHERTEXT_SIZE v_K /\
|
||||||
|
v_IMPLICIT_REJECTION_HASH_INPUT_SIZE == Spec.MLKEM.v_IMPLICIT_REJECTION_HASH_INPUT_SIZE v_K)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 (mk_usize 32) = result in
|
||||||
|
result ==
|
||||||
|
Spec.MLKEM.ind_cca_unpack_decapsulate v_K
|
||||||
|
key_pair.f_public_key.f_public_key_hash
|
||||||
|
key_pair.f_private_key.f_implicit_rejection_value
|
||||||
|
ciphertext.Libcrux_ml_kem.Types.f_value
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
key_pair.f_private_key.f_ind_cpa_private_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_secret_as_ntt)
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
key_pair.f_public_key.f_ind_cpa_public_key
|
||||||
|
.Libcrux_ml_kem.Ind_cpa.Unpacked.f_tt_as_ntt)
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_matrix_t #v_K
|
||||||
|
#v_Vector
|
||||||
|
key_pair.f_public_key.f_ind_cpa_public_key.Libcrux_ml_kem.Ind_cpa.Unpacked.f_A))
|
||||||
47
proofs/fstar/models/Libcrux_ml_kem.Ind_cpa.Unpacked.fsti
Normal file
@@ -0,0 +1,47 @@
|
||||||
|
module Libcrux_ml_kem.Ind_cpa.Unpacked
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
let _ =
|
||||||
|
(* This module has implicit dependencies, here we make them explicit. *)
|
||||||
|
(* The implicit dependencies arise from typeclasses instances. *)
|
||||||
|
let open Libcrux_ml_kem.Vector.Traits in
|
||||||
|
()
|
||||||
|
|
||||||
|
/// An unpacked ML-KEM IND-CPA Private Key
|
||||||
|
type t_IndCpaPrivateKeyUnpacked
|
||||||
|
(v_K: usize) (v_Vector: Type0) {| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= { f_secret_as_ntt:t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K }
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Default.t_Default (t_IndCpaPrivateKeyUnpacked v_K v_Vector)
|
||||||
|
|
||||||
|
/// An unpacked ML-KEM IND-CPA Public Key
|
||||||
|
type t_IndCpaPublicKeyUnpacked
|
||||||
|
(v_K: usize) (v_Vector: Type0) {| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= {
|
||||||
|
f_tt_as_ntt:t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K;
|
||||||
|
f_seed_for_A:t_Array u8 (mk_usize 32);
|
||||||
|
f_A:t_Array (t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K) v_K
|
||||||
|
}
|
||||||
|
|
||||||
|
let impl_2
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Clone.t_Clone v_Vector)
|
||||||
|
(#[FStar.Tactics.Typeclasses.tcresolve ()]
|
||||||
|
i2:
|
||||||
|
Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector)
|
||||||
|
: Core.Clone.t_Clone (t_IndCpaPublicKeyUnpacked v_K v_Vector) = { f_clone = (fun x -> x) }
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_1
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Default.t_Default (t_IndCpaPublicKeyUnpacked v_K v_Vector)
|
||||||
181
proofs/fstar/models/Libcrux_ml_kem.Mlkem768.Incremental.fsti
Normal file
@@ -0,0 +1,181 @@
|
||||||
|
module Libcrux_ml_kem.Mlkem768.Incremental
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
let _ =
|
||||||
|
(* This module has implicit dependencies, here we make them explicit. *)
|
||||||
|
(* The implicit dependencies arise from typeclasses instances. *)
|
||||||
|
let open Libcrux_ml_kem.Ind_cca.Incremental.Types in
|
||||||
|
let open Rand_core in
|
||||||
|
()
|
||||||
|
|
||||||
|
/// Get the size of the first public key in bytes.
|
||||||
|
val pk1_len: Prims.unit -> Prims.Pure usize Prims.l_True
|
||||||
|
(ensures fun res -> let res:usize = res in res =. mk_usize 64)
|
||||||
|
|
||||||
|
/// Get the size of the second public key in bytes.
|
||||||
|
val pk2_len: Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// The size of a compressed key pair in bytes.
|
||||||
|
let v_COMPRESSED_KEYPAIR_LEN: usize = Libcrux_ml_kem.Mlkem768.v_SECRET_KEY_SIZE
|
||||||
|
|
||||||
|
/// The size of the key pair in bytes.
|
||||||
|
val key_pair_len: Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// The size of the compressed key pair in bytes.
|
||||||
|
val key_pair_compressed_len: Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// The size of the encaps state in bytes.
|
||||||
|
val encaps_state_len: Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// The size of the shared secret.
|
||||||
|
val shared_secret_size: Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// An encoded, incremental key pair.
|
||||||
|
type t_KeyPairBytes = { f_value:t_Array u8 (mk_usize 7392) }
|
||||||
|
|
||||||
|
/// Get the raw bytes.
|
||||||
|
val impl_KeyPairBytes__to_bytes (self: t_KeyPairBytes)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 7392)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the PK1 bytes from the serialized key pair bytes
|
||||||
|
val impl_KeyPairBytes__pk1 (self: t_KeyPairBytes)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 64)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the PK2 bytes from the serialized key pair bytes
|
||||||
|
val impl_KeyPairBytes__pk2 (self: t_KeyPairBytes)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 1152)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_1:Core.Convert.t_AsRef t_KeyPairBytes (t_Slice u8)
|
||||||
|
|
||||||
|
/// Generate a key pair and write it into `key_pair`.
|
||||||
|
/// This uses unpacked keys and does not compress the keys.
|
||||||
|
/// `key_pair.len()` must be of size `key_pair_len()`.
|
||||||
|
/// The function returns an error if this is not the case.
|
||||||
|
val generate_key_pair (randomness: t_Array u8 (mk_usize 64)) (key_pair: t_Slice u8)
|
||||||
|
: Prims.Pure
|
||||||
|
(t_Slice u8 & Core.Result.t_Result Prims.unit Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Error
|
||||||
|
) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Generate a new key pair.
|
||||||
|
/// This uses unpacked keys and does not compress the keys.
|
||||||
|
val impl_KeyPairBytes__from_seed (randomness: t_Array u8 (mk_usize 64))
|
||||||
|
: Prims.Pure t_KeyPairBytes Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Generate a new key pair.
|
||||||
|
/// This uses unpacked keys and does not compress the keys.
|
||||||
|
val impl_KeyPairBytes__generate
|
||||||
|
(#iimpl_277843321_: Type0)
|
||||||
|
{| i1: Rand_core.t_RngCore iimpl_277843321_ |}
|
||||||
|
{| i2: Rand_core.t_CryptoRng iimpl_277843321_ |}
|
||||||
|
(rng: iimpl_277843321_)
|
||||||
|
: Prims.Pure (iimpl_277843321_ & t_KeyPairBytes) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// An encoded, compressed, incremental key pair.
|
||||||
|
/// Layout: dk | (t | ⍴) | H(ek) | z
|
||||||
|
type t_KeyPairCompressedBytes = { f_value:t_Array u8 (mk_usize 2400) }
|
||||||
|
|
||||||
|
/// Get the raw bytes.
|
||||||
|
val impl_KeyPairCompressedBytes__to_bytes (self: t_KeyPairCompressedBytes)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 2400)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the serialized private for decapsulation.
|
||||||
|
val impl_KeyPairCompressedBytes__sk (self: t_KeyPairCompressedBytes)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 2400)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
let impl_KeyPairCompressedBytes__pk1__v_START: usize =
|
||||||
|
mk_usize 2 *! Libcrux_ml_kem.Mlkem768.v_RANKED_BYTES_PER_RING_ELEMENT
|
||||||
|
|
||||||
|
/// Get the PK1 bytes from the serialized key pair bytes
|
||||||
|
val impl_KeyPairCompressedBytes__pk1 (self: t_KeyPairCompressedBytes)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 64)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
let impl_KeyPairCompressedBytes__pk2__v_START: usize =
|
||||||
|
Libcrux_ml_kem.Mlkem768.v_RANKED_BYTES_PER_RING_ELEMENT
|
||||||
|
|
||||||
|
/// Get the PK2 bytes from the serialized key pair bytes
|
||||||
|
val impl_KeyPairCompressedBytes__pk2 (self: t_KeyPairCompressedBytes)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 1152)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_3:Core.Convert.t_AsRef t_KeyPairCompressedBytes (t_Slice u8)
|
||||||
|
|
||||||
|
/// Generate a key pair and write it into `key_pair`.
|
||||||
|
/// This compresses the keys.
|
||||||
|
val generate_key_pair_compressed
|
||||||
|
(randomness: t_Array u8 (mk_usize 64))
|
||||||
|
(key_pair: t_Array u8 (mk_usize 2400))
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 2400)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Generate a new key pair.
|
||||||
|
/// This uses unpacked keys and does not compress the keys.
|
||||||
|
val impl_KeyPairCompressedBytes__from_seed (randomness: t_Array u8 (mk_usize 64))
|
||||||
|
: Prims.Pure t_KeyPairCompressedBytes Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Generate a new key pair.
|
||||||
|
/// This uses unpacked keys and does not compress the keys.
|
||||||
|
val impl_KeyPairCompressedBytes__generate
|
||||||
|
(#iimpl_277843321_: Type0)
|
||||||
|
{| i1: Rand_core.t_RngCore iimpl_277843321_ |}
|
||||||
|
{| i2: Rand_core.t_CryptoRng iimpl_277843321_ |}
|
||||||
|
(rng: iimpl_277843321_)
|
||||||
|
: Prims.Pure (iimpl_277843321_ & t_KeyPairCompressedBytes) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the PK1 bytes from the serialized key pair bytes
|
||||||
|
val pk1 (keypair: t_Array u8 (mk_usize 7392))
|
||||||
|
: Prims.Pure (t_Slice u8) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the PK2 bytes from the serialized key pair bytes
|
||||||
|
val pk2 (keypair: t_Array u8 (mk_usize 7392))
|
||||||
|
: Prims.Pure (t_Slice u8) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Validate that the two parts `pk1` and `pk2` are consistent.
|
||||||
|
val validate_pk (pk1: Libcrux_ml_kem.Ind_cca.Incremental.Types.t_PublicKey1) (pk2: t_Slice u8)
|
||||||
|
: Prims.Pure (Core.Result.t_Result Prims.unit Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Validate that the two parts `pk1` and `pk2` are consistent.
|
||||||
|
val validate_pk_bytes (pk1 pk2: t_Slice u8)
|
||||||
|
: Prims.Pure (Core.Result.t_Result Prims.unit Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Error)
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Encapsulate the first part of the ciphertext.
|
||||||
|
/// Returns an [`Error`] if the provided input or output don't have
|
||||||
|
/// the appropriate sizes.
|
||||||
|
val encapsulate1
|
||||||
|
(pk1: t_Slice u8)
|
||||||
|
(randomness: t_Array u8 (mk_usize 32))
|
||||||
|
(state shared_secret: t_Slice u8)
|
||||||
|
: Prims.Pure
|
||||||
|
(t_Slice u8 & t_Slice u8 &
|
||||||
|
Core.Result.t_Result (Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Ciphertext1 (mk_usize 960))
|
||||||
|
Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Error) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Encapsulate the second part of the ciphertext.
|
||||||
|
/// The second part of the public key is passed in as byte slice.
|
||||||
|
/// [`Error::InvalidInputLength`] is returned if `public_key_part` is too
|
||||||
|
/// short.
|
||||||
|
val encapsulate2 (state: t_Array u8 (mk_usize 2080)) (public_key_part: t_Array u8 (mk_usize 1152))
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Ciphertext2 (mk_usize 128))
|
||||||
|
Prims.l_True
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Decapsulate incremental ciphertexts.
|
||||||
|
val decapsulate_incremental_key
|
||||||
|
(private_key: t_Slice u8)
|
||||||
|
(ciphertext1: Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Ciphertext1 (mk_usize 960))
|
||||||
|
(ciphertext2: Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Ciphertext2 (mk_usize 128))
|
||||||
|
: Prims.Pure
|
||||||
|
(Core.Result.t_Result (t_Array u8 (mk_usize 32))
|
||||||
|
Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Error) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Decapsulate incremental ciphertexts.
|
||||||
|
val decapsulate_compressed_key
|
||||||
|
(private_key: t_Array u8 (mk_usize 2400))
|
||||||
|
(ciphertext1: Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Ciphertext1 (mk_usize 960))
|
||||||
|
(ciphertext2: Libcrux_ml_kem.Ind_cca.Incremental.Types.t_Ciphertext2 (mk_usize 128))
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 32)) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
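The sizes threaded through these signatures show how the split ciphertext relates to the ordinary ML-KEM 768 ciphertext: the first flight carries the 960-byte `Ciphertext1` (the compressed u vector), the second flight the 128-byte `Ciphertext2` (the compressed v element), and together they make up the usual 1088-byte ciphertext. The sketch below is only a bookkeeping check of those constants, not a call into the extracted API; the assumption that the first public-key part carries the 32-byte seed ρ is mine, taken from the standard ML-KEM public-key layout.

```rust
// Bookkeeping for the incremental ML-KEM 768 split described by the `val`
// signatures above (sizes read off the array types, not from the crate).
fn main() {
    const CIPHERTEXT1_SIZE: usize = 960; // t_Ciphertext1 (mk_usize 960)
    const CIPHERTEXT2_SIZE: usize = 128; // t_Ciphertext2 (mk_usize 128)
    const MLKEM768_CIPHERTEXT_SIZE: usize = 1088; // full ML-KEM 768 ciphertext

    // The two incremental flights together are exactly one ordinary ciphertext.
    assert_eq!(CIPHERTEXT1_SIZE + CIPHERTEXT2_SIZE, MLKEM768_CIPHERTEXT_SIZE);

    // Public-key split: pk2 is the 1152-byte encoded t-vector
    // (see `impl_KeyPairCompressedBytes__pk2`); assuming pk1 carries the
    // 32-byte seed ρ, the two parts add up to the 1184-byte public key.
    const PK2_SIZE: usize = 1152;
    const MLKEM768_PUBLIC_KEY_SIZE: usize = 1184;
    assert_eq!(PK2_SIZE + 32, MLKEM768_PUBLIC_KEY_SIZE);
}
```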
125
proofs/fstar/models/Libcrux_ml_kem.Mlkem768.fsti
Normal file
@@ -0,0 +1,125 @@
module Libcrux_ml_kem.Mlkem768
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
let v_RANK: usize = mk_usize 3
|
||||||
|
|
||||||
|
let v_RANKED_BYTES_PER_RING_ELEMENT: usize =
|
||||||
|
(v_RANK *! Libcrux_ml_kem.Constants.v_BITS_PER_RING_ELEMENT <: usize) /! mk_usize 8
|
||||||
|
|
||||||
|
let v_T_AS_NTT_ENCODED_SIZE: usize =
|
||||||
|
((v_RANK *! Libcrux_ml_kem.Constants.v_COEFFICIENTS_IN_RING_ELEMENT <: usize) *!
|
||||||
|
Libcrux_ml_kem.Constants.v_BITS_PER_COEFFICIENT
|
||||||
|
<:
|
||||||
|
usize) /!
|
||||||
|
mk_usize 8
|
||||||
|
|
||||||
|
let v_VECTOR_U_COMPRESSION_FACTOR: usize = mk_usize 10
|
||||||
|
|
||||||
|
let v_C1_BLOCK_SIZE: usize =
|
||||||
|
(Libcrux_ml_kem.Constants.v_COEFFICIENTS_IN_RING_ELEMENT *! v_VECTOR_U_COMPRESSION_FACTOR <: usize
|
||||||
|
) /!
|
||||||
|
mk_usize 8
|
||||||
|
|
||||||
|
let v_C1_SIZE: usize = v_C1_BLOCK_SIZE *! v_RANK
|
||||||
|
|
||||||
|
let v_VECTOR_V_COMPRESSION_FACTOR: usize = mk_usize 4
|
||||||
|
|
||||||
|
let v_C2_SIZE: usize =
|
||||||
|
(Libcrux_ml_kem.Constants.v_COEFFICIENTS_IN_RING_ELEMENT *! v_VECTOR_V_COMPRESSION_FACTOR <: usize
|
||||||
|
) /!
|
||||||
|
mk_usize 8
|
||||||
|
|
||||||
|
let v_CPA_PKE_SECRET_KEY_SIZE: usize =
|
||||||
|
((v_RANK *! Libcrux_ml_kem.Constants.v_COEFFICIENTS_IN_RING_ELEMENT <: usize) *!
|
||||||
|
Libcrux_ml_kem.Constants.v_BITS_PER_COEFFICIENT
|
||||||
|
<:
|
||||||
|
usize) /!
|
||||||
|
mk_usize 8
|
||||||
|
|
||||||
|
let v_CPA_PKE_PUBLIC_KEY_SIZE: usize = v_T_AS_NTT_ENCODED_SIZE +! mk_usize 32
|
||||||
|
|
||||||
|
let v_CPA_PKE_CIPHERTEXT_SIZE: usize = v_C1_SIZE +! v_C2_SIZE
|
||||||
|
|
||||||
|
let v_SECRET_KEY_SIZE: usize =
|
||||||
|
((v_CPA_PKE_SECRET_KEY_SIZE +! v_CPA_PKE_PUBLIC_KEY_SIZE <: usize) +!
|
||||||
|
Libcrux_ml_kem.Constants.v_H_DIGEST_SIZE
|
||||||
|
<:
|
||||||
|
usize) +!
|
||||||
|
Libcrux_ml_kem.Constants.v_SHARED_SECRET_SIZE
|
||||||
|
|
||||||
|
let v_ETA1: usize = mk_usize 2
|
||||||
|
|
||||||
|
let v_ETA1_RANDOMNESS_SIZE: usize = v_ETA1 *! mk_usize 64
|
||||||
|
|
||||||
|
let v_ETA2: usize = mk_usize 2
|
||||||
|
|
||||||
|
let v_ETA2_RANDOMNESS_SIZE: usize = v_ETA2 *! mk_usize 64
|
||||||
|
|
||||||
|
let v_IMPLICIT_REJECTION_HASH_INPUT_SIZE: usize =
|
||||||
|
Libcrux_ml_kem.Constants.v_SHARED_SECRET_SIZE +! v_CPA_PKE_CIPHERTEXT_SIZE
|
||||||
|
|
||||||
|
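As a sanity check, the concrete values these definitions evaluate to for rank 3 can be recomputed directly. The sketch below redoes the arithmetic with the per-ring-element parameters from FIPS 203 (256 coefficients, 12 bits per coefficient, 32-byte H digest and shared secret); those parameters live in `Libcrux_ml_kem.Constants` in the extraction and are assumed here.

```rust
// Recomputing the ML-KEM 768 size constants defined above (rank = 3).
fn main() {
    const RANK: usize = 3;
    const COEFFICIENTS_IN_RING_ELEMENT: usize = 256;
    const BITS_PER_COEFFICIENT: usize = 12;
    const BITS_PER_RING_ELEMENT: usize = COEFFICIENTS_IN_RING_ELEMENT * BITS_PER_COEFFICIENT;
    const H_DIGEST_SIZE: usize = 32;
    const SHARED_SECRET_SIZE: usize = 32;

    let t_as_ntt_encoded_size = RANK * COEFFICIENTS_IN_RING_ELEMENT * BITS_PER_COEFFICIENT / 8;
    let c1_block_size = COEFFICIENTS_IN_RING_ELEMENT * 10 / 8; // u compression factor 10
    let c1_size = c1_block_size * RANK;
    let c2_size = COEFFICIENTS_IN_RING_ELEMENT * 4 / 8; // v compression factor 4
    let cpa_pke_secret_key_size = RANK * COEFFICIENTS_IN_RING_ELEMENT * BITS_PER_COEFFICIENT / 8;
    let cpa_pke_public_key_size = t_as_ntt_encoded_size + 32;
    let cpa_pke_ciphertext_size = c1_size + c2_size;
    let secret_key_size =
        cpa_pke_secret_key_size + cpa_pke_public_key_size + H_DIGEST_SIZE + SHARED_SECRET_SIZE;

    assert_eq!(RANK * BITS_PER_RING_ELEMENT / 8, 1152); // RANKED_BYTES_PER_RING_ELEMENT
    assert_eq!(t_as_ntt_encoded_size, 1152); // T_AS_NTT_ENCODED_SIZE
    assert_eq!((c1_block_size, c1_size, c2_size), (320, 960, 128));
    assert_eq!(cpa_pke_public_key_size, 1184); // ML-KEM 768 public key
    assert_eq!(cpa_pke_ciphertext_size, 1088); // ML-KEM 768 ciphertext
    assert_eq!(secret_key_size, 2400); // ML-KEM 768 private key
    assert_eq!(SHARED_SECRET_SIZE + cpa_pke_ciphertext_size, 1120); // implicit-rejection hash input
}
```

These are exactly the 1184-, 1088-, and 2400-byte sizes that appear in the `val` signatures below.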
/// Validate a public key.
|
||||||
|
/// Returns `true` if valid, and `false` otherwise.
|
||||||
|
val validate_public_key (public_key: Libcrux_ml_kem.Types.t_MlKemPublicKey (mk_usize 1184))
|
||||||
|
: Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Validate a private key.
|
||||||
|
/// Returns `true` if valid, and `false` otherwise.
|
||||||
|
val validate_private_key
|
||||||
|
(private_key: Libcrux_ml_kem.Types.t_MlKemPrivateKey (mk_usize 2400))
|
||||||
|
(ciphertext: Libcrux_ml_kem.Types.t_MlKemCiphertext (mk_usize 1088))
|
||||||
|
: Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Generate ML-KEM 768 Key Pair
|
||||||
|
/// Generate an ML-KEM key pair. The input is a byte array of size
|
||||||
|
/// [`KEY_GENERATION_SEED_SIZE`].
|
||||||
|
/// This function returns an [`MlKem768KeyPair`].
|
||||||
|
val generate_key_pair (randomness: t_Array u8 (mk_usize 64))
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemKeyPair (mk_usize 2400) (mk_usize 1184))
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun res ->
|
||||||
|
let res:Libcrux_ml_kem.Types.t_MlKemKeyPair (mk_usize 2400) (mk_usize 1184) = res in
|
||||||
|
let (secret_key, public_key), valid =
|
||||||
|
Spec.MLKEM.Instances.mlkem768_generate_keypair randomness
|
||||||
|
in
|
||||||
|
valid ==> (res.f_sk.f_value == secret_key /\ res.f_pk.f_value == public_key))
|
||||||
|
|
||||||
|
/// Encapsulate ML-KEM 768
|
||||||
|
/// Generates an ([`MlKem768Ciphertext`], [`MlKemSharedSecret`]) tuple.
|
||||||
|
/// The input is a reference to an [`MlKem768PublicKey`] and [`SHARED_SECRET_SIZE`]
|
||||||
|
/// bytes of `randomness`.
|
||||||
|
val encapsulate
|
||||||
|
(public_key: Libcrux_ml_kem.Types.t_MlKemPublicKey (mk_usize 1184))
|
||||||
|
(randomness: t_Array u8 (mk_usize 32))
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Types.t_MlKemCiphertext (mk_usize 1088) & t_Array u8 (mk_usize 32))
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun res ->
|
||||||
|
let res:(Libcrux_ml_kem.Types.t_MlKemCiphertext (mk_usize 1088) & t_Array u8 (mk_usize 32)
|
||||||
|
) =
|
||||||
|
res
|
||||||
|
in
|
||||||
|
let (ciphertext, shared_secret), valid =
|
||||||
|
Spec.MLKEM.Instances.mlkem768_encapsulate public_key.f_value randomness
|
||||||
|
in
|
||||||
|
let res_ciphertext, res_shared_secret = res in
|
||||||
|
valid ==> (res_ciphertext.f_value == ciphertext /\ res_shared_secret == shared_secret))
|
||||||
|
|
||||||
|
/// Decapsulate ML-KEM 768
|
||||||
|
/// Generates an [`MlKemSharedSecret`].
|
||||||
|
/// The input is a reference to an [`MlKem768PrivateKey`] and an [`MlKem768Ciphertext`].
|
||||||
|
val decapsulate
|
||||||
|
(private_key: Libcrux_ml_kem.Types.t_MlKemPrivateKey (mk_usize 2400))
|
||||||
|
(ciphertext: Libcrux_ml_kem.Types.t_MlKemCiphertext (mk_usize 1088))
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 32))
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun res ->
|
||||||
|
let res:t_Array u8 (mk_usize 32) = res in
|
||||||
|
let shared_secret, valid =
|
||||||
|
Spec.MLKEM.Instances.mlkem768_decapsulate private_key.f_value ciphertext.f_value
|
||||||
|
in
|
||||||
|
valid ==> res == shared_secret)
|
||||||
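On the Rust side, the three vals above correspond to the top-level ML-KEM 768 API of libcrux-ml-kem. The round trip below is a sketch only: it assumes the crate's `mlkem768` module exposes `generate_key_pair`, `encapsulate`, and `decapsulate` with signatures mirroring the arrays above (64 bytes of key-generation randomness, 32 bytes of encapsulation randomness, a 32-byte shared secret) and that the key-pair accessors match `impl_21__public_key` / `impl_21__private_key`; exact names and types may differ.

```rust
// Hedged usage sketch of the API specified above; function and accessor
// names are assumed, not verified against the crate.
use libcrux_ml_kem::mlkem768;

fn main() {
    // In real code both randomness values must come from a secure RNG.
    let keygen_randomness = [0u8; 64];
    let encaps_randomness = [0u8; 32];

    let key_pair = mlkem768::generate_key_pair(keygen_randomness);
    let (ciphertext, shared_secret) =
        mlkem768::encapsulate(key_pair.public_key(), encaps_randomness);
    let decapsulated = mlkem768::decapsulate(key_pair.private_key(), &ciphertext);

    // The `ensures` clauses above state that, whenever the spec marks the run
    // as valid, both sides agree with the spec-level shared secret.
    assert_eq!(shared_secret, decapsulated);
}
```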
343
proofs/fstar/models/Libcrux_ml_kem.Polynomial.fsti
Normal file
@@ -0,0 +1,343 @@
module Libcrux_ml_kem.Polynomial
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
let _ =
|
||||||
|
(* This module has implicit dependencies, here we make them explicit. *)
|
||||||
|
(* The implicit dependencies arise from typeclasses instances. *)
|
||||||
|
let open Libcrux_ml_kem.Vector.Traits in
|
||||||
|
()
|
||||||
|
|
||||||
|
let v_ZETAS_TIMES_MONTGOMERY_R: t_Array i16 (mk_usize 128) =
|
||||||
|
let _:Prims.unit = assert_norm (pow2 16 == 65536) in
|
||||||
|
let list =
|
||||||
|
[
|
||||||
|
mk_i16 (-1044); mk_i16 (-758); mk_i16 (-359); mk_i16 (-1517); mk_i16 1493; mk_i16 1422;
|
||||||
|
mk_i16 287; mk_i16 202; mk_i16 (-171); mk_i16 622; mk_i16 1577; mk_i16 182; mk_i16 962;
|
||||||
|
mk_i16 (-1202); mk_i16 (-1474); mk_i16 1468; mk_i16 573; mk_i16 (-1325); mk_i16 264;
|
||||||
|
mk_i16 383; mk_i16 (-829); mk_i16 1458; mk_i16 (-1602); mk_i16 (-130); mk_i16 (-681);
|
||||||
|
mk_i16 1017; mk_i16 732; mk_i16 608; mk_i16 (-1542); mk_i16 411; mk_i16 (-205); mk_i16 (-1571);
|
||||||
|
mk_i16 1223; mk_i16 652; mk_i16 (-552); mk_i16 1015; mk_i16 (-1293); mk_i16 1491;
|
||||||
|
mk_i16 (-282); mk_i16 (-1544); mk_i16 516; mk_i16 (-8); mk_i16 (-320); mk_i16 (-666);
|
||||||
|
mk_i16 (-1618); mk_i16 (-1162); mk_i16 126; mk_i16 1469; mk_i16 (-853); mk_i16 (-90);
|
||||||
|
mk_i16 (-271); mk_i16 830; mk_i16 107; mk_i16 (-1421); mk_i16 (-247); mk_i16 (-951);
|
||||||
|
mk_i16 (-398); mk_i16 961; mk_i16 (-1508); mk_i16 (-725); mk_i16 448; mk_i16 (-1065);
|
||||||
|
mk_i16 677; mk_i16 (-1275); mk_i16 (-1103); mk_i16 430; mk_i16 555; mk_i16 843; mk_i16 (-1251);
|
||||||
|
mk_i16 871; mk_i16 1550; mk_i16 105; mk_i16 422; mk_i16 587; mk_i16 177; mk_i16 (-235);
|
||||||
|
mk_i16 (-291); mk_i16 (-460); mk_i16 1574; mk_i16 1653; mk_i16 (-246); mk_i16 778; mk_i16 1159;
|
||||||
|
mk_i16 (-147); mk_i16 (-777); mk_i16 1483; mk_i16 (-602); mk_i16 1119; mk_i16 (-1590);
|
||||||
|
mk_i16 644; mk_i16 (-872); mk_i16 349; mk_i16 418; mk_i16 329; mk_i16 (-156); mk_i16 (-75);
|
||||||
|
mk_i16 817; mk_i16 1097; mk_i16 603; mk_i16 610; mk_i16 1322; mk_i16 (-1285); mk_i16 (-1465);
|
||||||
|
mk_i16 384; mk_i16 (-1215); mk_i16 (-136); mk_i16 1218; mk_i16 (-1335); mk_i16 (-874);
|
||||||
|
mk_i16 220; mk_i16 (-1187); mk_i16 (-1659); mk_i16 (-1185); mk_i16 (-1530); mk_i16 (-1278);
|
||||||
|
mk_i16 794; mk_i16 (-1510); mk_i16 (-854); mk_i16 (-870); mk_i16 478; mk_i16 (-108);
|
||||||
|
mk_i16 (-308); mk_i16 996; mk_i16 991; mk_i16 958; mk_i16 (-1460); mk_i16 1522; mk_i16 1628
|
||||||
|
]
|
||||||
|
in
|
||||||
|
FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 128);
|
||||||
|
Rust_primitives.Hax.array_of_list 128 list
|
||||||
|
|
||||||
|
val zeta (i: usize)
|
||||||
|
: Prims.Pure i16
|
||||||
|
(requires i <. mk_usize 128)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:i16 = result in
|
||||||
|
Spec.Utils.is_i16b 1664 result)
|
||||||
|
|
||||||
|
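The table entries are the 128 NTT twiddle factors in signed Montgomery form: entry `i` is ζ^BitRev₇(i) · 2¹⁶ reduced into (−q/2, q/2] with ζ = 17 and q = 3329, which is also why `zeta` can promise `is_i16b 1664`. The sketch below recomputes the first few entries from that definition; the formula is standard ML-KEM background and not stated in this file.

```rust
// Recompute ZETAS_TIMES_MONTGOMERY_R[i] = ζ^BitRev7(i) * 2^16 mod± q,
// with ζ = 17 and q = 3329 (standard ML-KEM parameters, assumed here).
fn bitrev7(i: u32) -> u32 {
    (0..7).fold(0, |acc, b| acc | (((i >> b) & 1) << (6 - b)))
}

fn pow_mod(base: u64, mut exp: u32, q: u64) -> u64 {
    let mut acc = 1u64;
    let mut b = base % q;
    while exp > 0 {
        if exp & 1 == 1 {
            acc = acc * b % q;
        }
        b = b * b % q;
        exp >>= 1;
    }
    acc
}

fn main() {
    const Q: u64 = 3329;
    let zeta_times_r = |i: u32| -> i16 {
        let x = pow_mod(17, bitrev7(i), Q) * (1u64 << 16) % Q;
        // Centered representative in (-q/2, q/2], hence magnitude <= 1664.
        if x > Q / 2 { (x as i64 - Q as i64) as i16 } else { x as i16 }
    };
    // First eight entries of the table above.
    let expected = [-1044, -758, -359, -1517, 1493, 1422, 287, 202];
    for (i, &e) in expected.iter().enumerate() {
        assert_eq!(zeta_times_r(i as u32), e);
    }
}
```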
let v_VECTORS_IN_RING_ELEMENT: usize =
|
||||||
|
Libcrux_ml_kem.Constants.v_COEFFICIENTS_IN_RING_ELEMENT /!
|
||||||
|
Libcrux_ml_kem.Vector.Traits.v_FIELD_ELEMENTS_IN_VECTOR
|
||||||
|
|
||||||
|
type t_PolynomialRingElement
|
||||||
|
(v_Vector: Type0) {| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
= { f_coefficients:t_Array v_Vector (mk_usize 16) }
|
||||||
|
|
||||||
|
let to_spec_poly_t (#v_Vector: Type0)
|
||||||
|
{| i2: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(p: t_PolynomialRingElement v_Vector) : Spec.MLKEM.polynomial =
|
||||||
|
createi (sz 256) (fun i -> Spec.MLKEM.Math.to_spec_fe
|
||||||
|
(Seq.index (i2._super_15138760880757129450.f_repr
|
||||||
|
(Seq.index p.f_coefficients (v i / 16))) (v i % 16)))
|
||||||
|
let to_spec_vector_t (#r:Spec.MLKEM.rank) (#v_Vector: Type0)
|
||||||
|
{| i2: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(m:t_Array (t_PolynomialRingElement v_Vector) r) : Spec.MLKEM.vector r =
|
||||||
|
createi r (fun i -> to_spec_poly_t #v_Vector (m.[i]))
|
||||||
|
let to_spec_matrix_t (#r:Spec.MLKEM.rank) (#v_Vector: Type0)
|
||||||
|
{| i2: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(m:t_Array (t_Array (t_PolynomialRingElement v_Vector) r) r) : Spec.MLKEM.matrix r =
|
||||||
|
createi r (fun i -> to_spec_vector_t #r #v_Vector (m.[i]))
|
||||||
|
|
||||||
|
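The spec-level coercions above flatten the implementation layout (16 platform vectors of 16 lanes each) into the 256-coefficient spec polynomial by reading lane `i % 16` of vector `i / 16`. A stand-alone illustration of that index mapping, with plain arrays standing in for the abstract `v_Vector` type:

```rust
// Index mapping used by to_spec_poly_t: coefficient i of the spec polynomial
// lives in lane i % 16 of vector i / 16.
fn to_spec_poly(coefficients: &[[i16; 16]; 16]) -> [i16; 256] {
    let mut poly = [0i16; 256];
    for i in 0..256 {
        poly[i] = coefficients[i / 16][i % 16];
    }
    poly
}

fn main() {
    let mut re = [[0i16; 16]; 16];
    re[3][5] = 42; // vector 3, lane 5
    assert_eq!(to_spec_poly(&re)[3 * 16 + 5], 42);
}
```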
let impl
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Clone.t_Clone v_Vector)
|
||||||
|
(#[FStar.Tactics.Typeclasses.tcresolve ()]
|
||||||
|
i2:
|
||||||
|
Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector)
|
||||||
|
: Core.Clone.t_Clone (t_PolynomialRingElement v_Vector) = { f_clone = (fun x -> x) }
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_1
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Core.Marker.t_Copy v_Vector |}
|
||||||
|
{| i2: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
: Core.Marker.t_Copy (t_PolynomialRingElement v_Vector)
|
||||||
|
|
||||||
|
val v_ZERO:
|
||||||
|
#v_Vector: Type0 ->
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |} ->
|
||||||
|
Prims.unit
|
||||||
|
-> Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val from_i16_array
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(a: t_Slice i16)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
(v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) <=.
|
||||||
|
(Core.Slice.impl__len #i16 a <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val to_i16_array
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: t_PolynomialRingElement v_Vector)
|
||||||
|
(out: t_Slice i16)
|
||||||
|
: Prims.Pure (t_Slice i16)
|
||||||
|
(requires
|
||||||
|
(Core.Slice.impl__len #i16 out <: usize) >=.
|
||||||
|
(v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val from_bytes
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(bytes: t_Slice u8)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
((v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) *! mk_usize 2 <: usize) <=.
|
||||||
|
(Core.Slice.impl__len #u8 bytes <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val to_bytes
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: t_PolynomialRingElement v_Vector)
|
||||||
|
(out: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8)
|
||||||
|
(requires
|
||||||
|
((v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) *! mk_usize 2 <: usize) <=.
|
||||||
|
(Core.Slice.impl__len #u8 out <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Given two polynomial ring elements `lhs` and `rhs`, compute the pointwise
|
||||||
|
/// sum of their constituent coefficients.
|
||||||
|
val add_to_ring_element
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_K: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(myself rhs: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val poly_barrett_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(myself: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val subtract_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(myself b: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val add_message_error_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(myself message result: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val add_error_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(myself error: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val add_standard_error_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(myself error: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Given two `KyberPolynomialRingElement`s in their NTT representations,
|
||||||
|
/// compute their product. Given two polynomials in the NTT domain `f^` and `ĝ`,
|
||||||
|
/// the `iᵗʰ` coefficient of the product `ĥ` is determined by the calculation:
|
||||||
|
/// ```plaintext
|
||||||
|
/// ĥ[2·i] + ĥ[2·i + 1]X = (f^[2·i] + f^[2·i + 1]X)·(ĝ[2·i] + ĝ[2·i + 1]X) mod (X² - ζ^(2·BitRev₇(i) + 1))
|
||||||
|
/// ```
|
||||||
|
/// This function almost implements <strong>Algorithm 10</strong> of the
|
||||||
|
/// NIST FIPS 203 standard, which is reproduced below:
|
||||||
|
/// ```plaintext
|
||||||
|
/// Input: Two arrays fˆ ∈ ℤq²⁵⁶ and ĝ ∈ ℤq²⁵⁶.
|
||||||
|
/// Output: An array ĥ ∈ ℤq²⁵⁶.
|
||||||
|
/// for(i ← 0; i < 128; i++)
|
||||||
|
/// (ĥ[2i], ĥ[2i+1]) ← BaseCaseMultiply(fˆ[2i], fˆ[2i+1], ĝ[2i], ĝ[2i+1], ζ^(2·BitRev₇(i) + 1))
|
||||||
|
/// end for
|
||||||
|
/// return ĥ
|
||||||
|
/// ```
|
||||||
|
/// We say "almost" because the coefficients of the ring element output by
|
||||||
|
/// this function are in the Montgomery domain.
|
||||||
|
/// The NIST FIPS 203 standard can be found at
|
||||||
|
/// <https://csrc.nist.gov/pubs/fips/203/ipd>.
|
||||||
|
val ntt_multiply
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(myself rhs: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
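Concretely, each of the 128 base-case steps multiplies two degree-one polynomials modulo `X² − γ`, where `γ = ζ^(2·BitRev₇(i)+1)`. The sketch below spells out one such step over plain integers mod q, without the Montgomery representation the extracted code actually uses; the value of γ is illustrative.

```rust
// One BaseCaseMultiply step from the algorithm quoted above:
// (a0 + a1·X)(b0 + b1·X) mod (X² − γ) over ℤq, q = 3329.
const Q: i64 = 3329;

fn base_case_multiply(a: (i64, i64), b: (i64, i64), gamma: i64) -> (i64, i64) {
    // X² ≡ γ, so the constant term picks up a1·b1·γ.
    let c0 = (a.0 * b.0 + a.1 * b.1 % Q * gamma) % Q;
    let c1 = (a.0 * b.1 + a.1 * b.0) % Q;
    (c0.rem_euclid(Q), c1.rem_euclid(Q))
}

fn main() {
    // (1 + 2X)(3 + 4X) = 3 + 10X + 8X² ≡ (3 + 8γ) + 10X  (mod X² − γ)
    let gamma = 17; // stand-in for some ζ^(2·BitRev₇(i)+1)
    assert_eq!(base_case_multiply((1, 2), (3, 4), gamma), (3 + 8 * gamma, 10));
}
```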
val impl_2__ZERO:
|
||||||
|
#v_Vector: Type0 ->
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |} ->
|
||||||
|
Prims.unit
|
||||||
|
-> Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Given two polynomial ring elements `lhs` and `rhs`, compute the pointwise
|
||||||
|
/// sum of their constituent coefficients.
|
||||||
|
val impl_2__add_to_ring_element
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
(v_K: usize)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self rhs: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__poly_barrett_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__subtract_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self b: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__add_message_error_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self message result: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__add_error_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self error: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__add_standard_error_reduce
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self error: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__ntt_multiply
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self rhs: t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Size of a ring element in bytes.
|
||||||
|
val impl_2__num_bytes:
|
||||||
|
#v_Vector: Type0 ->
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |} ->
|
||||||
|
Prims.unit
|
||||||
|
-> Prims.Pure usize
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:usize = result in
|
||||||
|
result =. mk_usize 512)
|
||||||
|
|
||||||
|
/// The length of a vector of ring elements in bytes
|
||||||
|
val vec_len_bytes:
|
||||||
|
v_K: usize ->
|
||||||
|
#v_Vector: Type0 ->
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |} ->
|
||||||
|
Prims.unit
|
||||||
|
-> Prims.Pure usize (requires v_K <=. mk_usize 4) (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__from_i16_array
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(a: t_Slice i16)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
(v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) <=.
|
||||||
|
(Core.Slice.impl__len #i16 a <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__to_i16_array
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_PolynomialRingElement v_Vector)
|
||||||
|
(out: t_Slice i16)
|
||||||
|
: Prims.Pure (t_Slice i16)
|
||||||
|
(requires
|
||||||
|
(v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) <=.
|
||||||
|
(Core.Slice.impl__len #i16 out <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__from_bytes
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(bytes: t_Slice u8)
|
||||||
|
: Prims.Pure (t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
((v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) *! mk_usize 2 <: usize) <=.
|
||||||
|
(Core.Slice.impl__len #u8 bytes <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Build a vector of ring elements from `bytes`.
|
||||||
|
val vec_from_bytes
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(bytes: t_Slice u8)
|
||||||
|
(out: t_Slice (t_PolynomialRingElement v_Vector))
|
||||||
|
: Prims.Pure (t_Slice (t_PolynomialRingElement v_Vector))
|
||||||
|
(requires
|
||||||
|
(Core.Slice.impl__len #(t_PolynomialRingElement v_Vector) out <: usize) <=. mk_usize 4 &&
|
||||||
|
(((v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) *! mk_usize 2 <: usize) *!
|
||||||
|
(Core.Slice.impl__len #(t_PolynomialRingElement v_Vector) out <: usize)
|
||||||
|
<:
|
||||||
|
usize) <=.
|
||||||
|
(Core.Slice.impl__len #u8 bytes <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val impl_2__to_bytes
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(self: t_PolynomialRingElement v_Vector)
|
||||||
|
(out: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8)
|
||||||
|
(requires
|
||||||
|
((v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) *! mk_usize 2 <: usize) <=.
|
||||||
|
(Core.Slice.impl__len #u8 out <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Get the bytes of the vector of ring elements in `re` and write them to `out`.
|
||||||
|
val vec_to_bytes
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: t_Slice (t_PolynomialRingElement v_Vector))
|
||||||
|
(out: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8)
|
||||||
|
(requires
|
||||||
|
(Core.Slice.impl__len #(t_PolynomialRingElement v_Vector) re <: usize) <=. mk_usize 4 &&
|
||||||
|
(((v_VECTORS_IN_RING_ELEMENT *! mk_usize 16 <: usize) *! mk_usize 2 <: usize) *!
|
||||||
|
(Core.Slice.impl__len #(t_PolynomialRingElement v_Vector) re <: usize)
|
||||||
|
<:
|
||||||
|
usize) <=.
|
||||||
|
(Core.Slice.impl__len #u8 out <: usize))
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
281
proofs/fstar/models/Libcrux_ml_kem.Serialize.fsti
Normal file
@@ -0,0 +1,281 @@
module Libcrux_ml_kem.Serialize
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
let _ =
|
||||||
|
(* This module has implicit dependencies, here we make them explicit. *)
|
||||||
|
(* The implicit dependencies arise from typeclasses instances. *)
|
||||||
|
let open Libcrux_ml_kem.Vector.Traits in
|
||||||
|
()
|
||||||
|
|
||||||
|
[@@ "opaque_to_smt"]
|
||||||
|
let field_modulus_range (#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(a: v_Vector) =
|
||||||
|
let coef = Libcrux_ml_kem.Vector.Traits.f_to_i16_array a in
|
||||||
|
forall (i:nat). i < 16 ==> v (Seq.index coef i) > -(v Libcrux_ml_kem.Vector.Traits.v_FIELD_MODULUS) /\
|
||||||
|
v (Seq.index coef i) < v Libcrux_ml_kem.Vector.Traits.v_FIELD_MODULUS
|
||||||
|
|
||||||
|
[@@ "opaque_to_smt"]
|
||||||
|
let coefficients_field_modulus_range (#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) =
|
||||||
|
forall (i:nat). i < 16 ==> field_modulus_range (Seq.index re.f_coefficients i)
|
||||||
|
|
||||||
|
val to_unsigned_field_modulus
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(a: v_Vector)
|
||||||
|
: Prims.Pure v_Vector
|
||||||
|
(requires field_modulus_range a)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:v_Vector = result in
|
||||||
|
forall (i: nat).
|
||||||
|
i < 16 ==>
|
||||||
|
v (Seq.index (Libcrux_ml_kem.Vector.Traits.f_to_i16_array result) i) >= 0 /\
|
||||||
|
v (Seq.index (Libcrux_ml_kem.Vector.Traits.f_to_i16_array result) i) <
|
||||||
|
v Libcrux_ml_kem.Vector.Traits.v_FIELD_MODULUS)
|
||||||
|
|
||||||
|
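`to_unsigned_field_modulus` turns a coefficient that is only known to lie strictly between −q and q into its canonical representative in [0, q). One standard branch-free way to meet exactly this contract is to add q precisely when the value is negative; the code below is an illustration of that trick, not the extracted vector implementation.

```rust
// Branch-free conversion of a centered representative in (-q, q) to the
// canonical one in [0, q), matching the pre/postcondition above.
const FIELD_MODULUS: i16 = 3329; // the ML-KEM field modulus q

fn to_unsigned_representative(a: i16) -> u16 {
    debug_assert!(-FIELD_MODULUS < a && a < FIELD_MODULUS);
    // a >> 15 is all ones exactly when a is negative, so this adds q iff a < 0.
    (a + ((a >> 15) & FIELD_MODULUS)) as u16
}

fn main() {
    assert_eq!(to_unsigned_representative(-1), 3328);
    assert_eq!(to_unsigned_representative(0), 0);
    assert_eq!(to_unsigned_representative(3328), 3328);
    assert_eq!(to_unsigned_representative(-3328), 1);
}
```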
val compress_then_serialize_message
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 32))
|
||||||
|
(requires coefficients_field_modulus_range re)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 (mk_usize 32) = result in
|
||||||
|
result ==
|
||||||
|
Spec.MLKEM.compress_then_encode_message (Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector
|
||||||
|
re))
|
||||||
|
|
||||||
|
val deserialize_then_decompress_message
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Array u8 (mk_usize 32))
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector = result in
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector result ==
|
||||||
|
Spec.MLKEM.decode_then_decompress_message serialized)
|
||||||
|
|
||||||
|
val serialize_uncompressed_ring_element
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_Array u8 (mk_usize 384))
|
||||||
|
(requires coefficients_field_modulus_range re)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 (mk_usize 384) = result in
|
||||||
|
result ==
|
||||||
|
Spec.MLKEM.byte_encode 12 (Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector re))
|
||||||
|
|
||||||
|
val deserialize_to_uncompressed_ring_element
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
(Core.Slice.impl__len #u8 serialized <: usize) =.
|
||||||
|
Libcrux_ml_kem.Constants.v_BYTES_PER_RING_ELEMENT)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector = result in
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector result ==
|
||||||
|
Spec.MLKEM.byte_decode 12 serialized)
|
||||||
|
|
||||||
|
/// Only use with public values.
|
||||||
|
/// This MUST NOT be used with secret inputs; the same restriction applies to its caller `deserialize_ring_elements_reduced`.
|
||||||
|
val deserialize_to_reduced_ring_element
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
(Core.Slice.impl__len #u8 serialized <: usize) =.
|
||||||
|
Libcrux_ml_kem.Constants.v_BYTES_PER_RING_ELEMENT)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// See [deserialize_ring_elements_reduced_out].
|
||||||
|
val deserialize_ring_elements_reduced
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(public_key: t_Slice u8)
|
||||||
|
(deserialized_pk: t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K)
|
||||||
|
: Prims.Pure (t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\
|
||||||
|
Seq.length public_key == v (Spec.MLKEM.v_T_AS_NTT_ENCODED_SIZE v_K))
|
||||||
|
(ensures
|
||||||
|
fun deserialized_pk_future ->
|
||||||
|
let deserialized_pk_future:t_Array
|
||||||
|
(Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K =
|
||||||
|
deserialized_pk_future
|
||||||
|
in
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_vector_t #v_K #v_Vector deserialized_pk_future ==
|
||||||
|
Spec.MLKEM.vector_decode_12 #v_K public_key)
|
||||||
|
|
||||||
|
/// This function deserializes ring elements and reduces the result by the field
|
||||||
|
/// modulus.
|
||||||
|
/// This function MUST NOT be used on secret inputs.
|
||||||
|
val deserialize_ring_elements_reduced_out
|
||||||
|
(v_K: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(public_key: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\
|
||||||
|
Seq.length public_key == v (Spec.MLKEM.v_T_AS_NTT_ENCODED_SIZE v_K))
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector) v_K =
|
||||||
|
result
|
||||||
|
in
|
||||||
|
forall (i: nat). i < v v_K ==> coefficients_field_modulus_range (Seq.index result i))
|
||||||
|
|
||||||
|
val compress_then_serialize_10_
|
||||||
|
(v_OUT_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_Array u8 v_OUT_LEN)
|
||||||
|
(requires v v_OUT_LEN == 320 /\ coefficients_field_modulus_range re)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val compress_then_serialize_11_
|
||||||
|
(v_OUT_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_Array u8 v_OUT_LEN) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val compress_then_serialize_ring_element_u
|
||||||
|
(v_COMPRESSION_FACTOR v_OUT_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
: Prims.Pure (t_Array u8 v_OUT_LEN)
|
||||||
|
(requires
|
||||||
|
(v v_COMPRESSION_FACTOR == 10 \/ v v_COMPRESSION_FACTOR == 11) /\
|
||||||
|
v v_OUT_LEN == 32 * v v_COMPRESSION_FACTOR /\ coefficients_field_modulus_range re)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 v_OUT_LEN = result in
|
||||||
|
result ==
|
||||||
|
Spec.MLKEM.compress_then_byte_encode (v v_COMPRESSION_FACTOR)
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector re))
|
||||||
|
|
||||||
|
val compress_then_serialize_4_
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8)
|
||||||
|
(requires Seq.length serialized == 128 /\ coefficients_field_modulus_range re)
|
||||||
|
(ensures
|
||||||
|
fun serialized_future ->
|
||||||
|
let serialized_future:t_Slice u8 = serialized_future in
|
||||||
|
Core.Slice.impl__len #u8 serialized_future == Core.Slice.impl__len #u8 serialized)
|
||||||
|
|
||||||
|
val compress_then_serialize_5_
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8)
|
||||||
|
(requires (Core.Slice.impl__len #u8 serialized <: usize) =. mk_usize 160)
|
||||||
|
(ensures
|
||||||
|
fun serialized_future ->
|
||||||
|
let serialized_future:t_Slice u8 = serialized_future in
|
||||||
|
Core.Slice.impl__len #u8 serialized_future == Core.Slice.impl__len #u8 serialized)
|
||||||
|
|
||||||
|
val compress_then_serialize_ring_element_v
|
||||||
|
(v_K v_COMPRESSION_FACTOR v_OUT_LEN: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(re: Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(out: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\
|
||||||
|
v_COMPRESSION_FACTOR == Spec.MLKEM.v_VECTOR_V_COMPRESSION_FACTOR v_K /\
|
||||||
|
Seq.length out == v v_OUT_LEN /\ v v_OUT_LEN == 32 * v v_COMPRESSION_FACTOR /\
|
||||||
|
coefficients_field_modulus_range re)
|
||||||
|
(ensures
|
||||||
|
fun out_future ->
|
||||||
|
let out_future:t_Slice u8 = out_future in
|
||||||
|
Core.Slice.impl__len #u8 out_future == Core.Slice.impl__len #u8 out /\
|
||||||
|
out_future ==
|
||||||
|
Spec.MLKEM.compress_then_encode_v #v_K
|
||||||
|
(Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector re))
|
||||||
|
|
||||||
|
val deserialize_then_decompress_10_
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires (Core.Slice.impl__len #u8 serialized <: usize) =. mk_usize 320)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val deserialize_then_decompress_11_
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires (Core.Slice.impl__len #u8 serialized <: usize) =. mk_usize 352)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val deserialize_then_decompress_ring_element_u
|
||||||
|
(v_COMPRESSION_FACTOR: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
(v_COMPRESSION_FACTOR =. mk_usize 10 || v_COMPRESSION_FACTOR =. mk_usize 11) &&
|
||||||
|
(Core.Slice.impl__len #u8 serialized <: usize) =.
|
||||||
|
(mk_usize 32 *! v_COMPRESSION_FACTOR <: usize))
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector = result in
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector result ==
|
||||||
|
Spec.MLKEM.byte_decode_then_decompress (v v_COMPRESSION_FACTOR) serialized)
|
||||||
|
|
||||||
|
val deserialize_then_decompress_4_
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires (Core.Slice.impl__len #u8 serialized <: usize) =. mk_usize 128)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val deserialize_then_decompress_5_
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires (Core.Slice.impl__len #u8 serialized <: usize) =. mk_usize 160)
|
||||||
|
(fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
val deserialize_then_decompress_ring_element_v
|
||||||
|
(v_K v_COMPRESSION_FACTOR: usize)
|
||||||
|
(#v_Vector: Type0)
|
||||||
|
{| i1: Libcrux_ml_kem.Vector.Traits.t_Operations v_Vector |}
|
||||||
|
(serialized: t_Slice u8)
|
||||||
|
: Prims.Pure (Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector)
|
||||||
|
(requires
|
||||||
|
Spec.MLKEM.is_rank v_K /\
|
||||||
|
v_COMPRESSION_FACTOR == Spec.MLKEM.v_VECTOR_V_COMPRESSION_FACTOR v_K /\
|
||||||
|
Seq.length serialized == 32 * v v_COMPRESSION_FACTOR)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:Libcrux_ml_kem.Polynomial.t_PolynomialRingElement v_Vector = result in
|
||||||
|
Libcrux_ml_kem.Polynomial.to_spec_poly_t #v_Vector result ==
|
||||||
|
Spec.MLKEM.decode_then_decompress_v #v_K serialized)
|
||||||
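The many literal sizes in the `requires` clauses of this module all come from one formula: a ring element has 256 coefficients, so encoding it at d bits per coefficient takes 256·d/8 = 32·d bytes. A small check that this reproduces every constant used above; the formula itself is FIPS 203 background, assumed here.

```rust
// Serialized size of one ring element at d bits per coefficient:
// 256 coefficients * d bits / 8 = 32 * d bytes.
fn ring_element_bytes(d: usize) -> usize {
    256 * d / 8
}

fn main() {
    assert_eq!(ring_element_bytes(1), 32); // compressed message
    assert_eq!(ring_element_bytes(4), 128); // v at compression factor 4
    assert_eq!(ring_element_bytes(5), 160); // v at compression factor 5
    assert_eq!(ring_element_bytes(10), 320); // u at compression factor 10
    assert_eq!(ring_element_bytes(11), 352); // u at compression factor 11
    assert_eq!(ring_element_bytes(12), 384); // uncompressed 12-bit encoding
}
```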
422
proofs/fstar/models/Libcrux_ml_kem.Types.fst
Normal file
@@ -0,0 +1,422 @@
module Libcrux_ml_kem.Types
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
///An ML-KEM Ciphertext
|
||||||
|
type t_MlKemCiphertext (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE }
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl (v_SIZE: usize) : Core.Default.t_Default (t_MlKemCiphertext v_SIZE) =
|
||||||
|
{
|
||||||
|
f_default_pre = (fun (_: Prims.unit) -> true);
|
||||||
|
f_default_post = (fun (_: Prims.unit) (out: t_MlKemCiphertext v_SIZE) -> true);
|
||||||
|
f_default
|
||||||
|
=
|
||||||
|
fun (_: Prims.unit) ->
|
||||||
|
{ f_value = Rust_primitives.Hax.repeat (mk_u8 0) v_SIZE } <: t_MlKemCiphertext v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_4 (v_SIZE: usize) : Core.Convert.t_AsRef (t_MlKemCiphertext v_SIZE) (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_as_ref_pre = (fun (self: t_MlKemCiphertext v_SIZE) -> true);
|
||||||
|
f_as_ref_post
|
||||||
|
=
|
||||||
|
(fun (self_: t_MlKemCiphertext v_SIZE) (result: t_Slice u8) -> result = self_.f_value);
|
||||||
|
f_as_ref = fun (self: t_MlKemCiphertext v_SIZE) -> self.f_value <: t_Slice u8
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_5 (v_SIZE: usize) : Core.Convert.t_From (t_MlKemCiphertext v_SIZE) (t_Array u8 v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from_post
|
||||||
|
=
|
||||||
|
(fun (value: t_Array u8 v_SIZE) (result: t_MlKemCiphertext v_SIZE) -> result.f_value = value);
|
||||||
|
f_from = fun (value: t_Array u8 v_SIZE) -> { f_value = value } <: t_MlKemCiphertext v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_1 (v_SIZE: usize) : Core.Convert.t_From (t_MlKemCiphertext v_SIZE) (t_Array u8 v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from_post = (fun (value: t_Array u8 v_SIZE) (out: t_MlKemCiphertext v_SIZE) -> true);
|
||||||
|
f_from
|
||||||
|
=
|
||||||
|
fun (value: t_Array u8 v_SIZE) ->
|
||||||
|
{ f_value = Core.Clone.f_clone #(t_Array u8 v_SIZE) #FStar.Tactics.Typeclasses.solve value }
|
||||||
|
<:
|
||||||
|
t_MlKemCiphertext v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_2 (v_SIZE: usize) : Core.Convert.t_From (t_Array u8 v_SIZE) (t_MlKemCiphertext v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_MlKemCiphertext v_SIZE) -> true);
|
||||||
|
f_from_post = (fun (value: t_MlKemCiphertext v_SIZE) (out: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from = fun (value: t_MlKemCiphertext v_SIZE) -> value.f_value
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_3 (v_SIZE: usize) : Core.Convert.t_TryFrom (t_MlKemCiphertext v_SIZE) (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_Error = Core.Array.t_TryFromSliceError;
|
||||||
|
f_try_from_pre = (fun (value: t_Slice u8) -> true);
|
||||||
|
f_try_from_post
|
||||||
|
=
|
||||||
|
(fun
|
||||||
|
(value: t_Slice u8)
|
||||||
|
(out: Core.Result.t_Result (t_MlKemCiphertext v_SIZE) Core.Array.t_TryFromSliceError)
|
||||||
|
->
|
||||||
|
true);
|
||||||
|
f_try_from
|
||||||
|
=
|
||||||
|
fun (value: t_Slice u8) ->
|
||||||
|
match
|
||||||
|
Core.Convert.f_try_into #(t_Slice u8)
|
||||||
|
#(t_Array u8 v_SIZE)
|
||||||
|
#FStar.Tactics.Typeclasses.solve
|
||||||
|
value
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_Array u8 v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
with
|
||||||
|
| Core.Result.Result_Ok value ->
|
||||||
|
Core.Result.Result_Ok ({ f_value = value } <: t_MlKemCiphertext v_SIZE)
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_MlKemCiphertext v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
| Core.Result.Result_Err e ->
|
||||||
|
Core.Result.Result_Err e
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_MlKemCiphertext v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The number of bytes
|
||||||
|
let impl_6__len (v_SIZE: usize) (_: Prims.unit) : usize = v_SIZE
|
||||||
|
|
||||||
|
/// A reference to the raw byte slice.
|
||||||
|
let impl_6__as_slice (v_SIZE: usize) (self: t_MlKemCiphertext v_SIZE)
|
||||||
|
: Prims.Pure (t_Array u8 v_SIZE)
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 v_SIZE = result in
|
||||||
|
result == self.f_value) = self.f_value
|
||||||
|
|
||||||
|
///An ML-KEM Private key
|
||||||
|
type t_MlKemPrivateKey (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE }
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_7 (v_SIZE: usize) : Core.Default.t_Default (t_MlKemPrivateKey v_SIZE) =
|
||||||
|
{
|
||||||
|
f_default_pre = (fun (_: Prims.unit) -> true);
|
||||||
|
f_default_post = (fun (_: Prims.unit) (out: t_MlKemPrivateKey v_SIZE) -> true);
|
||||||
|
f_default
|
||||||
|
=
|
||||||
|
fun (_: Prims.unit) ->
|
||||||
|
{ f_value = Rust_primitives.Hax.repeat (mk_u8 0) v_SIZE } <: t_MlKemPrivateKey v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_11 (v_SIZE: usize) : Core.Convert.t_AsRef (t_MlKemPrivateKey v_SIZE) (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_as_ref_pre = (fun (self: t_MlKemPrivateKey v_SIZE) -> true);
|
||||||
|
f_as_ref_post
|
||||||
|
=
|
||||||
|
(fun (self_: t_MlKemPrivateKey v_SIZE) (result: t_Slice u8) -> result = self_.f_value);
|
||||||
|
f_as_ref = fun (self: t_MlKemPrivateKey v_SIZE) -> self.f_value <: t_Slice u8
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_12 (v_SIZE: usize) : Core.Convert.t_From (t_MlKemPrivateKey v_SIZE) (t_Array u8 v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from_post
|
||||||
|
=
|
||||||
|
(fun (value: t_Array u8 v_SIZE) (result: t_MlKemPrivateKey v_SIZE) -> result.f_value = value);
|
||||||
|
f_from = fun (value: t_Array u8 v_SIZE) -> { f_value = value } <: t_MlKemPrivateKey v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_8 (v_SIZE: usize) : Core.Convert.t_From (t_MlKemPrivateKey v_SIZE) (t_Array u8 v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from_post = (fun (value: t_Array u8 v_SIZE) (out: t_MlKemPrivateKey v_SIZE) -> true);
|
||||||
|
f_from
|
||||||
|
=
|
||||||
|
fun (value: t_Array u8 v_SIZE) ->
|
||||||
|
{ f_value = Core.Clone.f_clone #(t_Array u8 v_SIZE) #FStar.Tactics.Typeclasses.solve value }
|
||||||
|
<:
|
||||||
|
t_MlKemPrivateKey v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_9 (v_SIZE: usize) : Core.Convert.t_From (t_Array u8 v_SIZE) (t_MlKemPrivateKey v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_MlKemPrivateKey v_SIZE) -> true);
|
||||||
|
f_from_post = (fun (value: t_MlKemPrivateKey v_SIZE) (out: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from = fun (value: t_MlKemPrivateKey v_SIZE) -> value.f_value
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_10 (v_SIZE: usize) : Core.Convert.t_TryFrom (t_MlKemPrivateKey v_SIZE) (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_Error = Core.Array.t_TryFromSliceError;
|
||||||
|
f_try_from_pre = (fun (value: t_Slice u8) -> true);
|
||||||
|
f_try_from_post
|
||||||
|
=
|
||||||
|
(fun
|
||||||
|
(value: t_Slice u8)
|
||||||
|
(out: Core.Result.t_Result (t_MlKemPrivateKey v_SIZE) Core.Array.t_TryFromSliceError)
|
||||||
|
->
|
||||||
|
true);
|
||||||
|
f_try_from
|
||||||
|
=
|
||||||
|
fun (value: t_Slice u8) ->
|
||||||
|
match
|
||||||
|
Core.Convert.f_try_into #(t_Slice u8)
|
||||||
|
#(t_Array u8 v_SIZE)
|
||||||
|
#FStar.Tactics.Typeclasses.solve
|
||||||
|
value
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_Array u8 v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
with
|
||||||
|
| Core.Result.Result_Ok value ->
|
||||||
|
Core.Result.Result_Ok ({ f_value = value } <: t_MlKemPrivateKey v_SIZE)
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_MlKemPrivateKey v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
| Core.Result.Result_Err e ->
|
||||||
|
Core.Result.Result_Err e
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_MlKemPrivateKey v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The number of bytes
|
||||||
|
let impl_13__len (v_SIZE: usize) (_: Prims.unit) : usize = v_SIZE
|
||||||
|
|
||||||
|
/// A reference to the raw byte slice.
|
||||||
|
let impl_13__as_slice (v_SIZE: usize) (self: t_MlKemPrivateKey v_SIZE)
|
||||||
|
: Prims.Pure (t_Array u8 v_SIZE)
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 v_SIZE = result in
|
||||||
|
result == self.f_value) = self.f_value
|
||||||
|
|
||||||
|
///An ML-KEM Public key
|
||||||
|
type t_MlKemPublicKey (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE }
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_14 (v_SIZE: usize) : Core.Default.t_Default (t_MlKemPublicKey v_SIZE) =
|
||||||
|
{
|
||||||
|
f_default_pre = (fun (_: Prims.unit) -> true);
|
||||||
|
f_default_post = (fun (_: Prims.unit) (out: t_MlKemPublicKey v_SIZE) -> true);
|
||||||
|
f_default
|
||||||
|
=
|
||||||
|
fun (_: Prims.unit) ->
|
||||||
|
{ f_value = Rust_primitives.Hax.repeat (mk_u8 0) v_SIZE } <: t_MlKemPublicKey v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_18 (v_SIZE: usize) : Core.Convert.t_AsRef (t_MlKemPublicKey v_SIZE) (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_as_ref_pre = (fun (self: t_MlKemPublicKey v_SIZE) -> true);
|
||||||
|
f_as_ref_post
|
||||||
|
=
|
||||||
|
(fun (self_: t_MlKemPublicKey v_SIZE) (result: t_Slice u8) -> result = self_.f_value);
|
||||||
|
f_as_ref = fun (self: t_MlKemPublicKey v_SIZE) -> self.f_value <: t_Slice u8
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_19 (v_SIZE: usize) : Core.Convert.t_From (t_MlKemPublicKey v_SIZE) (t_Array u8 v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from_post
|
||||||
|
=
|
||||||
|
(fun (value: t_Array u8 v_SIZE) (result: t_MlKemPublicKey v_SIZE) -> result.f_value = value);
|
||||||
|
f_from = fun (value: t_Array u8 v_SIZE) -> { f_value = value } <: t_MlKemPublicKey v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_15 (v_SIZE: usize) : Core.Convert.t_From (t_MlKemPublicKey v_SIZE) (t_Array u8 v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from_post = (fun (value: t_Array u8 v_SIZE) (out: t_MlKemPublicKey v_SIZE) -> true);
|
||||||
|
f_from
|
||||||
|
=
|
||||||
|
fun (value: t_Array u8 v_SIZE) ->
|
||||||
|
{ f_value = Core.Clone.f_clone #(t_Array u8 v_SIZE) #FStar.Tactics.Typeclasses.solve value }
|
||||||
|
<:
|
||||||
|
t_MlKemPublicKey v_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_16 (v_SIZE: usize) : Core.Convert.t_From (t_Array u8 v_SIZE) (t_MlKemPublicKey v_SIZE) =
|
||||||
|
{
|
||||||
|
f_from_pre = (fun (value: t_MlKemPublicKey v_SIZE) -> true);
|
||||||
|
f_from_post = (fun (value: t_MlKemPublicKey v_SIZE) (out: t_Array u8 v_SIZE) -> true);
|
||||||
|
f_from = fun (value: t_MlKemPublicKey v_SIZE) -> value.f_value
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
let impl_17 (v_SIZE: usize) : Core.Convert.t_TryFrom (t_MlKemPublicKey v_SIZE) (t_Slice u8) =
|
||||||
|
{
|
||||||
|
f_Error = Core.Array.t_TryFromSliceError;
|
||||||
|
f_try_from_pre = (fun (value: t_Slice u8) -> true);
|
||||||
|
f_try_from_post
|
||||||
|
=
|
||||||
|
(fun
|
||||||
|
(value: t_Slice u8)
|
||||||
|
(out: Core.Result.t_Result (t_MlKemPublicKey v_SIZE) Core.Array.t_TryFromSliceError)
|
||||||
|
->
|
||||||
|
true);
|
||||||
|
f_try_from
|
||||||
|
=
|
||||||
|
fun (value: t_Slice u8) ->
|
||||||
|
match
|
||||||
|
Core.Convert.f_try_into #(t_Slice u8)
|
||||||
|
#(t_Array u8 v_SIZE)
|
||||||
|
#FStar.Tactics.Typeclasses.solve
|
||||||
|
value
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_Array u8 v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
with
|
||||||
|
| Core.Result.Result_Ok value ->
|
||||||
|
Core.Result.Result_Ok ({ f_value = value } <: t_MlKemPublicKey v_SIZE)
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_MlKemPublicKey v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
| Core.Result.Result_Err e ->
|
||||||
|
Core.Result.Result_Err e
|
||||||
|
<:
|
||||||
|
Core.Result.t_Result (t_MlKemPublicKey v_SIZE) Core.Array.t_TryFromSliceError
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The number of bytes
|
||||||
|
let impl_20__len (v_SIZE: usize) (_: Prims.unit) : usize = v_SIZE
|
||||||
|
|
||||||
|
/// A reference to the raw byte slice.
|
||||||
|
let impl_20__as_slice (v_SIZE: usize) (self: t_MlKemPublicKey v_SIZE)
|
||||||
|
: Prims.Pure (t_Array u8 v_SIZE)
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_Array u8 v_SIZE = result in
|
||||||
|
result == self.f_value) = self.f_value
|
||||||
|
|
||||||
|
/// An ML-KEM key pair
|
||||||
|
type t_MlKemKeyPair (v_PRIVATE_KEY_SIZE: usize) (v_PUBLIC_KEY_SIZE: usize) = {
|
||||||
|
f_sk:t_MlKemPrivateKey v_PRIVATE_KEY_SIZE;
|
||||||
|
f_pk:t_MlKemPublicKey v_PUBLIC_KEY_SIZE
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new [`MlKemKeyPair`].
|
||||||
|
let impl_21__new
|
||||||
|
(v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(sk: t_Array u8 v_PRIVATE_KEY_SIZE)
|
||||||
|
(pk: t_Array u8 v_PUBLIC_KEY_SIZE)
|
||||||
|
: t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE =
|
||||||
|
{
|
||||||
|
f_sk
|
||||||
|
=
|
||||||
|
Core.Convert.f_into #(t_Array u8 v_PRIVATE_KEY_SIZE)
|
||||||
|
#(t_MlKemPrivateKey v_PRIVATE_KEY_SIZE)
|
||||||
|
#FStar.Tactics.Typeclasses.solve
|
||||||
|
sk;
|
||||||
|
f_pk
|
||||||
|
=
|
||||||
|
Core.Convert.f_into #(t_Array u8 v_PUBLIC_KEY_SIZE)
|
||||||
|
#(t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
#FStar.Tactics.Typeclasses.solve
|
||||||
|
pk
|
||||||
|
}
|
||||||
|
<:
|
||||||
|
t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE
|
||||||
|
|
||||||
|
/// Get a reference to the [`MlKemPublicKey<PUBLIC_KEY_SIZE>`].
|
||||||
|
let impl_21__public_key
|
||||||
|
(v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(self: t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE)
|
||||||
|
: t_MlKemPublicKey v_PUBLIC_KEY_SIZE = self.f_pk
|
||||||
|
|
||||||
|
/// Get a reference to the [`MlKemPrivateKey<PRIVATE_KEY_SIZE>`].
|
||||||
|
let impl_21__private_key
|
||||||
|
(v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(self: t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE)
|
||||||
|
: t_MlKemPrivateKey v_PRIVATE_KEY_SIZE = self.f_sk
|
||||||
|
|
||||||
|
/// Get a reference to the raw public key bytes.
|
||||||
|
let impl_21__pk
|
||||||
|
(v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(self: t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE)
|
||||||
|
: t_Array u8 v_PUBLIC_KEY_SIZE = impl_20__as_slice v_PUBLIC_KEY_SIZE self.f_pk
|
||||||
|
|
||||||
|
/// Get a reference to the raw private key bytes.
|
||||||
|
let impl_21__sk
|
||||||
|
(v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(self: t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE)
|
||||||
|
: t_Array u8 v_PRIVATE_KEY_SIZE = impl_13__as_slice v_PRIVATE_KEY_SIZE self.f_sk
|
||||||
|
|
||||||
|
/// Separate this key into the public and private key.
|
||||||
|
let impl_21__into_parts
|
||||||
|
(v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(self: t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE)
|
||||||
|
: (t_MlKemPrivateKey v_PRIVATE_KEY_SIZE & t_MlKemPublicKey v_PUBLIC_KEY_SIZE) =
|
||||||
|
self.f_sk, self.f_pk
|
||||||
|
<:
|
||||||
|
(t_MlKemPrivateKey v_PRIVATE_KEY_SIZE & t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
|
||||||
|
/// Create a new [`MlKemKeyPair`] from the secret and public key.
|
||||||
|
let impl_21__from
|
||||||
|
(v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE: usize)
|
||||||
|
(sk: t_MlKemPrivateKey v_PRIVATE_KEY_SIZE)
|
||||||
|
(pk: t_MlKemPublicKey v_PUBLIC_KEY_SIZE)
|
||||||
|
: Prims.Pure (t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE)
|
||||||
|
Prims.l_True
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE = result in
|
||||||
|
result.f_sk == sk /\ result.f_pk == pk) =
|
||||||
|
{ f_sk = sk; f_pk = pk } <: t_MlKemKeyPair v_PRIVATE_KEY_SIZE v_PUBLIC_KEY_SIZE
|
||||||
|
|
||||||
|
/// Unpack an incoming private key into its different parts.
|
||||||
|
/// We have this here in types to extract into a common core for C.
|
||||||
|
let unpack_private_key (v_CPA_SECRET_KEY_SIZE v_PUBLIC_KEY_SIZE: usize) (private_key: t_Slice u8)
|
||||||
|
: Prims.Pure (t_Slice u8 & t_Slice u8 & t_Slice u8 & t_Slice u8)
|
||||||
|
(requires
|
||||||
|
Seq.length private_key >=
|
||||||
|
v v_CPA_SECRET_KEY_SIZE + v v_PUBLIC_KEY_SIZE + v Libcrux_ml_kem.Constants.v_H_DIGEST_SIZE)
|
||||||
|
(ensures
|
||||||
|
fun result ->
|
||||||
|
let result:(t_Slice u8 & t_Slice u8 & t_Slice u8 & t_Slice u8) = result in
|
||||||
|
let ind_cpa_secret_key_s, rest = split private_key v_CPA_SECRET_KEY_SIZE in
|
||||||
|
let ind_cpa_public_key_s, rest = split rest v_PUBLIC_KEY_SIZE in
|
||||||
|
let ind_cpa_public_key_hash_s, implicit_rejection_value_s =
|
||||||
|
split rest Libcrux_ml_kem.Constants.v_H_DIGEST_SIZE
|
||||||
|
in
|
||||||
|
let
|
||||||
|
ind_cpa_secret_key, ind_cpa_public_key, ind_cpa_public_key_hash, implicit_rejection_value
|
||||||
|
=
|
||||||
|
result
|
||||||
|
in
|
||||||
|
ind_cpa_secret_key_s == ind_cpa_secret_key /\ ind_cpa_public_key_s == ind_cpa_public_key /\
|
||||||
|
ind_cpa_public_key_hash_s == ind_cpa_public_key_hash /\
|
||||||
|
implicit_rejection_value_s == implicit_rejection_value /\
|
||||||
|
Seq.length ind_cpa_secret_key == v v_CPA_SECRET_KEY_SIZE /\
|
||||||
|
Seq.length ind_cpa_public_key == v v_PUBLIC_KEY_SIZE /\
|
||||||
|
Seq.length ind_cpa_public_key_hash == v Libcrux_ml_kem.Constants.v_H_DIGEST_SIZE /\
|
||||||
|
Seq.length implicit_rejection_value ==
|
||||||
|
Seq.length private_key -
|
||||||
|
(v v_CPA_SECRET_KEY_SIZE + v v_PUBLIC_KEY_SIZE +
|
||||||
|
v Libcrux_ml_kem.Constants.v_H_DIGEST_SIZE)) =
|
||||||
|
let ind_cpa_secret_key, secret_key:(t_Slice u8 & t_Slice u8) =
|
||||||
|
Core.Slice.impl__split_at #u8 private_key v_CPA_SECRET_KEY_SIZE
|
||||||
|
in
|
||||||
|
let ind_cpa_public_key, secret_key:(t_Slice u8 & t_Slice u8) =
|
||||||
|
Core.Slice.impl__split_at #u8 secret_key v_PUBLIC_KEY_SIZE
|
||||||
|
in
|
||||||
|
let ind_cpa_public_key_hash, implicit_rejection_value:(t_Slice u8 & t_Slice u8) =
|
||||||
|
Core.Slice.impl__split_at #u8 secret_key Libcrux_ml_kem.Constants.v_H_DIGEST_SIZE
|
||||||
|
in
|
||||||
|
ind_cpa_secret_key, ind_cpa_public_key, ind_cpa_public_key_hash, implicit_rejection_value
|
||||||
|
<:
|
||||||
|
(t_Slice u8 & t_Slice u8 & t_Slice u8 & t_Slice u8)
|
||||||
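(* Illustrative note, not part of the extracted file: for ML-KEM 768 the
   concrete sizes would be CPA_SECRET_KEY_SIZE = 1152, PUBLIC_KEY_SIZE = 1184
   and H_DIGEST_SIZE = 32, so `unpack_private_key` splits a 2400-byte private
   key into 1152 | 1184 | 32 | 32 bytes, the trailing 32 bytes being the
   implicit rejection value. These numbers come from FIPS 203, not from this
   module. *)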
101
proofs/fstar/models/Libcrux_ml_kem.Variant.fsti
Normal file
@@ -0,0 +1,101 @@
module Libcrux_ml_kem.Variant
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
open Core
open FStar.Mul

let _ =
(* This module has implicit dependencies, here we make them explicit. *)
(* The implicit dependencies arise from typeclasses instances. *)
let open Libcrux_ml_kem.Hash_functions in
()

/// This trait collects differences in specification between ML-KEM
/// (FIPS 203) and the Round 3 CRYSTALS-Kyber submission in the
/// NIST PQ competition.
/// cf. FIPS 203, Appendix C
class t_Variant (v_Self: Type0) = {
f_kdf_pre:
v_K: usize ->
v_CIPHERTEXT_SIZE: usize ->
#v_Hasher: Type0 ->
{| i1: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
shared_secret: t_Slice u8 ->
ciphertext: Libcrux_ml_kem.Types.t_MlKemCiphertext v_CIPHERTEXT_SIZE
-> pred: Type0{(Core.Slice.impl__len #u8 shared_secret <: usize) =. mk_usize 32 ==> pred};
f_kdf_post:
v_K: usize ->
v_CIPHERTEXT_SIZE: usize ->
#v_Hasher: Type0 ->
{| i1: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
shared_secret: t_Slice u8 ->
ciphertext: Libcrux_ml_kem.Types.t_MlKemCiphertext v_CIPHERTEXT_SIZE ->
res: t_Array u8 (mk_usize 32)
-> pred: Type0{pred ==> res == shared_secret};
f_kdf:
v_K: usize ->
v_CIPHERTEXT_SIZE: usize ->
#v_Hasher: Type0 ->
{| i1: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
x0: t_Slice u8 ->
x1: Libcrux_ml_kem.Types.t_MlKemCiphertext v_CIPHERTEXT_SIZE
-> Prims.Pure (t_Array u8 (mk_usize 32))
(f_kdf_pre v_K v_CIPHERTEXT_SIZE #v_Hasher #i1 x0 x1)
(fun result -> f_kdf_post v_K v_CIPHERTEXT_SIZE #v_Hasher #i1 x0 x1 result);
f_entropy_preprocess_pre:
v_K: usize ->
#v_Hasher: Type0 ->
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
randomness: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 randomness <: usize) =. mk_usize 32 ==> pred};
f_entropy_preprocess_post:
v_K: usize ->
#v_Hasher: Type0 ->
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
randomness: t_Slice u8 ->
res: t_Array u8 (mk_usize 32)
-> pred: Type0{pred ==> res == randomness};
f_entropy_preprocess:
v_K: usize ->
#v_Hasher: Type0 ->
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
x0: t_Slice u8
-> Prims.Pure (t_Array u8 (mk_usize 32))
(f_entropy_preprocess_pre v_K #v_Hasher #i3 x0)
(fun result -> f_entropy_preprocess_post v_K #v_Hasher #i3 x0 result);
f_cpa_keygen_seed_pre:
v_K: usize ->
#v_Hasher: Type0 ->
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
seed: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 seed <: usize) =. mk_usize 32 ==> pred};
f_cpa_keygen_seed_post:
v_K: usize ->
#v_Hasher: Type0 ->
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
seed: t_Slice u8 ->
res: t_Array u8 (mk_usize 64)
-> pred:
Type0
{ pred ==>
Seq.length seed == 32 ==>
res == Spec.Utils.v_G (Seq.append seed (Seq.create 1 (cast v_K <: u8))) };
f_cpa_keygen_seed:
v_K: usize ->
#v_Hasher: Type0 ->
{| i3: Libcrux_ml_kem.Hash_functions.t_Hash v_Hasher v_K |} ->
x0: t_Slice u8
-> Prims.Pure (t_Array u8 (mk_usize 64))
(f_cpa_keygen_seed_pre v_K #v_Hasher #i3 x0)
(fun result -> f_cpa_keygen_seed_post v_K #v_Hasher #i3 x0 result)
}

/// Implements [`Variant`], to perform the ML-KEM-specific actions
/// during encapsulation and decapsulation.
/// Specifically,
/// * during key generation, the seed hash is domain separated (this is a difference from the FIPS 203 IPD and Kyber)
/// * during encapsulation, the initial randomness is used without prior hashing,
/// * the derivation of the shared secret does not include a hash of the ML-KEM ciphertext.
type t_MlKem = | MlKem : t_MlKem

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl:t_Variant t_MlKem
451
proofs/fstar/models/Libcrux_ml_kem.Vector.Traits.fsti
Normal file
@@ -0,0 +1,451 @@
module Libcrux_ml_kem.Vector.Traits
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
open Core
open FStar.Mul

let v_MONTGOMERY_R_SQUARED_MOD_FIELD_MODULUS: i16 = mk_i16 1353

let v_FIELD_MODULUS: i16 = mk_i16 3329

let v_FIELD_ELEMENTS_IN_VECTOR: usize = mk_usize 16

let v_INVERSE_OF_MODULUS_MOD_MONTGOMERY_R: u32 = mk_u32 62209

let v_BARRETT_SHIFT: i32 = mk_i32 26

let v_BARRETT_R: i32 = mk_i32 1 <<! v_BARRETT_SHIFT

class t_Repr (v_Self: Type0) = {
[@@@ FStar.Tactics.Typeclasses.no_method]_super_5883514518384729217:Core.Marker.t_Copy v_Self;
[@@@ FStar.Tactics.Typeclasses.no_method]_super_16027770981543256320:Core.Clone.t_Clone v_Self;
f_repr_pre:x: v_Self -> pred: Type0{true ==> pred};
f_repr_post:v_Self -> t_Array i16 (mk_usize 16) -> Type0;
f_repr:x0: v_Self
-> Prims.Pure (t_Array i16 (mk_usize 16)) (f_repr_pre x0) (fun result -> f_repr_post x0 result)
}

class t_Operations (v_Self: Type0) = {
[@@@ FStar.Tactics.Typeclasses.no_method]_super_5883514518384729217:Core.Marker.t_Copy v_Self;
[@@@ FStar.Tactics.Typeclasses.no_method]_super_16027770981543256320:Core.Clone.t_Clone v_Self;
[@@@ FStar.Tactics.Typeclasses.no_method]_super_15138760880757129450:t_Repr v_Self;
f_ZERO_pre:x: Prims.unit
-> pred:
Type0
{ (let _:Prims.unit = x in
true) ==>
pred };
f_ZERO_post:x: Prims.unit -> result: v_Self
-> pred:
Type0
{ pred ==>
(let _:Prims.unit = x in
f_repr result == Seq.create 16 (mk_i16 0)) };
f_ZERO:x0: Prims.unit -> Prims.Pure v_Self (f_ZERO_pre x0) (fun result -> f_ZERO_post x0 result);
f_from_i16_array_pre:array: t_Slice i16
-> pred: Type0{(Core.Slice.impl__len #i16 array <: usize) =. mk_usize 16 ==> pred};
f_from_i16_array_post:array: t_Slice i16 -> result: v_Self
-> pred: Type0{pred ==> f_repr result == array};
f_from_i16_array:x0: t_Slice i16
-> Prims.Pure v_Self (f_from_i16_array_pre x0) (fun result -> f_from_i16_array_post x0 result);
f_to_i16_array_pre:x: v_Self -> pred: Type0{true ==> pred};
f_to_i16_array_post:x: v_Self -> result: t_Array i16 (mk_usize 16)
-> pred: Type0{pred ==> f_repr x == result};
f_to_i16_array:x0: v_Self
-> Prims.Pure (t_Array i16 (mk_usize 16))
(f_to_i16_array_pre x0)
(fun result -> f_to_i16_array_post x0 result);
f_from_bytes_pre:array: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 array <: usize) >=. mk_usize 32 ==> pred};
f_from_bytes_post:t_Slice u8 -> v_Self -> Type0;
f_from_bytes:x0: t_Slice u8
-> Prims.Pure v_Self (f_from_bytes_pre x0) (fun result -> f_from_bytes_post x0 result);
f_to_bytes_pre:x: v_Self -> bytes: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 bytes <: usize) >=. mk_usize 32 ==> pred};
f_to_bytes_post:v_Self -> t_Slice u8 -> t_Slice u8 -> Type0;
f_to_bytes:x0: v_Self -> x1: t_Slice u8
-> Prims.Pure (t_Slice u8) (f_to_bytes_pre x0 x1) (fun result -> f_to_bytes_post x0 x1 result);
f_add_pre:lhs: v_Self -> rhs: v_Self
-> pred:
Type0
{ (forall i.
i < 16 ==>
Spec.Utils.is_intb (pow2 15 - 1)
(v (Seq.index (f_repr lhs) i) + v (Seq.index (f_repr rhs) i))) ==>
pred };
f_add_post:lhs: v_Self -> rhs: v_Self -> result: v_Self
-> pred:
Type0
{ pred ==>
(forall i.
i < 16 ==>
(v (Seq.index (f_repr result) i) ==
v (Seq.index (f_repr lhs) i) + v (Seq.index (f_repr rhs) i))) };
f_add:x0: v_Self -> x1: v_Self
-> Prims.Pure v_Self (f_add_pre x0 x1) (fun result -> f_add_post x0 x1 result);
f_sub_pre:lhs: v_Self -> rhs: v_Self
-> pred:
Type0
{ (forall i.
i < 16 ==>
Spec.Utils.is_intb (pow2 15 - 1)
(v (Seq.index (f_repr lhs) i) - v (Seq.index (f_repr rhs) i))) ==>
pred };
f_sub_post:lhs: v_Self -> rhs: v_Self -> result: v_Self
-> pred:
Type0
{ pred ==>
(forall i.
i < 16 ==>
(v (Seq.index (f_repr result) i) ==
v (Seq.index (f_repr lhs) i) - v (Seq.index (f_repr rhs) i))) };
f_sub:x0: v_Self -> x1: v_Self
-> Prims.Pure v_Self (f_sub_pre x0 x1) (fun result -> f_sub_post x0 x1 result);
f_multiply_by_constant_pre:vec: v_Self -> c: i16
-> pred:
Type0
{ (forall i.
i < 16 ==> Spec.Utils.is_intb (pow2 15 - 1) (v (Seq.index (f_repr vec) i) * v c)) ==>
pred };
f_multiply_by_constant_post:vec: v_Self -> c: i16 -> result: v_Self
-> pred:
Type0
{ pred ==>
(forall i.
i < 16 ==> (v (Seq.index (f_repr result) i) == v (Seq.index (f_repr vec) i) * v c)) };
f_multiply_by_constant:x0: v_Self -> x1: i16
-> Prims.Pure v_Self
(f_multiply_by_constant_pre x0 x1)
(fun result -> f_multiply_by_constant_post x0 x1 result);
f_bitwise_and_with_constant_pre:v: v_Self -> c: i16 -> pred: Type0{true ==> pred};
f_bitwise_and_with_constant_post:v: v_Self -> c: i16 -> result: v_Self
-> pred: Type0{pred ==> f_repr result == Spec.Utils.map_array (fun x -> x &. c) (f_repr v)};
f_bitwise_and_with_constant:x0: v_Self -> x1: i16
-> Prims.Pure v_Self
(f_bitwise_and_with_constant_pre x0 x1)
(fun result -> f_bitwise_and_with_constant_post x0 x1 result);
f_shift_right_pre:v_SHIFT_BY: i32 -> v: v_Self
-> pred: Type0{v_SHIFT_BY >=. mk_i32 0 && v_SHIFT_BY <. mk_i32 16 ==> pred};
f_shift_right_post:v_SHIFT_BY: i32 -> v: v_Self -> result: v_Self
-> pred:
Type0
{ pred ==>
(v_SHIFT_BY >=. (mk_i32 0) /\ v_SHIFT_BY <. (mk_i32 16)) ==>
f_repr result == Spec.Utils.map_array (fun x -> x >>! v_SHIFT_BY) (f_repr v) };
f_shift_right:v_SHIFT_BY: i32 -> x0: v_Self
-> Prims.Pure v_Self
(f_shift_right_pre v_SHIFT_BY x0)
(fun result -> f_shift_right_post v_SHIFT_BY x0 result);
f_cond_subtract_3329__pre:v: v_Self
-> pred: Type0{Spec.Utils.is_i16b_array (pow2 12 - 1) (f_repr v) ==> pred};
f_cond_subtract_3329__post:v: v_Self -> result: v_Self
-> pred:
Type0
{ pred ==>
f_repr result ==
Spec.Utils.map_array (fun x -> if x >=. (mk_i16 3329) then x -! (mk_i16 3329) else x)
(f_repr v) };
f_cond_subtract_3329_:x0: v_Self
-> Prims.Pure v_Self
(f_cond_subtract_3329__pre x0)
(fun result -> f_cond_subtract_3329__post x0 result);
f_barrett_reduce_pre:vector: v_Self
-> pred: Type0{Spec.Utils.is_i16b_array 28296 (f_repr vector) ==> pred};
f_barrett_reduce_post:v_Self -> v_Self -> Type0;
f_barrett_reduce:x0: v_Self
-> Prims.Pure v_Self (f_barrett_reduce_pre x0) (fun result -> f_barrett_reduce_post x0 result);
f_montgomery_multiply_by_constant_pre:v: v_Self -> c: i16
-> pred: Type0{Spec.Utils.is_i16b 1664 c ==> pred};
f_montgomery_multiply_by_constant_post:v_Self -> i16 -> v_Self -> Type0;
f_montgomery_multiply_by_constant:x0: v_Self -> x1: i16
-> Prims.Pure v_Self
(f_montgomery_multiply_by_constant_pre x0 x1)
(fun result -> f_montgomery_multiply_by_constant_post x0 x1 result);
f_compress_1__pre:a: v_Self
-> pred:
Type0
{ (forall (i: nat).
i < 16 ==> v (Seq.index (f_repr a) i) >= 0 /\ v (Seq.index (f_repr a) i) < 3329) ==>
pred };
f_compress_1__post:a: v_Self -> result: v_Self
-> pred: Type0{pred ==> (forall (i: nat). i < 16 ==> bounded (Seq.index (f_repr result) i) 1)};
f_compress_1_:x0: v_Self
-> Prims.Pure v_Self (f_compress_1__pre x0) (fun result -> f_compress_1__post x0 result);
f_compress_pre:v_COEFFICIENT_BITS: i32 -> a: v_Self
-> pred:
Type0
{ (v v_COEFFICIENT_BITS == 4 \/ v v_COEFFICIENT_BITS == 5 \/ v v_COEFFICIENT_BITS == 10 \/
v v_COEFFICIENT_BITS == 11) /\
(forall (i: nat).
i < 16 ==> v (Seq.index (f_repr a) i) >= 0 /\ v (Seq.index (f_repr a) i) < 3329) ==>
pred };
f_compress_post:v_COEFFICIENT_BITS: i32 -> a: v_Self -> result: v_Self
-> pred:
Type0
{ pred ==>
(v v_COEFFICIENT_BITS == 4 \/ v v_COEFFICIENT_BITS == 5 \/ v v_COEFFICIENT_BITS == 10 \/
v v_COEFFICIENT_BITS == 11) ==>
(forall (i: nat). i < 16 ==> bounded (Seq.index (f_repr result) i) (v v_COEFFICIENT_BITS))
};
f_compress:v_COEFFICIENT_BITS: i32 -> x0: v_Self
-> Prims.Pure v_Self
(f_compress_pre v_COEFFICIENT_BITS x0)
(fun result -> f_compress_post v_COEFFICIENT_BITS x0 result);
f_decompress_ciphertext_coefficient_pre:v_COEFFICIENT_BITS: i32 -> a: v_Self
-> pred:
Type0
{ (v v_COEFFICIENT_BITS == 4 \/ v v_COEFFICIENT_BITS == 5 \/ v v_COEFFICIENT_BITS == 10 \/
v v_COEFFICIENT_BITS == 11) /\
(forall (i: nat).
i < 16 ==>
v (Seq.index (f_repr a) i) >= 0 /\
v (Seq.index (f_repr a) i) < pow2 (v v_COEFFICIENT_BITS)) ==>
pred };
f_decompress_ciphertext_coefficient_post:v_COEFFICIENT_BITS: i32 -> v_Self -> v_Self -> Type0;
f_decompress_ciphertext_coefficient:v_COEFFICIENT_BITS: i32 -> x0: v_Self
-> Prims.Pure v_Self
(f_decompress_ciphertext_coefficient_pre v_COEFFICIENT_BITS x0)
(fun result -> f_decompress_ciphertext_coefficient_post v_COEFFICIENT_BITS x0 result);
f_ntt_layer_1_step_pre:a: v_Self -> zeta0: i16 -> zeta1: i16 -> zeta2: i16 -> zeta3: i16
-> pred:
Type0
{ Spec.Utils.is_i16b 1664 zeta0 /\ Spec.Utils.is_i16b 1664 zeta1 /\
Spec.Utils.is_i16b 1664 zeta2 /\ Spec.Utils.is_i16b 1664 zeta3 /\
Spec.Utils.is_i16b_array (11207 + 5 * 3328) (f_repr a) ==>
pred };
f_ntt_layer_1_step_post:
a: v_Self ->
zeta0: i16 ->
zeta1: i16 ->
zeta2: i16 ->
zeta3: i16 ->
out: v_Self
-> pred: Type0{pred ==> Spec.Utils.is_i16b_array (11207 + 6 * 3328) (f_repr out)};
f_ntt_layer_1_step:x0: v_Self -> x1: i16 -> x2: i16 -> x3: i16 -> x4: i16
-> Prims.Pure v_Self
(f_ntt_layer_1_step_pre x0 x1 x2 x3 x4)
(fun result -> f_ntt_layer_1_step_post x0 x1 x2 x3 x4 result);
f_ntt_layer_2_step_pre:a: v_Self -> zeta0: i16 -> zeta1: i16
-> pred:
Type0
{ Spec.Utils.is_i16b 1664 zeta0 /\ Spec.Utils.is_i16b 1664 zeta1 /\
Spec.Utils.is_i16b_array (11207 + 4 * 3328) (f_repr a) ==>
pred };
f_ntt_layer_2_step_post:a: v_Self -> zeta0: i16 -> zeta1: i16 -> out: v_Self
-> pred: Type0{pred ==> Spec.Utils.is_i16b_array (11207 + 5 * 3328) (f_repr out)};
f_ntt_layer_2_step:x0: v_Self -> x1: i16 -> x2: i16
-> Prims.Pure v_Self
(f_ntt_layer_2_step_pre x0 x1 x2)
(fun result -> f_ntt_layer_2_step_post x0 x1 x2 result);
f_ntt_layer_3_step_pre:a: v_Self -> zeta: i16
-> pred:
Type0
{ Spec.Utils.is_i16b 1664 zeta /\ Spec.Utils.is_i16b_array (11207 + 3 * 3328) (f_repr a) ==>
pred };
f_ntt_layer_3_step_post:a: v_Self -> zeta: i16 -> out: v_Self
-> pred: Type0{pred ==> Spec.Utils.is_i16b_array (11207 + 4 * 3328) (f_repr out)};
f_ntt_layer_3_step:x0: v_Self -> x1: i16
-> Prims.Pure v_Self
(f_ntt_layer_3_step_pre x0 x1)
(fun result -> f_ntt_layer_3_step_post x0 x1 result);
f_inv_ntt_layer_1_step_pre:a: v_Self -> zeta0: i16 -> zeta1: i16 -> zeta2: i16 -> zeta3: i16
-> pred:
Type0
{ Spec.Utils.is_i16b 1664 zeta0 /\ Spec.Utils.is_i16b 1664 zeta1 /\
Spec.Utils.is_i16b 1664 zeta2 /\ Spec.Utils.is_i16b 1664 zeta3 /\
Spec.Utils.is_i16b_array (4 * 3328) (f_repr a) ==>
pred };
f_inv_ntt_layer_1_step_post:
a: v_Self ->
zeta0: i16 ->
zeta1: i16 ->
zeta2: i16 ->
zeta3: i16 ->
out: v_Self
-> pred: Type0{pred ==> Spec.Utils.is_i16b_array 3328 (f_repr out)};
f_inv_ntt_layer_1_step:x0: v_Self -> x1: i16 -> x2: i16 -> x3: i16 -> x4: i16
-> Prims.Pure v_Self
(f_inv_ntt_layer_1_step_pre x0 x1 x2 x3 x4)
(fun result -> f_inv_ntt_layer_1_step_post x0 x1 x2 x3 x4 result);
f_inv_ntt_layer_2_step_pre:a: v_Self -> zeta0: i16 -> zeta1: i16
-> pred:
Type0
{ Spec.Utils.is_i16b 1664 zeta0 /\ Spec.Utils.is_i16b 1664 zeta1 /\
Spec.Utils.is_i16b_array 3328 (f_repr a) ==>
pred };
f_inv_ntt_layer_2_step_post:a: v_Self -> zeta0: i16 -> zeta1: i16 -> out: v_Self
-> pred: Type0{pred ==> Spec.Utils.is_i16b_array 3328 (f_repr out)};
f_inv_ntt_layer_2_step:x0: v_Self -> x1: i16 -> x2: i16
-> Prims.Pure v_Self
(f_inv_ntt_layer_2_step_pre x0 x1 x2)
(fun result -> f_inv_ntt_layer_2_step_post x0 x1 x2 result);
f_inv_ntt_layer_3_step_pre:a: v_Self -> zeta: i16
-> pred:
Type0{Spec.Utils.is_i16b 1664 zeta /\ Spec.Utils.is_i16b_array 3328 (f_repr a) ==> pred};
f_inv_ntt_layer_3_step_post:a: v_Self -> zeta: i16 -> out: v_Self
-> pred: Type0{pred ==> Spec.Utils.is_i16b_array 3328 (f_repr out)};
f_inv_ntt_layer_3_step:x0: v_Self -> x1: i16
-> Prims.Pure v_Self
(f_inv_ntt_layer_3_step_pre x0 x1)
(fun result -> f_inv_ntt_layer_3_step_post x0 x1 result);
f_ntt_multiply_pre:
lhs: v_Self ->
rhs: v_Self ->
zeta0: i16 ->
zeta1: i16 ->
zeta2: i16 ->
zeta3: i16
-> pred:
Type0
{ Spec.Utils.is_i16b 1664 zeta0 /\ Spec.Utils.is_i16b 1664 zeta1 /\
Spec.Utils.is_i16b 1664 zeta2 /\ Spec.Utils.is_i16b 1664 zeta3 /\
Spec.Utils.is_i16b_array 3328 (f_repr lhs) /\ Spec.Utils.is_i16b_array 3328 (f_repr rhs) ==>
pred };
f_ntt_multiply_post:
lhs: v_Self ->
rhs: v_Self ->
zeta0: i16 ->
zeta1: i16 ->
zeta2: i16 ->
zeta3: i16 ->
out: v_Self
-> pred: Type0{pred ==> Spec.Utils.is_i16b_array 3328 (f_repr out)};
f_ntt_multiply:x0: v_Self -> x1: v_Self -> x2: i16 -> x3: i16 -> x4: i16 -> x5: i16
-> Prims.Pure v_Self
(f_ntt_multiply_pre x0 x1 x2 x3 x4 x5)
(fun result -> f_ntt_multiply_post x0 x1 x2 x3 x4 x5 result);
f_serialize_1__pre:a: v_Self -> pred: Type0{Spec.MLKEM.serialize_pre 1 (f_repr a) ==> pred};
f_serialize_1__post:a: v_Self -> result: t_Array u8 (mk_usize 2)
-> pred:
Type0
{ pred ==>
Spec.MLKEM.serialize_pre 1 (f_repr a) ==> Spec.MLKEM.serialize_post 1 (f_repr a) result };
f_serialize_1_:x0: v_Self
-> Prims.Pure (t_Array u8 (mk_usize 2))
(f_serialize_1__pre x0)
(fun result -> f_serialize_1__post x0 result);
f_deserialize_1__pre:a: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 a <: usize) =. mk_usize 2 ==> pred};
f_deserialize_1__post:a: t_Slice u8 -> result: v_Self
-> pred:
Type0{pred ==> sz (Seq.length a) =. sz 2 ==> Spec.MLKEM.deserialize_post 1 a (f_repr result)};
f_deserialize_1_:x0: t_Slice u8
-> Prims.Pure v_Self (f_deserialize_1__pre x0) (fun result -> f_deserialize_1__post x0 result);
f_serialize_4__pre:a: v_Self -> pred: Type0{Spec.MLKEM.serialize_pre 4 (f_repr a) ==> pred};
f_serialize_4__post:a: v_Self -> result: t_Array u8 (mk_usize 8)
-> pred:
Type0
{ pred ==>
Spec.MLKEM.serialize_pre 4 (f_repr a) ==> Spec.MLKEM.serialize_post 4 (f_repr a) result };
f_serialize_4_:x0: v_Self
-> Prims.Pure (t_Array u8 (mk_usize 8))
(f_serialize_4__pre x0)
(fun result -> f_serialize_4__post x0 result);
f_deserialize_4__pre:a: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 a <: usize) =. mk_usize 8 ==> pred};
f_deserialize_4__post:a: t_Slice u8 -> result: v_Self
-> pred:
Type0{pred ==> sz (Seq.length a) =. sz 8 ==> Spec.MLKEM.deserialize_post 4 a (f_repr result)};
f_deserialize_4_:x0: t_Slice u8
-> Prims.Pure v_Self (f_deserialize_4__pre x0) (fun result -> f_deserialize_4__post x0 result);
f_serialize_5__pre:v_Self -> Type0;
f_serialize_5__post:v_Self -> t_Array u8 (mk_usize 10) -> Type0;
f_serialize_5_:x0: v_Self
-> Prims.Pure (t_Array u8 (mk_usize 10))
(f_serialize_5__pre x0)
(fun result -> f_serialize_5__post x0 result);
f_deserialize_5__pre:a: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 a <: usize) =. mk_usize 10 ==> pred};
f_deserialize_5__post:t_Slice u8 -> v_Self -> Type0;
f_deserialize_5_:x0: t_Slice u8
-> Prims.Pure v_Self (f_deserialize_5__pre x0) (fun result -> f_deserialize_5__post x0 result);
f_serialize_10__pre:a: v_Self -> pred: Type0{Spec.MLKEM.serialize_pre 10 (f_repr a) ==> pred};
f_serialize_10__post:a: v_Self -> result: t_Array u8 (mk_usize 20)
-> pred:
Type0
{ pred ==>
Spec.MLKEM.serialize_pre 10 (f_repr a) ==> Spec.MLKEM.serialize_post 10 (f_repr a) result
};
f_serialize_10_:x0: v_Self
-> Prims.Pure (t_Array u8 (mk_usize 20))
(f_serialize_10__pre x0)
(fun result -> f_serialize_10__post x0 result);
f_deserialize_10__pre:a: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 a <: usize) =. mk_usize 20 ==> pred};
f_deserialize_10__post:a: t_Slice u8 -> result: v_Self
-> pred:
Type0
{pred ==> sz (Seq.length a) =. sz 20 ==> Spec.MLKEM.deserialize_post 10 a (f_repr result)};
f_deserialize_10_:x0: t_Slice u8
-> Prims.Pure v_Self (f_deserialize_10__pre x0) (fun result -> f_deserialize_10__post x0 result);
f_serialize_11__pre:v_Self -> Type0;
f_serialize_11__post:v_Self -> t_Array u8 (mk_usize 22) -> Type0;
f_serialize_11_:x0: v_Self
-> Prims.Pure (t_Array u8 (mk_usize 22))
(f_serialize_11__pre x0)
(fun result -> f_serialize_11__post x0 result);
f_deserialize_11__pre:a: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 a <: usize) =. mk_usize 22 ==> pred};
f_deserialize_11__post:t_Slice u8 -> v_Self -> Type0;
f_deserialize_11_:x0: t_Slice u8
-> Prims.Pure v_Self (f_deserialize_11__pre x0) (fun result -> f_deserialize_11__post x0 result);
f_serialize_12__pre:a: v_Self -> pred: Type0{Spec.MLKEM.serialize_pre 12 (f_repr a) ==> pred};
f_serialize_12__post:a: v_Self -> result: t_Array u8 (mk_usize 24)
-> pred:
Type0
{ pred ==>
Spec.MLKEM.serialize_pre 12 (f_repr a) ==> Spec.MLKEM.serialize_post 12 (f_repr a) result
};
f_serialize_12_:x0: v_Self
-> Prims.Pure (t_Array u8 (mk_usize 24))
(f_serialize_12__pre x0)
(fun result -> f_serialize_12__post x0 result);
f_deserialize_12__pre:a: t_Slice u8
-> pred: Type0{(Core.Slice.impl__len #u8 a <: usize) =. mk_usize 24 ==> pred};
f_deserialize_12__post:a: t_Slice u8 -> result: v_Self
-> pred:
Type0
{pred ==> sz (Seq.length a) =. sz 24 ==> Spec.MLKEM.deserialize_post 12 a (f_repr result)};
f_deserialize_12_:x0: t_Slice u8
-> Prims.Pure v_Self (f_deserialize_12__pre x0) (fun result -> f_deserialize_12__post x0 result);
f_rej_sample_pre:a: t_Slice u8 -> out: t_Slice i16
-> pred:
Type0
{ (Core.Slice.impl__len #u8 a <: usize) =. mk_usize 24 &&
(Core.Slice.impl__len #i16 out <: usize) =. mk_usize 16 ==>
pred };
f_rej_sample_post:a: t_Slice u8 -> out: t_Slice i16 -> x: (t_Slice i16 & usize)
-> pred:
Type0
{ pred ==>
(let out_future, result:(t_Slice i16 & usize) = x in
Seq.length out_future == Seq.length out /\ v result <= 16) };
f_rej_sample:x0: t_Slice u8 -> x1: t_Slice i16
-> Prims.Pure (t_Slice i16 & usize)
(f_rej_sample_pre x0 x1)
(fun result -> f_rej_sample_post x0 x1 result)
}

val montgomery_multiply_fe (#v_T: Type0) {| i1: t_Operations v_T |} (v: v_T) (fer: i16)
: Prims.Pure v_T (requires Spec.Utils.is_i16b 1664 fer) (fun _ -> Prims.l_True)

val to_standard_domain (#v_T: Type0) {| i1: t_Operations v_T |} (v: v_T)
: Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True)

val to_unsigned_representative (#v_T: Type0) {| i1: t_Operations v_T |} (a: v_T)
: Prims.Pure v_T
(requires Spec.Utils.is_i16b_array 3328 (i1._super_15138760880757129450.f_repr a))
(ensures
fun result ->
let result:v_T = result in
forall i.
(let x = Seq.index (i1._super_15138760880757129450.f_repr a) i in
let y = Seq.index (i1._super_15138760880757129450.f_repr result) i in
(v y >= 0 /\ v y <= 3328 /\ (v y % 3329 == v x % 3329))))

val decompress_1_ (#v_T: Type0) {| i1: t_Operations v_T |} (vec: v_T)
: Prims.Pure v_T
(requires
forall i.
let x = Seq.index (i1._super_15138760880757129450.f_repr vec) i in
(x == mk_i16 0 \/ x == mk_i16 1))
(fun _ -> Prims.l_True)
271
proofs/fstar/models/Makefile
Normal file
@@ -0,0 +1,271 @@
# This is a generically useful Makefile for F* that is self-contained
#
# We expect:
# 1. `fstar.exe` to be in PATH (alternatively, you can set
#    $FSTAR_HOME to your F* repo/install directory)
#
# 2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.
#
# 3. the extracted Cargo crate to have "hax-lib" as a dependency:
#    `hax-lib = { version = "0.1.0-pre.1", git = "https://github.com/hacspec/hax"}`
#
# Optionally, you can set `HACL_HOME`.
#
# ROOTS contains all the top-level F* files you wish to verify
# The default target `verify` verifies ROOTS and its dependencies
# To lax-check instead, set `OTHERFLAGS="--lax"` on the command-line
#
# To make F* emacs mode use the settings in this file, you need to
# add the following lines to your .emacs
#
# (setq-default fstar-executable "<YOUR_FSTAR_HOME>/bin/fstar.exe")
# (setq-default fstar-smt-executable "<YOUR_Z3_HOME>/bin/z3")
#
# (defun my-fstar-compute-prover-args-using-make ()
#   "Construct arguments to pass to F* by calling make."
#   (with-demoted-errors "Error when constructing arg string: %S"
#     (let* ((fname (file-name-nondirectory buffer-file-name))
#            (target (concat fname "-in"))
#            (argstr (car (process-lines "make" "--quiet" target))))
#       (split-string argstr))))
# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)
#

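# Quick usage sketch (added commentary, not part of the original header; the
# targets referenced here are all defined further down in this Makefile):
#
#   make                      # verify all ROOTS and their dependencies
#   make all-keep-going       # keep verifying past the first failure
#   make run/MyModule.fst     # run F* on a single module
#   OTHERFLAGS="--lax" make   # lax-check instead of fully verifying
#   make vscode               # regenerate hax.fst.config.json
#
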
PATH_TO_CHILD_MAKEFILE := "$(abspath $(firstword $(MAKEFILE_LIST)))"
PATH_TO_TEMPLATE_MAKEFILE := "$(abspath $(lastword $(MAKEFILE_LIST)))"

HACL_HOME ?= $(HOME)/.hax/hacl_home
# Expand variable FSTAR_BIN_DETECT now, so that we don't run this over and over

FSTAR_BIN_DETECT := $(if $(shell command -v fstar.exe), fstar.exe, $(FSTAR_HOME)/bin/fstar.exe)
FSTAR_BIN ?= $(FSTAR_BIN_DETECT)

GIT_ROOT_DIR := $(shell git rev-parse --show-toplevel)/
CACHE_DIR ?= ${GIT_ROOT_DIR}.fstar-cache/checked
HINT_DIR ?= ${GIT_ROOT_DIR}.fstar-cache/hints

# Makes commands quiet by default
Q ?= @

# Verify the required executables are in PATH
EXECUTABLES = cargo cargo-hax jq
K := $(foreach exec,$(EXECUTABLES),\
$(if $(shell which $(exec)),some string,$(error "No $(exec) in PATH")))

export ANSI_COLOR_BLUE=\033[34m
export ANSI_COLOR_RED=\033[31m
export ANSI_COLOR_BBLUE=\033[1;34m
export ANSI_COLOR_GRAY=\033[90m
export ANSI_COLOR_TONE=\033[35m
export ANSI_COLOR_RESET=\033[0m

ifdef NO_COLOR
export ANSI_COLOR_BLUE=
export ANSI_COLOR_RED=
export ANSI_COLOR_BBLUE=
export ANSI_COLOR_GRAY=
export ANSI_COLOR_TONE=
export ANSI_COLOR_RESET=
endif

# The following is a bash script that discovers F* libraries.
# Due to incompatibilities with make 4.3, I had to make a "oneliner" bash script...
define FINDLIBS
: "Prints a path if and only if it exists. Takes one argument: the path."; \
function print_if_exists() { \
if [ -d "$$1" ]; then \
echo "$$1"; \
fi; \
} ; \
: "Asks Cargo all the dependencies for the current crate or workspace,"; \
: "and extracts all "root" directories for each. Takes zero arguments."; \
function dependencies() { \
cargo metadata --format-version 1 | \
jq -r ".packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")"; \
} ; \
: "Find hax libraries *around* a given path. Takes one argument: the"; \
: "path."; \
function find_hax_libraries_at_path() { \
path="$$1" ; \
: "if there is a [proofs/fstar/extraction] subfolder, then that is a F* library" ; \
print_if_exists "$$path/proofs/fstar/extraction" ; \
: "Maybe the [proof-libs] folder of hax is around?" ; \
MAYBE_PROOF_LIBS=$$(realpath -q "$$path/../proof-libs/fstar") ; \
if [ $$? -eq 0 ]; then \
print_if_exists "$$MAYBE_PROOF_LIBS/core" ; \
print_if_exists "$$MAYBE_PROOF_LIBS/rust_primitives" ; \
fi ; \
} ; \
{ while IFS= read path; do \
find_hax_libraries_at_path "$$path"; \
done < <(dependencies) ; } | sort -u
endef
export FINDLIBS

FSTAR_INCLUDE_DIRS_EXTRA ?=
FINDLIBS_OUTPUT := $(shell bash -c '${FINDLIBS}')
FSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(FSTAR_INCLUDE_DIRS_EXTRA) $(FINDLIBS_OUTPUT) ../models

# Make sure FSTAR_INCLUDE_DIRS has the `proof-libs`, print hints and
# an error message otherwise
ifneq (,$(findstring proof-libs/fstar,$(FSTAR_INCLUDE_DIRS)))
else
K += $(info )
ERROR := $(shell printf '${ANSI_COLOR_RED}Error: could not detect `proof-libs`!${ANSI_COLOR_RESET}')
K += $(info ${ERROR})
ERROR := $(shell printf ' > Do you have `${ANSI_COLOR_BLUE}hax-lib${ANSI_COLOR_RESET}` in your `${ANSI_COLOR_BLUE}Cargo.toml${ANSI_COLOR_RESET}` as a ${ANSI_COLOR_BLUE}git${ANSI_COLOR_RESET} or ${ANSI_COLOR_BLUE}path${ANSI_COLOR_RESET} dependency?')
K += $(info ${ERROR})
ERROR := $(shell printf ' ${ANSI_COLOR_BLUE}> Tip: you may want to run `cargo add --git https://github.com/hacspec/hax hax-lib`${ANSI_COLOR_RESET}')
K += $(info ${ERROR})
K += $(info )
K += $(error Fatal error: `proof-libs` is required.)
endif

.PHONY: all verify clean

all:
$(Q)rm -f .depend
$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) .depend hax.fst.config.json verify

all-keep-going:
$(Q)rm -f .depend
$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) --keep-going .depend hax.fst.config.json verify

# If $HACL_HOME doesn't exist, clone it
${HACL_HOME}:
$(Q)mkdir -p "${HACL_HOME}"
$(info Cloning Hacl* in ${HACL_HOME}...)
git clone --depth 1 https://github.com/hacl-star/hacl-star.git "${HACL_HOME}"
$(info Cloning Hacl* in ${HACL_HOME}... done!)

# If no F* files are detected, we run hax
ifeq "$(wildcard *.fst *fsti)" ""
$(shell cargo hax into fstar)
endif

# By default, we process all the files in the current directory
ROOTS ?= $(wildcard *.fst *fsti)
ADMIT_MODULES ?=

ADMIT_MODULE_FLAGS ?= --admit_smt_queries true

# Can be useful for debugging purposes
FINDLIBS.sh:
$(Q)echo '${FINDLIBS}' > FINDLIBS.sh
include-dirs:
$(Q)bash -c '${FINDLIBS}'

FSTAR_FLAGS = \
--warn_error -321-331-241-274-239-271 \
--ext context_pruning --z3version 4.13.3 --query_stats \
--cache_checked_modules --cache_dir $(CACHE_DIR) \
--already_cached "+Prims+FStar+LowStar+C+Spec.Loops+TestLib" \
$(addprefix --include ,$(FSTAR_INCLUDE_DIRS))

FSTAR := $(FSTAR_BIN) $(FSTAR_FLAGS)

.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(HACL_HOME)
@$(FSTAR) --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@

include .depend

$(HINT_DIR) $(CACHE_DIR):
$(Q)mkdir -p $@

define HELPMESSAGE
echo "hax's default Makefile for F*"
echo ""
echo "The available targets are:"
echo ""
function target() {
printf ' ${ANSI_COLOR_BLUE}%-20b${ANSI_COLOR_RESET} %s\n' "$$1" "$$2"
}
target "all" "Verify every F* file (stops at the first failure)"
target "all-keep-going" "Verify every F* file (tries as many F* modules as possible)"
target "" ""
target "run/${ANSI_COLOR_TONE}<MyModule.fst> " 'Runs F* on `MyModule.fst` only'
target "" ""
target "vscode" 'Generates a `hax.fst.config.json` file'
target "${ANSI_COLOR_TONE}<MyModule.fst>${ANSI_COLOR_BLUE}-in " 'Useful for Emacs, outputs the F* prefix command to be used'
target "" ""
target "clean" 'Cleanup the target'
target "include-dirs" 'List the F* include directories'
target "" ""
target "describe" 'List the F* root modules, and describe the environment.'
echo ""
echo "Variables:"
target "NO_COLOR" "Set to anything to disable colors"
target "ADMIT_MODULES" "List of modules where F* will assume every SMT query"
target "FSTAR_INCLUDE_DIRS_EXTRA" "List of extra F* include dirs"
endef
export HELPMESSAGE

describe:
@printf '${ANSI_COLOR_BBLUE}F* roots:${ANSI_COLOR_RESET}\n'
@for root in ${ROOTS}; do \
filename=$$(basename -- "$$root") ;\
ext="$${filename##*.}" ;\
noext="$${filename%.*}" ;\
printf "${ANSI_COLOR_GRAY}$$(dirname -- "$$root")/${ANSI_COLOR_RESET}%s${ANSI_COLOR_GRAY}.${ANSI_COLOR_TONE}%s${ANSI_COLOR_RESET}%b\n" "$$noext" "$$ext" $$([[ "${ADMIT_MODULES}" =~ (^| )$$root($$| ) ]] && echo '${ANSI_COLOR_RED}\t[ADMITTED]${ANSI_COLOR_RESET}'); \
done
@printf '\n${ANSI_COLOR_BBLUE}Environment:${ANSI_COLOR_RESET}\n'
@printf ' - ${ANSI_COLOR_BLUE}HACL_HOME${ANSI_COLOR_RESET} = %s\n' '${HACL_HOME}'
@printf ' - ${ANSI_COLOR_BLUE}FSTAR_BIN${ANSI_COLOR_RESET} = %s\n' '${FSTAR_BIN}'
@printf ' - ${ANSI_COLOR_BLUE}GIT_ROOT_DIR${ANSI_COLOR_RESET} = %s\n' '${GIT_ROOT_DIR}'
@printf ' - ${ANSI_COLOR_BLUE}CACHE_DIR${ANSI_COLOR_RESET} = %s\n' '${CACHE_DIR}'
@printf ' - ${ANSI_COLOR_BLUE}HINT_DIR${ANSI_COLOR_RESET} = %s\n' '${HINT_DIR}'
@printf ' - ${ANSI_COLOR_BLUE}ADMIT_MODULE_FLAGS${ANSI_COLOR_RESET} = %s\n' '${ADMIT_MODULE_FLAGS}'
@printf ' - ${ANSI_COLOR_BLUE}FSTAR_INCLUDE_DIRS_EXTRA${ANSI_COLOR_RESET} = %s\n' '${FSTAR_INCLUDE_DIRS_EXTRA}'

help: ;@bash -c "$$HELPMESSAGE"
h: ;@bash -c "$$HELPMESSAGE"

HEADER = $(Q)printf '${ANSI_COLOR_BBLUE}[CHECK] %s ${ANSI_COLOR_RESET}\n' "$(basename $(notdir $@))"

run/%: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME)
${HEADER}
$(Q)$(FSTAR) $(OTHERFLAGS) $(@:run/%=%)

VERIFIED_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))
ADMIT_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ADMIT_MODULES)))

$(ADMIT_CHECKED):
$(Q)printf '${ANSI_COLOR_BBLUE}[${ANSI_COLOR_TONE}ADMIT${ANSI_COLOR_BBLUE}] %s ${ANSI_COLOR_RESET}\n' "$(basename $(notdir $@))"
$(Q)$(FSTAR) $(OTHERFLAGS) $(ADMIT_MODULE_FLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \
echo "" ; \
exit 1 ; \
}
$(Q)printf "\n\n"

$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME)
${HEADER}
$(Q)$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \
echo "" ; \
exit 1 ; \
}
touch $@
$(Q)printf "\n\n"

verify: $(VERIFIED_CHECKED) $(ADMIT_CHECKED)

# Targets for Emacs
%.fst-in:
$(info $(FSTAR_FLAGS) \
$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)
%.fsti-in:
$(info $(FSTAR_FLAGS) \
$(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)

# Targets for VSCode
hax.fst.config.json: .depend
$(Q)echo "$(FSTAR_INCLUDE_DIRS)" | jq --arg fstar "$(FSTAR_BIN)" -R 'split(" ") | {fstar_exe: $$fstar | gsub("^\\s+|\\s+$$";""), include_dirs: .}' > $@
vscode:
$(Q)rm -f .depend
$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) hax.fst.config.json

SHELL=bash

# Clean target
clean:
rm -rf $(CACHE_DIR)/*
rm *.fst
59
proofs/fstar/models/MkSeq.fst
Normal file
@@ -0,0 +1,59 @@
module MkSeq
open Core

open FStar.Tactics.V2

private let init (len: nat) (f: (i:nat{i < len}) -> Tac 'a): Tac (list 'a)
= let rec h (i: nat {i <= len}): Tac (list 'a)
= if i = len then [] else f i :: h (i + 1)
in h 0

private let tuple_proj (n: nat) (i: nat): Tac term
= if n = 1 then `(id) else
let name = "__proj__Mktuple" ^ string_of_int n ^ "__item___" ^ string_of_int (i + 1) in
Tv_FVar (pack_fv ["FStar";"Pervasives";"Native";name])

private let tuple_type (n: nat): Tac term
= if n = 1 then `(id) else
let name = "tuple" ^ string_of_int n in
Tv_FVar (pack_fv ["FStar";"Pervasives";"Native";name])

open Rust_primitives.Integers

private let create_gen_tac (n: nat): Tac sigelt
= let typ_bd = {fresh_binder_named "t" (`Type0) with qual = FStar.Reflection.V2.Q_Implicit} in
let typ = binder_to_term typ_bd in
let input_typ = mk_e_app (tuple_type n) (init n (fun _ -> typ)) in
let input_bd = fresh_binder_named "tup" input_typ in
let output_type = `t_Array (`#typ) (sz (`@n)) in
let nth i = `((`#(tuple_proj n i)) (`#input_bd)) in
let mk_and: term -> term -> Tac term = fun t u -> `(`#t /\ `#u) in
let post =
let mk_inv s i = `(Seq.index (`#s) (`@i) == (`#(tuple_proj n i)) (`#input_bd)) in
let invs s = Tactics.fold_left mk_and (`(Seq.length (`#s) == (`@n))) (init n (mk_inv s)) in
let bd = fresh_binder_named "s" output_type in
mk_abs [bd] (invs bd)
in
let comp = C_Eff [] ["Prims"; "Pure"]
(`t_Array (`#typ) (sz (`@n)))
[ (`(requires True), Q_Explicit); (post, Q_Explicit)] []
in
let args = [typ_bd; input_bd] in
let l = Tactics.fold_right (fun hd tl -> `((`#hd)::(`#tl))) (init n nth) (`[]) in
let indexes =
let f i = `((`#(nth i)) == List.Tot.index (`#l) (`@i)) in
Tactics.fold_left mk_and (`True) (init n f)
in
let lb_def = mk_abs args (`(
let l = `#l in
let s = Seq.createL l <: t_Array (`#typ) (sz (`@n)) in
FStar.Classical.forall_intro (Seq.lemma_index_is_nth s);
assert (`#indexes) by (Tactics.norm [primops; iota; delta; zeta]);
s
)) in
let lb_typ = mk_arr args (pack_comp comp) in
let open FStar.List.Tot in
let lb_fv = pack_fv (cur_module () @ ["create" ^ string_of_int n]) in
Sg_Let { isrec = false; lbs = [{ lb_fv; lb_us = []; lb_typ; lb_def }] }

%splice[] (init 13 (fun i -> create_gen_tac (i + 1)))
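(* Note, added commentary rather than part of the extracted file: the splice
   above generates helpers `create1` ... `create13`. For instance, `create3
   (x, y, z)` is a `t_Array` of length 3 whose i-th element is provably the
   i-th tuple component, which is exactly what the generated Pure
   postcondition states. *)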
121
proofs/fstar/models/Num_enum.fsti
Normal file
@@ -0,0 +1,121 @@
module Num_enum
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

(* item error backend: (reject_TraitItemDefault) ExplicitRejection { reason: "a node of kind [Trait_item_default] have been found in the AST" }
Last available AST for this item:

#[feature(register_tool)]#[register_tool(_hax)]trait t_UnsafeFromPrimitive<Self_>{type f_Primitive: TodoPrintRustBoundsTyp;
fn f_from_unchecked((number: proj_asso_type!())) -> Self{num_enum::f_unchecked_transmute_from(number)}
#[_hax::json("\"TraitMethodNoPrePost\"")]fn f_unchecked_transmute_from_pre(_: proj_asso_type!()) -> bool;
#[_hax::json("\"TraitMethodNoPrePost\"")]fn f_unchecked_transmute_from_post(_: proj_asso_type!(),_: Self) -> bool;
fn f_unchecked_transmute_from(_: proj_asso_type!()) -> Self;}

Last AST:
/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
{ Explicit_def_id.T.is_constructor = false;
def_id =
{ Types.index = (0, 0); is_local = true; kind = Types.Trait;
krate = "num_enum";
parent =
(Some { Types.contents =
{ Types.id = 0;
value =
{ Types.index = (0, 0); is_local = true; kind = Types.Mod;
krate = "num_enum"; parent = None; path = [] }
}
});
path =
[{ Types.data = (Types.TypeNs "UnsafeFromPrimitive"); disambiguator = 0
}
]
}
};
moved = None; suffix = None }) */
const _: () = ();
*)

class t_CannotDeriveBothFromPrimitiveAndTryFromPrimitive (v_Self: Type0) = {
__marker_trait:Prims.unit
}

(* class t_FromPrimitive (v_Self: Type0) = {
f_Primitive:Type0;
f_Primitive_8876061459599834537:Core.Marker.t_Copy f_Primitive;
f_Primitive_17391871992276743015:Core.Cmp.t_Eq f_Primitive;
f_from_primitive_pre:f_Primitive -> Type0;
f_from_primitive_post:f_Primitive -> v_Self -> Type0;
f_from_primitive:x0: f_Primitive
-> Prims.Pure v_Self (f_from_primitive_pre x0) (fun result -> f_from_primitive_post x0 result)
} *)

class t_TryFromPrimitive (v_Self: Type0) = {
f_Primitive:Type0;
(* f_Primitive_12399228673407067350:Core.Marker.t_Copy f_Primitive;
f_Primitive_5629480169667985622:Core.Cmp.t_Eq f_Primitive;
f_Primitive_10837566226016321784:Core.Fmt.t_Debug f_Primitive; *)
f_Error:Type0;
f_NAME:string;
f_try_from_primitive_pre:f_Primitive -> Type0;
f_try_from_primitive_post:f_Primitive -> Core.Result.t_Result v_Self f_Error -> Type0;
f_try_from_primitive:x0: f_Primitive
-> Prims.Pure (Core.Result.t_Result v_Self f_Error)
(f_try_from_primitive_pre x0)
(fun result -> f_try_from_primitive_post x0 result)
}

type t_TryFromPrimitiveError (v_Enum: Type0) (* {| i1: t_TryFromPrimitive v_Enum |} *) = {
f_number:(* i1.f_Primitive *) u8
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_5
(#v_Enum: Type0)
{| i1: Core.Clone.t_Clone v_Enum |}
{| i2: t_TryFromPrimitive v_Enum |}
{| i3: Core.Clone.t_Clone i2.f_Primitive |}
: Core.Clone.t_Clone (t_TryFromPrimitiveError v_Enum)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_4
(#v_Enum: Type0)
{| i1: Core.Marker.t_Copy v_Enum |}
{| i2: t_TryFromPrimitive v_Enum |}
{| i3: Core.Marker.t_Copy i2.f_Primitive |}
: Core.Marker.t_Copy (t_TryFromPrimitiveError v_Enum)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_6 (#v_Enum: Type0) {| i1: t_TryFromPrimitive v_Enum |}
: Core.Marker.t_StructuralPartialEq (t_TryFromPrimitiveError v_Enum)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_7
(#v_Enum: Type0)
{| i1: Core.Cmp.t_PartialEq v_Enum v_Enum |}
{| i2: t_TryFromPrimitive v_Enum |}
{| i3: Core.Cmp.t_PartialEq i2.f_Primitive i2.f_Primitive |}
: Core.Cmp.t_PartialEq (t_TryFromPrimitiveError v_Enum) (t_TryFromPrimitiveError v_Enum)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_8
(#v_Enum: Type0)
{| i1: Core.Cmp.t_Eq v_Enum |}
{| i2: t_TryFromPrimitive v_Enum |}
{| i3: Core.Cmp.t_Eq i2.f_Primitive |}
: Core.Cmp.t_Eq (t_TryFromPrimitiveError v_Enum)

val impl__new (#v_Enum: Type0) {| i1: t_TryFromPrimitive v_Enum |} (number: i1.f_Primitive)
: Prims.Pure (t_TryFromPrimitiveError v_Enum) Prims.l_True (fun _ -> Prims.l_True)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_1 (#v_Enum: Type0) {| i1: t_TryFromPrimitive v_Enum |}
: Core.Fmt.t_Debug (t_TryFromPrimitiveError v_Enum)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_2 (#v_Enum: Type0) {| i1: t_TryFromPrimitive v_Enum |}
: Core.Fmt.t_Display (t_TryFromPrimitiveError v_Enum)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_3 (#v_Enum: Type0) {| i1: t_TryFromPrimitive v_Enum |}
: Core.Error.t_Error (t_TryFromPrimitiveError v_Enum)
26
proofs/fstar/models/Prost.Encoding.Wire_type.fsti
Normal file
@@ -0,0 +1,26 @@
module Prost.Encoding.Wire_type
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

type t_WireType =
| WireType_Varint : t_WireType
| WireType_SixtyFourBit : t_WireType
| WireType_LengthDelimited : t_WireType
| WireType_StartGroup : t_WireType
| WireType_EndGroup : t_WireType
| WireType_ThirtyTwoBit : t_WireType

let discriminant_WireType_Varint: isize = mk_isize 0

let discriminant_WireType_SixtyFourBit: isize = mk_isize 1

let discriminant_WireType_LengthDelimited: isize = mk_isize 2

let discriminant_WireType_StartGroup: isize = mk_isize 3

let discriminant_WireType_EndGroup: isize = mk_isize 4

let discriminant_WireType_ThirtyTwoBit: isize = mk_isize 5

val t_WireType_cast_to_repr (x: t_WireType) : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True)
6
proofs/fstar/models/Prost.Encoding.fsti
Normal file
@@ -0,0 +1,6 @@
module Prost.Encoding
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

type t_DecodeContext = { f_recurse_count:u32 }
66
proofs/fstar/models/Prost.Error.fsti
Normal file
@@ -0,0 +1,66 @@
module Prost.Error
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

type t_Inner = {
f_description:Alloc.Borrow.t_Cow string;
f_stack:Alloc.Vec.t_Vec (string & string) Alloc.Alloc.t_Global
}

/// A Protobuf message decoding error.
/// `DecodeError` indicates that the input buffer does not contain a valid
/// Protobuf message. The error details should be considered 'best effort': in
/// general it is not possible to exactly pinpoint why data is malformed.
type t_DecodeError = { f_inner:Alloc.Boxed.t_Box t_Inner Alloc.Alloc.t_Global }

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl:Core.Clone.t_Clone t_DecodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_1:Core.Marker.t_StructuralPartialEq t_DecodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_2:Core.Cmp.t_PartialEq t_DecodeError t_DecodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_3:Core.Cmp.t_Eq t_DecodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_4:Core.Clone.t_Clone t_Inner

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_5:Core.Marker.t_StructuralPartialEq t_Inner

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_6:Core.Cmp.t_PartialEq t_Inner t_Inner

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_7:Core.Cmp.t_Eq t_Inner

/// A Protobuf message encoding error.
/// `EncodeError` always indicates that a message failed to encode because the
/// provided buffer had insufficient capacity. Message encoding is otherwise
/// infallible.
type t_EncodeError = {
f_required:usize;
f_remaining:usize
}

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_9:Core.Clone.t_Clone t_EncodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_8:Core.Marker.t_Copy t_EncodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_10:Core.Fmt.t_Debug t_EncodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_11:Core.Marker.t_StructuralPartialEq t_EncodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_12:Core.Cmp.t_PartialEq t_EncodeError t_EncodeError

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_13:Core.Cmp.t_Eq t_EncodeError
70
proofs/fstar/models/Prost.Message.fsti
Normal file
70
proofs/fstar/models/Prost.Message.fsti
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
module Prost.Message
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

let _ =
  (* This module has implicit dependencies, here we make them explicit. *)
  (* The implicit dependencies arise from typeclasses instances. *)
  let open Bytes.Buf.Buf_impl in
  let open Bytes.Buf.Buf_mut in
  ()

/// A Protocol Buffers message.
class t_Message (v_Self: Type0) = {
  [@@@ FStar.Tactics.Typeclasses.no_method]_super_7459769351467436346:Core.Fmt.t_Debug v_Self;
  [@@@ FStar.Tactics.Typeclasses.no_method]_super_10374730180605511532:Core.Marker.t_Send v_Self;
  [@@@ FStar.Tactics.Typeclasses.no_method]_super_6360119584534035317:Core.Marker.t_Sync v_Self;
  f_encode_pre:
      #impl_806524398_: Type0 ->
      {| i2: Bytes.Buf.Buf_mut.t_BufMut impl_806524398_ |} ->
      v_Self ->
      impl_806524398_
    -> Type0;
  f_encode_post:
      #impl_806524398_: Type0 ->
      {| i2: Bytes.Buf.Buf_mut.t_BufMut impl_806524398_ |} ->
      v_Self ->
      impl_806524398_ ->
      (impl_806524398_ & Core.Result.t_Result Prims.unit Prost.Error.t_EncodeError)
    -> Type0;
  f_encode:
      #impl_806524398_: Type0 ->
      {| i2: Bytes.Buf.Buf_mut.t_BufMut impl_806524398_ |} ->
      x0: v_Self ->
      x1: impl_806524398_
    -> Prims.Pure
        (impl_806524398_ & Core.Result.t_Result Prims.unit Prost.Error.t_EncodeError)
        (f_encode_pre #impl_806524398_ #i2 x0 x1)
        (fun result -> f_encode_post #impl_806524398_ #i2 x0 x1 result);
  f_encode_to_vec_pre:v_Self -> Type0;
  f_encode_to_vec_post:v_Self -> Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global -> Type0;
  f_encode_to_vec:x0: v_Self
    -> Prims.Pure (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)
        (f_encode_to_vec_pre x0)
        (fun result -> f_encode_to_vec_post x0 result);
  f_decode_pre:
      #impl_75985673_: Type0 ->
      {| i4: Core.Default.t_Default v_Self |} ->
      {| i5: Bytes.Buf.Buf_impl.t_Buf impl_75985673_ |} ->
      impl_75985673_
    -> Type0;
  f_decode_post:
      #impl_75985673_: Type0 ->
      {| i4: Core.Default.t_Default v_Self |} ->
      {| i5: Bytes.Buf.Buf_impl.t_Buf impl_75985673_ |} ->
      impl_75985673_ ->
      Core.Result.t_Result v_Self Prost.Error.t_DecodeError
    -> Type0;
  f_decode:
      #impl_75985673_: Type0 ->
      {| i4: Core.Default.t_Default v_Self |} ->
      {| i5: Bytes.Buf.Buf_impl.t_Buf impl_75985673_ |} ->
      x0: impl_75985673_
    -> Prims.Pure (Core.Result.t_Result v_Self Prost.Error.t_DecodeError)
        (f_decode_pre #impl_75985673_ #i4 #i5 x0)
        (fun result -> f_decode_post #impl_75985673_ #i4 #i5 x0 result);
  f_clear_pre:v_Self -> Type0;
  f_clear_post:v_Self -> v_Self -> Type0;
  f_clear:x0: v_Self -> Prims.Pure v_Self (f_clear_pre x0) (fun result -> f_clear_post x0 result)
}
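For orientation, the class above appears to model prost's `Message` trait (encode into a `BufMut`, encode to a `Vec<u8>`, decode from a `Buf`, clear back to the default value). A dependency-free Rust sketch of that encode/decode surface, using a toy fixed-width message instead of real protobuf varint encoding (illustrative only):

use std::convert::TryInto;

/// Illustrative stand-in for the modeled message trait.
trait ToyMessage: Default + Sized {
    fn encode_to_vec(&self) -> Vec<u8>;
    fn decode(buf: &[u8]) -> Result<Self, String>;
    fn clear(&mut self) { *self = Self::default(); }
}

/// A toy message with a single fixed-width field, so the example stays
/// self-contained instead of depending on protobuf wire-format details.
#[derive(Default, Debug, PartialEq)]
struct Counter { value: u32 }

impl ToyMessage for Counter {
    fn encode_to_vec(&self) -> Vec<u8> { self.value.to_le_bytes().to_vec() }
    fn decode(buf: &[u8]) -> Result<Self, String> {
        let bytes: [u8; 4] = buf.try_into().map_err(|_| "bad length".to_string())?;
        Ok(Counter { value: u32::from_le_bytes(bytes) })
    }
}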
8 proofs/fstar/models/Rand.Rng.fsti Normal file
@@ -0,0 +1,8 @@
module Rand.Rng
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
open Core
open FStar.Mul

class t_Rng (t: Type) = {
  dummy: unit
}
662 proofs/fstar/models/Sorted_vec.fsti Normal file
@@ -0,0 +1,662 @@
module Sorted_vec
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 15"
|
||||||
|
open Core
|
||||||
|
open FStar.Mul
|
||||||
|
|
||||||
|
/// Forward sorted vector
|
||||||
|
type t_SortedVec (v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} = {
|
||||||
|
f_vec:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global
|
||||||
|
}
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_18 (#v_T: Type0) {| i1: Core.Clone.t_Clone v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Clone.t_Clone (t_SortedVec v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_19 (#v_T: Type0) {| i1: Core.Fmt.t_Debug v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Fmt.t_Debug (t_SortedVec v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_22 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Marker.t_StructuralPartialEq (t_SortedVec v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_23 (#v_T: Type0) {| i1: Core.Cmp.t_PartialEq v_T v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Cmp.t_PartialEq (t_SortedVec v_T) (t_SortedVec v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_20 (#v_T: Type0) {| i1: Core.Cmp.t_Eq v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Cmp.t_Eq (t_SortedVec v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_24 (#v_T: Type0) {| i1: Core.Cmp.t_PartialOrd v_T v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Cmp.t_PartialOrd (t_SortedVec v_T) (t_SortedVec v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_21 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} : Core.Cmp.t_Ord (t_SortedVec v_T)
|
||||||
|
|
||||||
|
/// Forward sorted set
|
||||||
|
type t_SortedSet (v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} = { f_set:t_SortedVec v_T }
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_25 (#v_T: Type0) {| i1: Core.Clone.t_Clone v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Clone.t_Clone (t_SortedSet v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_26 (#v_T: Type0) {| i1: Core.Fmt.t_Debug v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Fmt.t_Debug (t_SortedSet v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_29 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Marker.t_StructuralPartialEq (t_SortedSet v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_30 (#v_T: Type0) {| i1: Core.Cmp.t_PartialEq v_T v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Cmp.t_PartialEq (t_SortedSet v_T) (t_SortedSet v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_27 (#v_T: Type0) {| i1: Core.Cmp.t_Eq v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Cmp.t_Eq (t_SortedSet v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_31 (#v_T: Type0) {| i1: Core.Cmp.t_PartialOrd v_T v_T |} {| i2: Core.Cmp.t_Ord v_T |}
|
||||||
|
: Core.Cmp.t_PartialOrd (t_SortedSet v_T) (t_SortedSet v_T)
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_28 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} : Core.Cmp.t_Ord (t_SortedSet v_T)
|
||||||
|
|
||||||
|
/// Value returned when find_or_insert is used.
|
||||||
|
type t_FindOrInsert =
|
||||||
|
| FindOrInsert_Found : usize -> t_FindOrInsert
|
||||||
|
| FindOrInsert_Inserted : usize -> t_FindOrInsert
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_32:Core.Marker.t_StructuralPartialEq t_FindOrInsert
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_33:Core.Cmp.t_PartialEq t_FindOrInsert t_FindOrInsert
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_34:Core.Cmp.t_PartialOrd t_FindOrInsert t_FindOrInsert
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_35:Core.Cmp.t_Eq t_FindOrInsert
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_36:Core.Cmp.t_Ord t_FindOrInsert
|
||||||
|
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_37:Core.Fmt.t_Debug t_FindOrInsert
|
||||||
|
|
||||||
|
(* [@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl_38:Core.Hash.t_Hash t_FindOrInsert *)
|
||||||
|
|
||||||
|
/// Converts from the binary_search result type into the FindOrInsert type
|
||||||
|
[@@ FStar.Tactics.Typeclasses.tcinstance]
|
||||||
|
val impl:Core.Convert.t_From t_FindOrInsert (Core.Result.t_Result usize usize)
|
||||||
|
|
||||||
|
/// Get the index of the element that was either found or inserted.
|
||||||
|
val impl_FindOrInsert__index (self: t_FindOrInsert)
|
||||||
|
: Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// If an equivalent element was found in the container, get the value of
|
||||||
|
/// its index. Otherwise get None.
|
||||||
|
val impl_FindOrInsert__found (self: t_FindOrInsert)
|
||||||
|
: Prims.Pure (Core.Option.t_Option usize) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// If the provided element was inserted into the container, get the value
|
||||||
|
/// of its index. Otherwise get None.
|
||||||
|
val impl_FindOrInsert__inserted (self: t_FindOrInsert)
|
||||||
|
: Prims.Pure (Core.Option.t_Option usize) Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Returns true if the element was found.
|
||||||
|
val impl_FindOrInsert__is_found (self: t_FindOrInsert)
|
||||||
|
: Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
/// Returns true if the element was inserted.
|
||||||
|
val impl_FindOrInsert__is_inserted (self: t_FindOrInsert)
|
||||||
|
: Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)
|
||||||
|
|
||||||
|
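The `From` instance documented above mirrors how the Rust sorted_vec crate turns `slice::binary_search`'s `Result<usize, usize>` into `FindOrInsert`. A small self-contained Rust sketch of that conversion (names illustrative, not the crate's code):

/// Illustrative mirror of the modeled FindOrInsert type.
#[derive(Debug, PartialEq, Eq)]
enum FindOrInsert {
    /// An equivalent element already existed at this index.
    Found(usize),
    /// The element was (or would be) inserted at this index.
    Inserted(usize),
}

impl From<Result<usize, usize>> for FindOrInsert {
    fn from(r: Result<usize, usize>) -> Self {
        match r {
            // binary_search: Ok(i) = element present at i, Err(i) = insertion point.
            Ok(i) => FindOrInsert::Found(i),
            Err(i) => FindOrInsert::Inserted(i),
        }
    }
}

impl FindOrInsert {
    /// Index of the element that was either found or inserted.
    fn index(&self) -> usize {
        match self {
            FindOrInsert::Found(i) | FindOrInsert::Inserted(i) => *i,
        }
    }
}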
val impl_2__new: #v_T: Type0 -> {| i1: Core.Cmp.t_Ord v_T |} -> Prims.unit
  -> Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True)

val impl_2__with_capacity (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (capacity: usize)
    : Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Uses `sort_unstable()` to sort in place.
val impl_2__from_unsorted
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (vec: Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
    : Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Insert an element into sorted position, returning the order index at which
/// it was placed.
val impl_2__insert (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedVec v_T) (element: v_T)
    : Prims.Pure (t_SortedVec v_T & usize) Prims.l_True (fun _ -> Prims.l_True)

/// Find the element and return the index with `Ok`, otherwise insert the
/// element and return the new element index with `Err`.
val impl_2__find_or_insert
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedVec v_T)
      (element: v_T)
    : Prims.Pure (t_SortedVec v_T & t_FindOrInsert) Prims.l_True (fun _ -> Prims.l_True)

/// Same as insert, except performance is O(1) when the element belongs at the
/// back of the container. This avoids an O(log(N)) search for inserting
/// elements at the back.
val impl_2__push (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedVec v_T) (element: v_T)
    : Prims.Pure (t_SortedVec v_T & usize) Prims.l_True (fun _ -> Prims.l_True)

/// Reserves additional capacity in the underlying vector.
/// See std::vec::Vec::reserve.
val impl_2__reserve
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedVec v_T)
      (additional: usize)
    : Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Same as find_or_insert, except performance is O(1) when the element
/// belongs at the back of the container.
val impl_2__find_or_push
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedVec v_T)
      (element: v_T)
    : Prims.Pure (t_SortedVec v_T & t_FindOrInsert) Prims.l_True (fun _ -> Prims.l_True)

val impl_2__remove_item
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedVec v_T)
      (item: v_T)
    : Prims.Pure (t_SortedVec v_T & Core.Option.t_Option v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Panics if index is out of bounds
val impl_2__remove_index
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedVec v_T)
      (index: usize)
    : Prims.Pure (t_SortedVec v_T & v_T) Prims.l_True (fun _ -> Prims.l_True)

val impl_2__pop (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedVec v_T)
    : Prims.Pure (t_SortedVec v_T & Core.Option.t_Option v_T) Prims.l_True (fun _ -> Prims.l_True)

val impl_2__clear (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedVec v_T)
    : Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True)

val impl_2__dedup (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedVec v_T)
    : Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True)
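The insert/find_or_insert/push family above models the sorted_vec crate's API, where mutating operations also return the index they touched and `push` has an O(1) fast path at the back. A condensed, std-only Rust sketch of the core idea (not the crate's actual implementation):

/// Minimal sketch of a forward-sorted vector: every mutation keeps `vec` sorted.
struct SortedVec<T: Ord> {
    vec: Vec<T>,
}

impl<T: Ord> SortedVec<T> {
    /// Insert into sorted position and return the index where the element landed.
    fn insert(&mut self, element: T) -> usize {
        let idx = match self.vec.binary_search(&element) {
            Ok(i) | Err(i) => i,
        };
        self.vec.insert(idx, element);
        idx
    }

    /// O(1) fast path when the element belongs at the back, as the doc above describes.
    fn push(&mut self, element: T) -> usize {
        match self.vec.last() {
            Some(last) if *last <= element => {
                self.vec.push(element);
                self.vec.len() - 1
            }
            _ => self.insert(element),
        }
    }
}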
(* item error backend: (DirectAndMut) The mutation of this [1m&mut[0m is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
#[inline()]
#[feature(register_tool)]
#[register_tool(_hax)]
fn impl_2__dedup_by_key<Anonymous: 'unk, T, F, K>(
    mut self: sorted_vec::t_SortedVec<T>,
    key: F,
) -> tuple0
where
    _: core::cmp::t_Ord<T>,
    _: core::ops::function::t_FnMut<F, tuple1<&mut T>>,
    F: core::ops::function::t_FnOnce<f_Output = K>,
    _: core::cmp::t_PartialEq<K, K>,
{
    {
        let _: tuple0 = { rust_primitives::hax::dropped_body };
        self
    }
}

Last AST:
/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Explicit_def_id.T.is_constructor = false;
    def_id =
    { Types.index = (0, 0); is_local = true; kind = Types.AssocFn;
      krate = "sorted_vec";
      parent =
      (Some { Types.contents =
              { Types.id = 0;
                value =
                { Types.index = (0, 0); is_local = true;
                  kind = Types.Impl {of_trait = false}; krate = "sorted_vec";
                  parent =
                  (Some { Types.contents =
                          { Types.id = 0;
                            value =
                            { Types.index = (0, 0); is_local = true;
                              kind = Types.Mod; krate = "sorted_vec";
                              parent = None; path = [] }
                            }
                          });
                  path = [{ Types.data = Types.Impl; disambiguator = 2 }] }
                }
              });
      path =
      [{ Types.data = Types.Impl; disambiguator = 2 };
       { Types.data = (Types.ValueNs "dedup_by_key"); disambiguator = 0 }]
      }
    };
  moved = None; suffix = None }) */
const _: () = ();
*)

(* val impl_2__drain
      (#v_T #v_R: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      {| i7: Core.Ops.Range.t_RangeBounds v_R usize |}
      (self: t_SortedVec v_T)
      (range: v_R)
    : Prims.Pure (t_SortedVec v_T & Alloc.Vec.Drain.t_Drain v_T Alloc.Alloc.t_Global)
      Prims.l_True
      (fun _ -> Prims.l_True) *)

(* val impl_2__retain
      (#v_T #v_F: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      {| i8: Core.Ops.Function.t_FnMut v_F v_T |}
      (self: t_SortedVec v_T)
      (f: v_F)
    : Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True) *)

/// NOTE: to_vec() is a slice method that is accessible through deref, use
/// this instead to avoid cloning
val impl_2__into_vec (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedVec v_T)
    : Prims.Pure (Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global) Prims.l_True (fun _ -> Prims.l_True)

(* item error backend: (DirectAndMut) The mutation of this [1m&mut[0m is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
/// Apply a closure mutating the sorted vector and use `sort_unstable()`
/// to re-sort the mutated vector
#[feature(register_tool)]
#[register_tool(_hax)]
fn impl_2__mutate_vec<Anonymous: 'unk, T, F, O>(
    mut self: sorted_vec::t_SortedVec<T>,
    f: F,
) -> O
where
    _: core::cmp::t_Ord<T>,
    _: core::ops::function::t_FnOnce<
        F,
        tuple1<&mut alloc::vec::t_Vec<T, alloc::alloc::t_Global>>,
    >,
    F: core::ops::function::t_FnOnce<f_Output = O>,
{
    {
        let hax_temp_output: O = { rust_primitives::hax::dropped_body };
        Tuple2(self, hax_temp_output)
    }
}

Last AST:
/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Explicit_def_id.T.is_constructor = false;
    def_id =
    { Types.index = (0, 0); is_local = true; kind = Types.AssocFn;
      krate = "sorted_vec";
      parent =
      (Some { Types.contents =
              { Types.id = 0;
                value =
                { Types.index = (0, 0); is_local = true;
                  kind = Types.Impl {of_trait = false}; krate = "sorted_vec";
                  parent =
                  (Some { Types.contents =
                          { Types.id = 0;
                            value =
                            { Types.index = (0, 0); is_local = true;
                              kind = Types.Mod; krate = "sorted_vec";
                              parent = None; path = [] }
                            }
                          });
                  path = [{ Types.data = Types.Impl; disambiguator = 2 }] }
                }
              });
      path =
      [{ Types.data = Types.Impl; disambiguator = 2 };
       { Types.data = (Types.ValueNs "mutate_vec"); disambiguator = 0 }]
      }
    };
  moved = None; suffix = None }) */
const _: () = ();
*)

/// The caller must ensure that the provided vector is already sorted.
val impl_2__from_sorted
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (vec: Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
    : Prims.Pure (t_SortedVec v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Unsafe access to the underlying vector. The caller must ensure that any
/// changes to the values in the vector do not impact the ordering of the
/// elements inside, or else this container will misbehave.
(* val impl_2__get_unchecked_mut_vec (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedVec v_T)
    : Prims.Pure Rust_primitives.Hax.failure Prims.l_True (fun _ -> Prims.l_True) *)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_3 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} : Core.Default.t_Default (t_SortedVec v_T)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_4 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |}
    : Core.Convert.t_From (t_SortedVec v_T) (Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_5 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Cmp.t_Ord v_T)
    : Core.Ops.Deref.t_Deref (t_SortedVec v_T) =
  {
    f_Target = Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global;
    (* f_deref_pre = (fun (self: t_SortedVec v_T) -> true);
    f_deref_post
    =
    (fun (self: t_SortedVec v_T) (out: Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global) -> true); *)
    f_deref = fun (self: t_SortedVec v_T) -> self.f_vec
  }
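The Deref instance above only ever hands out the underlying vector for reading; mutable access stays commented out (see get_unchecked_mut_vec), since arbitrary mutation could break the sort order. A self-contained Rust sketch of that design choice (illustrative, independent of the crate's code):

use std::ops::Deref;

/// Self-contained variant of the earlier SortedVec sketch.
struct SortedVec<T: Ord> {
    vec: Vec<T>,
}

// Shared (read-only) access to the underlying vector via Deref; there is
// deliberately no DerefMut, so callers cannot reorder elements through this path.
impl<T: Ord> Deref for SortedVec<T> {
    type Target = Vec<T>;
    fn deref(&self) -> &Vec<T> {
        &self.vec
    }
}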
(* [@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_6 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |}
    : Core.Iter.Traits.Collect.t_Extend (t_SortedVec v_T) v_T *)

(* [@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_7 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} {| i2: Core.Hash.t_Hash v_T |}
    : Core.Hash.t_Hash (t_SortedVec v_T) *)

val impl_10__new: #v_T: Type0 -> {| i1: Core.Cmp.t_Ord v_T |} -> Prims.unit
  -> Prims.Pure (t_SortedSet v_T) Prims.l_True (fun _ -> Prims.l_True)

val impl_10__with_capacity (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (capacity: usize)
    : Prims.Pure (t_SortedSet v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Uses `sort_unstable()` to sort in place and `dedup()` to remove
/// duplicates.
val impl_10__from_unsorted
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (vec: Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
    : Prims.Pure (t_SortedSet v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Insert an element into sorted position, returning the order index at which
/// it was placed. If an existing item was found it will be returned.
val impl_10__replace
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedSet v_T)
      (element: v_T)
    : Prims.Pure (t_SortedSet v_T & (usize & Core.Option.t_Option v_T))
      Prims.l_True
      (fun _ -> Prims.l_True)

/// Find the element and return the index with `Ok`, otherwise insert the
/// element and return the new element index with `Err`.
val impl_10__find_or_insert
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedSet v_T)
      (element: v_T)
    : Prims.Pure (t_SortedSet v_T & t_FindOrInsert) Prims.l_True (fun _ -> Prims.l_True)

/// Same as replace, except performance is O(1) when the element belongs at
/// the back of the container. This avoids an O(log(N)) search for inserting
/// elements at the back.
val impl_10__push (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedSet v_T) (element: v_T)
    : Prims.Pure (t_SortedSet v_T & (usize & Core.Option.t_Option v_T))
      Prims.l_True
      (fun _ -> Prims.l_True)

/// Reserves additional capacity in the underlying vector.
/// See std::vec::Vec::reserve.
val impl_10__reserve
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedSet v_T)
      (additional: usize)
    : Prims.Pure (t_SortedSet v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Same as find_or_insert, except performance is O(1) when the element
/// belongs at the back of the container.
val impl_10__find_or_push
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedSet v_T)
      (element: v_T)
    : Prims.Pure (t_SortedSet v_T & t_FindOrInsert) Prims.l_True (fun _ -> Prims.l_True)
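`replace` above is the set-flavoured insert: duplicates are not kept, and the displaced element (if any) is handed back along with its index. A std-only Rust sketch of that semantics (illustrative, not the crate's code):

/// Minimal sketch of a sorted set backed by a sorted, deduplicated Vec.
struct SortedSet<T: Ord> {
    vec: Vec<T>,
}

impl<T: Ord> SortedSet<T> {
    /// Insert in sorted position; if an equal element already exists,
    /// swap it out and return it together with its index.
    fn replace(&mut self, element: T) -> (usize, Option<T>) {
        match self.vec.binary_search(&element) {
            Ok(i) => {
                let old = std::mem::replace(&mut self.vec[i], element);
                (i, Some(old))
            }
            Err(i) => {
                self.vec.insert(i, element);
                (i, None)
            }
        }
    }
}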
val impl_10__remove_item
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedSet v_T)
      (item: v_T)
    : Prims.Pure (t_SortedSet v_T & Core.Option.t_Option v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Panics if index is out of bounds
val impl_10__remove_index
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedSet v_T)
      (index: usize)
    : Prims.Pure (t_SortedSet v_T & v_T) Prims.l_True (fun _ -> Prims.l_True)

val impl_10__pop (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedSet v_T)
    : Prims.Pure (t_SortedSet v_T & Core.Option.t_Option v_T) Prims.l_True (fun _ -> Prims.l_True)

val impl_10__clear (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedSet v_T)
    : Prims.Pure (t_SortedSet v_T) Prims.l_True (fun _ -> Prims.l_True)

(* val impl_10__drain
      (#v_T #v_R: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      {| i3: Core.Ops.Range.t_RangeBounds v_R usize |}
      (self: t_SortedSet v_T)
      (range: v_R)
    : Prims.Pure (t_SortedSet v_T & Alloc.Vec.Drain.t_Drain v_T Alloc.Alloc.t_Global)
      Prims.l_True
      (fun _ -> Prims.l_True) *)

(* val impl_10__retain
      (#v_T #v_F: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      {| i5: Core.Ops.Function.t_FnMut v_F v_T |}
      (self: t_SortedSet v_T)
      (f: v_F)
    : Prims.Pure (t_SortedSet v_T) Prims.l_True (fun _ -> Prims.l_True) *)

/// NOTE: to_vec() is a slice method that is accessible through deref, use
/// this instead to avoid cloning
val impl_10__into_vec (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} (self: t_SortedSet v_T)
    : Prims.Pure (Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global) Prims.l_True (fun _ -> Prims.l_True)

(* item error backend: (DirectAndMut) The mutation of this [1m&mut[0m is not allowed here.

Last available AST for this item:

#[_hax::json("\"Erased\"")]
/// Apply a closure mutating the sorted vector and use `sort_unstable()`
/// to re-sort the mutated vector and `dedup()` to remove any duplicate
/// values
#[feature(register_tool)]
#[register_tool(_hax)]
fn impl_10__mutate_vec<Anonymous: 'unk, T, F, O>(
    mut self: sorted_vec::t_SortedSet<T>,
    f: F,
) -> O
where
    _: core::cmp::t_Ord<T>,
    _: core::ops::function::t_FnOnce<
        F,
        tuple1<&mut alloc::vec::t_Vec<T, alloc::alloc::t_Global>>,
    >,
    F: core::ops::function::t_FnOnce<f_Output = O>,
{
    {
        let hax_temp_output: O = { rust_primitives::hax::dropped_body };
        Tuple2(self, hax_temp_output)
    }
}

Last AST:
/** print_rust: pitem: not implemented (item: { Concrete_ident.T.def_id =
  { Explicit_def_id.T.is_constructor = false;
    def_id =
    { Types.index = (0, 0); is_local = true; kind = Types.AssocFn;
      krate = "sorted_vec";
      parent =
      (Some { Types.contents =
              { Types.id = 0;
                value =
                { Types.index = (0, 0); is_local = true;
                  kind = Types.Impl {of_trait = false}; krate = "sorted_vec";
                  parent =
                  (Some { Types.contents =
                          { Types.id = 0;
                            value =
                            { Types.index = (0, 0); is_local = true;
                              kind = Types.Mod; krate = "sorted_vec";
                              parent = None; path = [] }
                            }
                          });
                  path = [{ Types.data = Types.Impl; disambiguator = 10 }] }
                }
              });
      path =
      [{ Types.data = Types.Impl; disambiguator = 10 };
       { Types.data = (Types.ValueNs "mutate_vec"); disambiguator = 0 }]
      }
    };
  moved = None; suffix = None }) */
const _: () = ();
*)

/// The caller must ensure that the provided vector is already sorted and
/// deduped.
val impl_10__from_sorted
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (vec: Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
    : Prims.Pure (t_SortedSet v_T) Prims.l_True (fun _ -> Prims.l_True)

/// Unsafe access to the underlying vector. The caller must ensure that any
/// changes to the values in the vector do not impact the ordering of the
/// elements inside, or else this container will misbehave.
(* val impl_10__get_unchecked_mut_vec
      (#v_T: Type0)
      {| i1: Core.Cmp.t_Ord v_T |}
      (self: t_SortedSet v_T)
    : Prims.Pure Rust_primitives.Hax.failure Prims.l_True (fun _ -> Prims.l_True) *)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_11 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} : Core.Default.t_Default (t_SortedSet v_T)

[@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_12 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |}
    : Core.Convert.t_From (t_SortedSet v_T) (Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)

[@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_13 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Cmp.t_Ord v_T)
    : Core.Ops.Deref.t_Deref (t_SortedSet v_T) =
  {
    f_Target = t_SortedVec v_T;
    (* f_deref_pre = (fun (self: t_SortedSet v_T) -> true);
    f_deref_post = (fun (self: t_SortedSet v_T) (out: t_SortedVec v_T) -> true); *)
    f_deref = fun (self: t_SortedSet v_T) -> self.f_set
  }

(* [@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_14 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |}
    : Core.Iter.Traits.Collect.t_Extend (t_SortedSet v_T) v_T *)

(* [@@ FStar.Tactics.Typeclasses.tcinstance]
val impl_15 (#v_T: Type0) {| i1: Core.Cmp.t_Ord v_T |} {| i2: Core.Hash.t_Hash v_T |}
    : Core.Hash.t_Hash (t_SortedSet v_T) *)

(* [@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_8 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Cmp.t_Ord v_T)
    : Core.Iter.Traits.Collect.t_IntoIterator (t_SortedVec v_T) =
  {
    f_Item = v_T;
    f_IntoIter = Alloc.Vec.Into_iter.t_IntoIter v_T Alloc.Alloc.t_Global;
    f_IntoIter_8492263130362933403 = FStar.Tactics.Typeclasses.solve;
    f_into_iter_pre = (fun (self: t_SortedVec v_T) -> true);
    f_into_iter_post
    =
    (fun (self: t_SortedVec v_T) (out: Alloc.Vec.Into_iter.t_IntoIter v_T Alloc.Alloc.t_Global) ->
        true);
    f_into_iter
    =
    fun (self: t_SortedVec v_T) ->
      Core.Iter.Traits.Collect.f_into_iter #(Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
        #FStar.Tactics.Typeclasses.solve
        self.f_vec
  } *)

(* [@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_9 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Cmp.t_Ord v_T)
    : Core.Iter.Traits.Collect.t_IntoIterator (t_SortedVec v_T) =
  {
    f_Item = v_T;
    f_IntoIter = Core.Slice.Iter.t_Iter v_T;
    f_IntoIter_8492263130362933403 = FStar.Tactics.Typeclasses.solve;
    f_into_iter_pre = (fun (self: t_SortedVec v_T) -> true);
    f_into_iter_post = (fun (self: t_SortedVec v_T) (out: Core.Slice.Iter.t_Iter v_T) -> true);
    f_into_iter
    =
    fun (self: t_SortedVec v_T) ->
      Core.Slice.impl__iter #v_T
        (Core.Ops.Deref.f_deref #(Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
            #FStar.Tactics.Typeclasses.solve
            self.f_vec
          <:
          t_Slice v_T)
  } *)

(* [@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_17 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Cmp.t_Ord v_T)
    : Core.Iter.Traits.Collect.t_IntoIterator (t_SortedSet v_T) =
  {
    f_Item = v_T;
    f_IntoIter = Core.Slice.Iter.t_Iter v_T;
    f_IntoIter_8492263130362933403 = FStar.Tactics.Typeclasses.solve;
    f_into_iter_pre = (fun (self: t_SortedSet v_T) -> true);
    f_into_iter_post = (fun (self: t_SortedSet v_T) (out: Core.Slice.Iter.t_Iter v_T) -> true);
    f_into_iter
    =
    fun (self: t_SortedSet v_T) ->
      Core.Slice.impl__iter #v_T
        (Core.Ops.Deref.f_deref #(Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
            #FStar.Tactics.Typeclasses.solve
            (Core.Ops.Deref.f_deref #(t_SortedVec v_T) #FStar.Tactics.Typeclasses.solve self.f_set
              <:
              Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global)
          <:
          t_Slice v_T)
  } *)

(* [@@ FStar.Tactics.Typeclasses.tcinstance]
let impl_16 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Cmp.t_Ord v_T)
    : Core.Iter.Traits.Collect.t_IntoIterator (t_SortedSet v_T) =
  {
    f_Item = v_T;
    f_IntoIter = Alloc.Vec.Into_iter.t_IntoIter v_T Alloc.Alloc.t_Global;
    f_IntoIter_8492263130362933403 = FStar.Tactics.Typeclasses.solve;
    f_into_iter_pre = (fun (self: t_SortedSet v_T) -> true);
    f_into_iter_post
    =
    (fun (self: t_SortedSet v_T) (out: Alloc.Vec.Into_iter.t_IntoIter v_T Alloc.Alloc.t_Global) ->
        true);
    f_into_iter
    =
    fun (self: t_SortedSet v_T) ->
      Core.Iter.Traits.Collect.f_into_iter #(t_SortedVec v_T)
        #FStar.Tactics.Typeclasses.solve
        self.f_set
  } *)
221 proofs/fstar/models/Spec.GF16.fst Normal file
@@ -0,0 +1,221 @@
module Spec.GF16
open Core

(** Boolean Operations **)

let bool_xor (x:bool) (y:bool) : bool =
  match (x,y) with
  | (true, true) -> false
  | (false, false) -> false
  | (true, false) -> true
  | (false, true) -> true

let bool_or (x:bool) (y:bool) : bool = x || y

let bool_and (x:bool) (y:bool) : bool = x && y

let bool_not (x:bool) : bool = not x

(** Sequence Operations **)

(* The basic definition of a sequence as equivalent to a map function *)
assume val createi #a (len:nat) (f: (i:nat{i < len}) -> a)
  : x:Seq.seq a{Seq.length x == len /\ (forall i. Seq.index x i == f i)}

let (.[]) #a (x:Seq.seq a) (i:nat{i < Seq.length x}) = Seq.index x i

let map2 #a #b #c (f: a -> b -> c) (x: Seq.seq a) (y: Seq.seq b{Seq.length x == Seq.length y})
  : r:Seq.seq c{Seq.length r == Seq.length x} =
  createi (Seq.length x) (fun i -> f x.[i] y.[i])

(** Bit Vectors **)

type bv (n:nat) = x:Seq.seq bool{Seq.length x == n}

let zero (#n:nat) : bv n = createi n (fun i -> false)

let lift (#n:nat) (x: bv n) (k:nat{k >= n}) : bv k =
  createi k (fun i -> if i < n then x.[i] else false)

let lower1 (#n:pos) (x: bv n{x.[n-1] = false}) : bv (n-1) =
  createi (n-1) (fun i -> x.[i])

let rec lower (#n:nat) (x: bv n) (k:nat{k <= n /\ (forall j. (j >= k /\ j < n) ==> x.[j] = false)}) : bv k =
  if n = k then x
  else lower (lower1 x) k

let bv_eq_intro #n (x y: bv n) :
  Lemma (requires (forall (i:nat). i < n ==> x.[i] = y.[i]))
        (ensures x == y) =
  Seq.lemma_eq_intro x y

(** Galois Field Arithmetic **)

(* Addition and Subtraction *)

let max i j = if i < j then j else i

let gf_add #n #m (x: bv n) (y: bv m) : bv (max n m) =
  map2 bool_xor (lift x (max n m)) (lift y (max n m))

let gf_sub #n #m (x: bv n) (y: bv m) : bv (max n m) =
  gf_add x y

let lemma_add_zero (#n:nat) (x: bv n):
  Lemma (gf_add x (zero #n) == x /\ gf_add (zero #n) x == x) =
  bv_eq_intro (gf_add x (zero #n)) x;
  bv_eq_intro (gf_add (zero #n) x) x

let lemma_add_lift (#n:nat) (#k:nat{k >= n}) (x: bv n) (y:bv k):
  Lemma (gf_add x y == gf_add (lift x k) y /\
         gf_add y x == gf_add y (lift x k)) =
  bv_eq_intro (gf_add x y) (gf_add (lift x k) y);
  bv_eq_intro (gf_add y x) (gf_add y (lift x k))

(* Polynomial (carry-less) Multiplication *)

let poly_mul_x_k #n (x: bv n) (k:nat) : bv (n+k) =
  createi (n+k) (fun i -> if i < k then false else x.[i-k])

let rec poly_mul_i #n (x: bv n) (y: bv n) (i: nat{i <= n})
  : Tot (bv (n+n)) (decreases i) =
  if i = 0 then zero #(n+n)
  else
    let prev = poly_mul_i x y (i-1) in
    if y.[i-1] then
      gf_add prev (poly_mul_x_k x (i-1))
    else prev

let poly_mul #n (x y: bv n) : bv (n+n) =
  poly_mul_i x y n
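poly_mul above specifies carry-less (GF(2)[x]) multiplication as a shift-and-XOR accumulation over the bits of y. A small Rust sketch of the same computation on machine integers, for intuition (illustrative only, not the crate's implementation):

/// Carry-less multiplication of two 16-bit polynomials over GF(2),
/// mirroring poly_mul_i: for every set bit i of y, XOR in (x << i).
fn clmul16(x: u16, y: u16) -> u32 {
    let mut acc: u32 = 0;
    for i in 0..16 {
        if (y >> i) & 1 == 1 {
            acc ^= (x as u32) << i;
        }
    }
    acc
}

// Example: (x + 1) * (x + 1) = x^2 + 1 in GF(2)[x], i.e. 0b11 * 0b11 = 0b101.
fn main() {
    assert_eq!(clmul16(0b11, 0b11), 0b101);
}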
(* Galois Field Assumptions *)

class galois_field = {
  n: nat;
  norm: #k:nat -> bv k -> bv n;
  irred: p:bv (n+1){p.[n] /\ norm p == zero #n};
  lemma_norm_lower1: #m:pos -> (x: bv m) -> Lemma(x.[m-1] = false ==> norm x == norm (lower1 x));
  lemma_norm_lift: (#m:nat{m <= n}) -> (x: bv m) -> Lemma(norm x == lift x n);
  lemma_norm_add: (#m: nat) -> (#o: nat) -> (x: bv m) -> (y: bv o) -> Lemma(norm (gf_add x y) = gf_add (norm x) (norm y));
  lemma_norm_mul_x_k: (#m: nat) -> (x: bv m) -> (k:nat) -> Lemma(norm (poly_mul_x_k x k) == norm (poly_mul_x_k (norm x) k));
}

(* Reduction *)

assume val poly_reduce (#gf: galois_field) (#m:nat) (x:bv m)
  : y:bv n{y == norm x}

let gf_mul (#gf: galois_field) (x:bv n) (y: bv n) : bv n =
  poly_reduce (poly_mul x y)

(* Lemmas *)
let rec lemma_norm_zero (#gf: galois_field) (k:nat):
  Lemma (gf.norm (zero #k) == zero #gf.n) =
  if k <= gf.n then (
    gf.lemma_norm_lift (zero #k);
    bv_eq_intro (lift (zero #k) n) (zero #n))
  else (
    assert (k > 0);
    let zero_k_minus_1 = lower1 (zero #k) in
    gf.lemma_norm_lower1 (zero #k);
    lemma_norm_zero #gf (k-1);
    bv_eq_intro (lower1 (zero #k)) (zero #(k-1))
  )

let lemma_norm_irred_mul_x_k (#gf: galois_field) (k:nat):
  Lemma (gf.norm (poly_mul_x_k irred k) == zero #gf.n) =
  lemma_norm_mul_x_k irred k;
  bv_eq_intro (poly_mul_x_k zero k) (zero #(n+k));
  lemma_norm_zero #gf (n+k)

let rec lemma_norm_lower (#gf: galois_field) (m:nat) (x:bv m):
  Lemma
    (requires (m >= gf.n /\ (forall j. (j >= n /\ j < m) ==> x.[j] = false)))
    (ensures (gf.norm (lower x n) == gf.norm x)) =
  if n = m then ()
  else (
    lemma_norm_lower1 x;
    lemma_norm_lower #gf (m-1) (lower1 x)
  )

(** Integers as Bit Vectors **)

(* Mappings between machine integers and int ops to bit vectors *)

assume val to_bv #t (u: int_t t) : bv (bits t)
// Concretely: to_bv u -> createi (bits t) (fun i -> (v u / pow2 i) % 2 = 1)

(* Axioms about integer operations *)

assume val zero_lemma #t:
  Lemma (to_bv ( mk_int #t 0 ) == zero #(bits t))

assume val xor_lemma #t (x: int_t t) (y: int_t t):
  Lemma (to_bv ( x ^. y) == map2 bool_xor (to_bv x) (to_bv y))

assume val or_lemma #t (x: int_t t) (y: int_t t):
  Lemma (to_bv ( x |. y) == map2 bool_or (to_bv x) (to_bv y))

assume val and_lemma #t (x: int_t t) (y: int_t t):
  Lemma (to_bv ( x &. y) == map2 bool_and (to_bv x) (to_bv y))

assume val shift_left_lemma #t #t' (x: int_t t) (y: int_t t'):
  Lemma
    (requires (v y >= 0 /\ v y < bits t))
    (ensures to_bv ( x <<! y) ==
             createi (bits t) (fun i -> if i < v y then false else (to_bv x).[i - v y]))

assume val up_cast_lemma #t (#t':inttype{bits t' >= bits t}) (x:int_t t):
  Lemma (to_bv (cast (x <: int_t t) <: int_t t') == lift (to_bv x) (bits t'))

(* Lemmas linking integer arithmetic to bit-vector operations *)

assume val shift_left_bit_select_lemma #t #t' (x: int_t t) (i: int_t t'{v i >= 0 /\ v i < bits t}):
  Lemma (((x &. (mk_int #t 1 <<! i)) == mk_int #t 0) <==>
         ((to_bv x).[v i] == false))

(* GF16 Lemmas *)

assume val up_cast_shift_left_lemma (x: u16) (shift: u32{v shift < 16}):
  Lemma (to_bv ((cast x <: u32) <<! shift) ==
         lift (poly_mul_x_k (to_bv x) (v shift)) 32)

let xor_is_gf_add_lemma #t (x y: int_t t):
  Lemma (to_bv (x ^. y) == gf_add (to_bv x) (to_bv y)) =
  xor_lemma x y;
  bv_eq_intro (to_bv (x ^. y)) (gf_add (to_bv x) (to_bv y))

(* GF16 Implementation *)

instance gf16: galois_field = {
  n = 16;
  irred = to_bv (mk_i16 0x1100b);
  norm = admit();
  lemma_norm_lower1 = (fun x -> admit());
  lemma_norm_lift = (fun x -> admit());
  lemma_norm_add = (fun x -> fun y -> admit());
  lemma_norm_mul_x_k = (fun x -> fun k -> admit())
}

let gf16_mul = gf_mul #gf16

(*
let rec clmul_aux #n1 #n2 (x: bv n1) (y: bv n2) (i: nat{i <= n2}):
  Tot (bv (n1+n2)) (decreases (n2 - i)) =
  if i = n2 then zero
  else
    let next = clmul_aux x y (i+1) in
    if y.[i] then
      add (mul_x_k x i) next
    else next
*)

(*
  bv_intro (add x (zero #n)) x;
  bv_intro (add (zero #n) x) x
*)
64 proofs/fstar/models/Spec.MLKEM.Instances.fst Normal file
@@ -0,0 +1,64 @@
module Spec.MLKEM.Instances
#set-options "--fuel 0 --ifuel 1 --z3rlimit 30"
open FStar.Mul
open Core
open Spec.Utils
open Spec.MLKEM.Math
open Spec.MLKEM

(** MLKEM-768 Instantiation *)

let mlkem768_rank : rank = sz 3

#set-options "--z3rlimit 350"
let mlkem768_generate_keypair (randomness:t_Array u8 (sz 64)):
                              (t_Array u8 (sz 2400) & t_Array u8 (sz 1184)) & bool =
  ind_cca_generate_keypair mlkem768_rank randomness

let mlkem768_encapsulate (public_key: t_Array u8 (sz 1184)) (randomness: t_Array u8 (sz 32)):
                         (t_Array u8 (sz 1088) & t_Array u8 (sz 32)) & bool =
  assert (v_CPA_CIPHERTEXT_SIZE mlkem768_rank == sz 1088);
  ind_cca_encapsulate mlkem768_rank public_key randomness

let mlkem768_decapsulate (secret_key: t_Array u8 (sz 2400)) (ciphertext: t_Array u8 (sz 1088)):
                         t_Array u8 (sz 32) & bool =
  ind_cca_decapsulate mlkem768_rank secret_key ciphertext

(** MLKEM-1024 Instantiation *)

let mlkem1024_rank = sz 4

let mlkem1024_generate_keypair (randomness:t_Array u8 (sz 64)):
                               (t_Array u8 (sz 3168) & t_Array u8 (sz 1568)) & bool =
  ind_cca_generate_keypair mlkem1024_rank randomness

let mlkem1024_encapsulate (public_key: t_Array u8 (sz 1568)) (randomness: t_Array u8 (sz 32)):
                          (t_Array u8 (sz 1568) & t_Array u8 (sz 32)) & bool =
  assert (v_CPA_CIPHERTEXT_SIZE mlkem1024_rank == sz 1568);
  ind_cca_encapsulate mlkem1024_rank public_key randomness

let mlkem1024_decapsulate (secret_key: t_Array u8 (sz 3168)) (ciphertext: t_Array u8 (sz 1568)):
                          t_Array u8 (sz 32) & bool =
  ind_cca_decapsulate mlkem1024_rank secret_key ciphertext

(** MLKEM-512 Instantiation *)

let mlkem512_rank : rank = sz 2

let mlkem512_generate_keypair (randomness:t_Array u8 (sz 64)):
                              (t_Array u8 (sz 1632) & t_Array u8 (sz 800)) & bool =
  ind_cca_generate_keypair mlkem512_rank randomness

let mlkem512_encapsulate (public_key: t_Array u8 (sz 800)) (randomness: t_Array u8 (sz 32)):
                         (t_Array u8 (sz 768) & t_Array u8 (sz 32)) & bool =
  assert (v_CPA_CIPHERTEXT_SIZE mlkem512_rank == sz 768);
  ind_cca_encapsulate mlkem512_rank public_key randomness

let mlkem512_decapsulate (secret_key: t_Array u8 (sz 1632)) (ciphertext: t_Array u8 (sz 768)):
                         t_Array u8 (sz 32) & bool =
  ind_cca_decapsulate mlkem512_rank secret_key ciphertext
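The fixed array sizes above are the standard FIPS 203 parameter-set sizes. A small Rust sketch that derives them from the rank k and the compression parameters (d_u, d_v), as a sanity check against the constants in the instantiations (illustrative; the constants themselves come from the standard):

/// Derive ML-KEM byte sizes from the rank k and compression parameters,
/// following FIPS 203: ek = 384k + 32, dk = 768k + 96, ct = 32(du*k + dv).
fn mlkem_sizes(k: usize, du: usize, dv: usize) -> (usize, usize, usize) {
    let public_key = 384 * k + 32;
    let secret_key = 768 * k + 96;
    let ciphertext = 32 * (du * k + dv);
    (public_key, secret_key, ciphertext)
}

fn main() {
    assert_eq!(mlkem_sizes(2, 10, 4), (800, 1632, 768));   // ML-KEM-512
    assert_eq!(mlkem_sizes(3, 10, 4), (1184, 2400, 1088)); // ML-KEM-768
    assert_eq!(mlkem_sizes(4, 11, 5), (1568, 3168, 1568)); // ML-KEM-1024
}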
299 proofs/fstar/models/Spec.MLKEM.Math.fst Normal file
@@ -0,0 +1,299 @@
module Spec.MLKEM.Math
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"

open FStar.Mul
open Core
open Spec.Utils

let v_FIELD_MODULUS: i32 = mk_i32 3329
let is_rank (r:usize) = r == sz 2 \/ r == sz 3 \/ r == sz 4

type rank = r:usize{is_rank r}

(** MLKEM Math and Sampling *)

type field_element = n:nat{n < v v_FIELD_MODULUS}
type polynomial = t_Array field_element (sz 256)
type vector (r:rank) = t_Array polynomial r
type matrix (r:rank) = t_Array (vector r) r

val field_add: field_element -> field_element -> field_element
let field_add a b = (a + b) % v v_FIELD_MODULUS

val field_sub: field_element -> field_element -> field_element
let field_sub a b = (a - b) % v v_FIELD_MODULUS

val field_neg: field_element -> field_element
let field_neg a = (0 - a) % v v_FIELD_MODULUS

val field_mul: field_element -> field_element -> field_element
let field_mul a b = (a * b) % v v_FIELD_MODULUS

val poly_add: polynomial -> polynomial -> polynomial
let poly_add a b = map2 field_add a b

val poly_sub: polynomial -> polynomial -> polynomial
let poly_sub a b = map2 field_sub a b

let int_to_spec_fe (m:int) : field_element =
  let m_v = m % v v_FIELD_MODULUS in
  assert (m_v > - v v_FIELD_MODULUS);
  if m_v < 0 then
    m_v + v v_FIELD_MODULUS
  else m_v

(* Convert concrete code types to spec types *)

let to_spec_fe (m:i16) : field_element =
  int_to_spec_fe (v m)

let to_spec_array #len (m:t_Array i16 len) : t_Array field_element len =
  createi #field_element len (fun i -> to_spec_fe (m.[i]))

let to_spec_poly (m:t_Array i16 (sz 256)) : polynomial =
  to_spec_array m

let to_spec_vector (#r:rank)
                   (m:t_Array (t_Array i16 (sz 256)) r)
    : (vector r) =
  createi r (fun i -> to_spec_poly (m.[i]))

let to_spec_matrix (#r:rank)
                   (m:t_Array (t_Array (t_Array i16 (sz 256)) r) r)
    : (matrix r) =
  createi r (fun i -> to_spec_vector (m.[i]))

(* Specifying NTT:
   bitrev7 = [int('{:07b}'.format(x)[::-1], 2) for x in range(0,128)]
   zetas = [pow(17,x) % 3329 for x in bitrev7]
   zetas_mont = [pow(2,16) * x % 3329 for x in zetas]
   zetas_mont_r = [(x - 3329 if x > 1664 else x) for x in zetas_mont]

   bitrev7 is
[0, 64, 32, 96, 16, 80, 48, 112, 8, 72, 40, 104, 24, 88, 56, 120, 4, 68, 36, 100, 20, 84, 52, 116, 12, 76, 44, 108, 28, 92, 60, 124, 2, 66, 34, 98, 18, 82, 50, 114, 10, 74, 42, 106, 26, 90, 58, 122, 6, 70, 38, 102, 22, 86, 54, 118, 14, 78, 46, 110, 30, 94, 62, 126, 1, 65, 33, 97, 17, 81, 49, 113, 9, 73, 41, 105, 25, 89, 57, 121, 5, 69, 37, 101, 21, 85, 53, 117, 13, 77, 45, 109, 29, 93, 61, 125, 3, 67, 35, 99, 19, 83, 51, 115, 11, 75, 43, 107, 27, 91, 59, 123, 7, 71, 39, 103, 23, 87, 55, 119, 15, 79, 47, 111, 31, 95, 63, 127]

   zetas = 17^bitrev7 is
[1, 1729, 2580, 3289, 2642, 630, 1897, 848, 1062, 1919, 193, 797, 2786, 3260, 569, 1746, 296, 2447, 1339, 1476, 3046, 56, 2240, 1333, 1426, 2094, 535, 2882, 2393, 2879, 1974, 821, 289, 331, 3253, 1756, 1197, 2304, 2277, 2055, 650, 1977, 2513, 632, 2865, 33, 1320, 1915, 2319, 1435, 807, 452, 1438, 2868, 1534, 2402, 2647, 2617, 1481, 648, 2474, 3110, 1227, 910, 17, 2761, 583, 2649, 1637, 723, 2288, 1100, 1409, 2662, 3281, 233, 756, 2156, 3015, 3050, 1703, 1651, 2789, 1789, 1847, 952, 1461, 2687, 939, 2308, 2437, 2388, 733, 2337, 268, 641, 1584, 2298, 2037, 3220, 375, 2549, 2090, 1645, 1063, 319, 2773, 757, 2099, 561, 2466, 2594, 2804, 1092, 403, 1026, 1143, 2150, 2775, 886, 1722, 1212, 1874, 1029, 2110, 2935, 885, 2154]

   zetas_mont = zetas * 2^16 is
[2285, 2571, 2970, 1812, 1493, 1422, 287, 202, 3158, 622, 1577, 182, 962, 2127, 1855, 1468, 573, 2004, 264, 383, 2500, 1458, 1727, 3199, 2648, 1017, 732, 608, 1787, 411, 3124, 1758, 1223, 652, 2777, 1015, 2036, 1491, 3047, 1785, 516, 3321, 3009, 2663, 1711, 2167, 126, 1469, 2476, 3239, 3058, 830, 107, 1908, 3082, 2378, 2931, 961, 1821, 2604, 448, 2264, 677, 2054, 2226, 430, 555, 843, 2078, 871, 1550, 105, 422, 587, 177, 3094, 3038, 2869, 1574, 1653, 3083, 778, 1159, 3182, 2552, 1483, 2727, 1119, 1739, 644, 2457, 349, 418, 329, 3173, 3254, 817, 1097, 603, 610, 1322, 2044, 1864, 384, 2114, 3193, 1218, 1994, 2455, 220, 2142, 1670, 2144, 1799, 2051, 794, 1819, 2475, 2459, 478, 3221, 3021, 996, 991, 958, 1869, 1522, 1628]

   zetas_mont_r = zetas_mont - 3329 if zetas_mont > 1664 else zetas_mont is
[-1044, -758, -359, -1517, 1493, 1422, 287, 202, -171, 622, 1577, 182, 962, -1202, -1474, 1468, 573, -1325, 264, 383, -829, 1458, -1602, -130, -681, 1017, 732, 608, -1542, 411, -205, -1571, 1223, 652, -552, 1015, -1293, 1491, -282, -1544, 516, -8, -320, -666, -1618, -1162, 126, 1469, -853, -90, -271, 830, 107, -1421, -247, -951, -398, 961, -1508, -725, 448, -1065, 677, -1275, -1103, 430, 555, 843, -1251, 871, 1550, 105, 422, 587, 177, -235, -291, -460, 1574, 1653, -246, 778, 1159, -147, -777, 1483, -602, 1119, -1590, 644, -872, 349, 418, 329, -156, -75, 817, 1097, 603, 610, 1322, -1285, -1465, 384, -1215, -136, 1218, -1335, -874, 220, -1187, -1659, -1185, -1530, -1278, 794, -1510, -854, -870, 478, -108, -308, 996, 991, 958, -1460, 1522, 1628]
*)

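The comment above derives the twiddle factors in Python; a Rust sketch of the same derivation, useful as a cross-check against the zetas list that follows (illustrative only):

/// Recompute the NTT twiddle factors from the comment above:
/// zetas[i] = 17^bitrev7(i) mod 3329, where bitrev7 reverses a 7-bit index.
fn bitrev7(x: u32) -> u32 {
    (0..7).fold(0, |acc, b| (acc << 1) | ((x >> b) & 1))
}

/// Square-and-multiply modular exponentiation.
fn pow_mod(base: u64, mut exp: u64, modulus: u64) -> u64 {
    let mut result = 1u64;
    let mut base = base % modulus;
    while exp > 0 {
        if exp & 1 == 1 {
            result = result * base % modulus;
        }
        base = base * base % modulus;
        exp >>= 1;
    }
    result
}

fn main() {
    let zetas: Vec<u64> = (0u32..128).map(|i| pow_mod(17, bitrev7(i) as u64, 3329)).collect();
    assert_eq!(zetas[0], 1);
    assert_eq!(zetas[1], 1729); // matches the second entry of the zetas list below
}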
let zetas_list : list field_element = [1; 1729; 2580; 3289; 2642; 630; 1897; 848; 1062; 1919; 193; 797; 2786; 3260; 569; 1746; 296; 2447; 1339; 1476; 3046; 56; 2240; 1333; 1426; 2094; 535; 2882; 2393; 2879; 1974; 821; 289; 331; 3253; 1756; 1197; 2304; 2277; 2055; 650; 1977; 2513; 632; 2865; 33; 1320; 1915; 2319; 1435; 807; 452; 1438; 2868; 1534; 2402; 2647; 2617; 1481; 648; 2474; 3110; 1227; 910; 17; 2761; 583; 2649; 1637; 723; 2288; 1100; 1409; 2662; 3281; 233; 756; 2156; 3015; 3050; 1703; 1651; 2789; 1789; 1847; 952; 1461; 2687; 939; 2308; 2437; 2388; 733; 2337; 268; 641; 1584; 2298; 2037; 3220; 375; 2549; 2090; 1645; 1063; 319; 2773; 757; 2099; 561; 2466; 2594; 2804; 1092; 403; 1026; 1143; 2150; 2775; 886; 1722; 1212; 1874; 1029; 2110; 2935; 885; 2154]
|
||||||
|
|
||||||
|
let zetas : t_Array field_element (sz 128) =
|
||||||
|
assert_norm(List.Tot.length zetas_list == 128);
|
||||||
|
Rust_primitives.Arrays.of_list zetas_list
|
||||||
|
|
||||||
|
let poly_ntt_step (a:field_element) (b:field_element) (i:nat{i < 128}) =
|
||||||
|
let t = field_mul b zetas.[sz i] in
|
||||||
|
let b = field_sub a t in
|
||||||
|
let a = field_add a t in
|
||||||
|
(a,b)
|
||||||
|
|
||||||
|
#push-options "--split_queries always"
|
||||||
|
let poly_ntt_layer (p:polynomial) (l:nat{l > 0 /\ l < 8}) : polynomial =
|
||||||
|
let len = pow2 l in
|
||||||
|
let k = (128 / len) - 1 in
|
||||||
|
Rust_primitives.Arrays.createi (sz 256) (fun i ->
|
||||||
|
let round = v i / (2 * len) in
|
||||||
|
let idx = v i % (2 * len) in
|
||||||
|
let (idx0, idx1) = if idx < len then (idx, idx+len) else (idx-len,idx) in
|
||||||
|
let (a_ntt, b_ntt) = poly_ntt_step p.[sz idx0] p.[sz idx1] (round + k) in
|
||||||
|
if idx < len then a_ntt else b_ntt)
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
val poly_ntt: polynomial -> polynomial
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let poly_ntt p =
|
||||||
|
let p = poly_ntt_layer p 7 in
|
||||||
|
let p = poly_ntt_layer p 6 in
|
||||||
|
let p = poly_ntt_layer p 5 in
|
||||||
|
let p = poly_ntt_layer p 4 in
|
||||||
|
let p = poly_ntt_layer p 3 in
|
||||||
|
let p = poly_ntt_layer p 2 in
|
||||||
|
let p = poly_ntt_layer p 1 in
|
||||||
|
p
|
||||||
|
|
||||||
|
let poly_inv_ntt_step (a:field_element) (b:field_element) (i:nat{i < 128}) =
|
||||||
|
let b_minus_a = field_sub b a in
|
||||||
|
let a = field_add a b in
|
||||||
|
let b = field_mul b_minus_a zetas.[sz i] in
|
||||||
|
(a,b)
|
||||||
|
|
||||||
|
#push-options "--z3rlimit 150"
|
||||||
|
let poly_inv_ntt_layer (p:polynomial) (l:nat{l > 0 /\ l < 8}) : polynomial =
|
||||||
|
let len = pow2 l in
|
||||||
|
let k = (256 / len) - 1 in
|
||||||
|
Rust_primitives.Arrays.createi (sz 256) (fun i ->
|
||||||
|
let round = v i / (2 * len) in
|
||||||
|
let idx = v i % (2 * len) in
|
||||||
|
let (idx0, idx1) = if idx < len then (idx, idx+len) else (idx-len,idx) in
|
||||||
|
let (a_ntt, b_ntt) = poly_inv_ntt_step p.[sz idx0] p.[sz idx1] (k - round) in
|
||||||
|
if idx < len then a_ntt else b_ntt)
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
val poly_inv_ntt: polynomial -> polynomial
|
||||||
|
let poly_inv_ntt p =
|
||||||
|
let p = poly_inv_ntt_layer p 1 in
|
||||||
|
let p = poly_inv_ntt_layer p 2 in
|
||||||
|
let p = poly_inv_ntt_layer p 3 in
|
||||||
|
let p = poly_inv_ntt_layer p 4 in
|
||||||
|
let p = poly_inv_ntt_layer p 5 in
|
||||||
|
let p = poly_inv_ntt_layer p 6 in
|
||||||
|
let p = poly_inv_ntt_layer p 7 in
|
||||||
|
p
|
||||||
|
|
||||||
|
let poly_base_case_multiply (a0 a1 b0 b1 zeta:field_element) =
|
||||||
|
let c0 = field_add (field_mul a0 b0) (field_mul (field_mul a1 b1) zeta) in
|
||||||
|
let c1 = field_add (field_mul a0 b1) (field_mul a1 b0) in
|
||||||
|
(c0,c1)
|
||||||
|
|
||||||
|
val poly_mul_ntt: polynomial -> polynomial -> polynomial
|
||||||
|
let poly_mul_ntt a b =
|
||||||
|
Rust_primitives.Arrays.createi (sz 256) (fun i ->
|
||||||
|
let a0 = a.[sz (2 * (v i / 2))] in
|
||||||
|
let a1 = a.[sz (2 * (v i / 2) + 1)] in
|
||||||
|
let b0 = b.[sz (2 * (v i / 2))] in
|
||||||
|
let b1 = b.[sz (2 * (v i / 2) + 1)] in
|
||||||
|
let zeta_4 = zetas.[sz (64 + (v i/4))] in
|
||||||
|
let zeta = if v i % 4 < 2 then zeta_4 else field_neg zeta_4 in
|
||||||
|
let (c0,c1) = poly_base_case_multiply a0 a1 b0 b1 zeta in
|
||||||
|
if v i % 2 = 0 then c0 else c1)
|
||||||
|
|
||||||
|
|
||||||
|
val vector_add: #r:rank -> vector r -> vector r -> vector r
|
||||||
|
let vector_add #p a b = map2 poly_add a b
|
||||||
|
|
||||||
|
val vector_ntt: #r:rank -> vector r -> vector r
|
||||||
|
let vector_ntt #p v = map_array poly_ntt v
|
||||||
|
|
||||||
|
val vector_inv_ntt: #r:rank -> vector r -> vector r
|
||||||
|
let vector_inv_ntt #p v = map_array poly_inv_ntt v
|
||||||
|
|
||||||
|
val vector_mul_ntt: #r:rank -> vector r -> vector r -> vector r
|
||||||
|
let vector_mul_ntt #p a b = map2 poly_mul_ntt a b
|
||||||
|
|
||||||
|
val vector_sum: #r:rank -> vector r -> polynomial
|
||||||
|
let vector_sum #r a = repeati (r -! sz 1)
|
||||||
|
(fun i x -> assert (v i < v r - 1); poly_add x (a.[i +! sz 1])) a.[sz 0]
|
||||||
|
|
||||||
|
val vector_dot_product_ntt: #r:rank -> vector r -> vector r -> polynomial
|
||||||
|
let vector_dot_product_ntt a b = vector_sum (vector_mul_ntt a b)
|
||||||
|
|
||||||
|
val matrix_transpose: #r:rank -> matrix r -> matrix r
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let matrix_transpose #r m =
|
||||||
|
createi r (fun i ->
|
||||||
|
createi r (fun j ->
|
||||||
|
m.[j].[i]))
|
||||||
|
|
||||||
|
val matrix_vector_mul_ntt: #r:rank -> matrix r -> vector r -> vector r
|
||||||
|
let matrix_vector_mul_ntt #r m v =
|
||||||
|
createi r (fun i -> vector_dot_product_ntt m.[i] v)
|
||||||
|
|
||||||
|
val compute_As_plus_e_ntt: #r:rank -> a:matrix r -> s:vector r -> e:vector r -> vector r
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let compute_As_plus_e_ntt #p a s e = vector_add (matrix_vector_mul_ntt a s) e
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
type dT = d: nat {d = 1 \/ d = 4 \/ d = 5 \/ d = 10 \/ d = 11 \/ d = 12}
|
||||||
|
let max_d (d:dT) = if d < 12 then pow2 d else v v_FIELD_MODULUS
|
||||||
|
type field_element_d (d:dT) = n:nat{n < max_d d}
|
||||||
|
type polynomial_d (d:dT) = t_Array (field_element_d d) (sz 256)
|
||||||
|
type vector_d (r:rank) (d:dT) = t_Array (polynomial_d d) r
|
||||||
|
|
||||||
|
let bits_to_bytes (#bytes: usize) (bv: bit_vec (v bytes * 8))
|
||||||
|
: Pure (t_Array u8 bytes)
|
||||||
|
(requires True)
|
||||||
|
(ensures fun r -> (forall i. bit_vec_of_int_t_array r 8 i == bv i))
|
||||||
|
= bit_vec_to_int_t_array 8 bv
|
||||||
|
|
||||||
|
let bytes_to_bits (#bytes: usize) (r: t_Array u8 bytes)
|
||||||
|
: Pure (i: bit_vec (v bytes * 8))
|
||||||
|
(requires True)
|
||||||
|
(ensures fun f -> (forall i. bit_vec_of_int_t_array r 8 i == f i))
|
||||||
|
= bit_vec_of_int_t_array r 8
|
||||||
|
|
||||||
|
unfold let retype_bit_vector #a #b (#_:unit{a == b}) (x: a): b = x
|
||||||
|
|
||||||
|
|
||||||
|
let compress_d (d: dT {d <> 12}) (x: field_element): field_element_d d
  = let r = (pow2 d * x + 1664) / v v_FIELD_MODULUS in
    assert (r * v v_FIELD_MODULUS <= pow2 d * x + 1664);
    assert (r * v v_FIELD_MODULUS <= pow2 d * (v v_FIELD_MODULUS - 1) + 1664);
    Math.Lemmas.lemma_div_le (r * v v_FIELD_MODULUS) (pow2 d * (v v_FIELD_MODULUS - 1) + 1664) (v v_FIELD_MODULUS);
    Math.Lemmas.cancel_mul_div r (v v_FIELD_MODULUS);
    assert (r <= (pow2 d * (v v_FIELD_MODULUS - 1) + 1664) / v v_FIELD_MODULUS);
    Math.Lemmas.lemma_div_mod_plus (1664 - pow2 d) (pow2 d) (v v_FIELD_MODULUS);
    assert (r <= pow2 d + (1664 - pow2 d) / v v_FIELD_MODULUS);
    assert (r <= pow2 d);
    if r = pow2 d then 0 else r

let decompress_d (d: dT {d <> 12}) (x: field_element_d d): field_element
  = let r = (x * v v_FIELD_MODULUS + 1664) / pow2 d in
    r
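
A minimal Python sketch of the two maps above, mirroring the spec's formulas exactly (1664, i.e. q/2 rounded down, is the offset used in both directions); values and names are illustrative only:

Q = 3329

def compress(d, x):                    # 0 <= x < Q, d < 12
    r = ((1 << d) * x + 1664) // Q     # adding floor(q/2) before dividing rounds to nearest
    return 0 if r == (1 << d) else r   # identify 2^d with 0, as in compress_d

def decompress(d, y):                  # 0 <= y < 2^d
    return (y * Q + 1664) // (1 << d)  # same offset as decompress_d above
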
[@ "opaque_to_smt"]
let byte_encode (d: dT) (coefficients: polynomial_d d): t_Array u8 (sz (32 * d))
  = let coefficients' : t_Array nat (sz 256) = map_array #(field_element_d d) (fun x -> x <: nat) coefficients in
    bits_to_bytes #(sz (32 * d))
      (retype_bit_vector (bit_vec_of_nat_array coefficients' d))

[@ "opaque_to_smt"]
let byte_decode (d: dT) (coefficients: t_Array u8 (sz (32 * d))): polynomial_d d
  = let bv = bytes_to_bits coefficients in
    let arr: t_Array nat (sz 256) = bit_vec_to_nat_array d (retype_bit_vector bv) in
    let p: polynomial_d d =
      createi (sz 256) (fun i ->
        let x_f : field_element = arr.[i] % v v_FIELD_MODULUS in
        assert (d < 12 ==> arr.[i] < pow2 d);
        let x_m : field_element_d d = x_f in
        x_m)
    in
    p
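
byte_encode/byte_decode pack 256 d-bit coefficients into 32*d bytes through a little-endian bit vector. A small Python sketch of that packing (the d = 12 decode in the spec additionally reduces each value mod q, omitted here):

def byte_encode(d, coefficients):          # 256 values, each < 2^d
    bits = 0
    for idx, c in enumerate(coefficients):
        bits |= c << (d * idx)
    return bits.to_bytes(32 * d, "little")

def byte_decode(d, data):                  # 32*d bytes -> 256 values < 2^d
    bits = int.from_bytes(data, "little")
    mask = (1 << d) - 1
    return [(bits >> (d * idx)) & mask for idx in range(256)]
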
let coerce_polynomial_12 (p:polynomial): polynomial_d 12 = p
|
||||||
|
let coerce_vector_12 (#r:rank) (v:vector r): vector_d r 12 = v
|
||||||
|
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let compress_then_byte_encode (d: dT {d <> 12}) (coefficients: polynomial): t_Array u8 (sz (32 * d))
|
||||||
|
= let coefs: t_Array (field_element_d d) (sz 256) = map_array (compress_d d) coefficients
|
||||||
|
in
|
||||||
|
byte_encode d coefs
|
||||||
|
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let byte_decode_then_decompress (d: dT {d <> 12}) (b:t_Array u8 (sz (32 * d))): polynomial
|
||||||
|
= map_array (decompress_d d) (byte_decode d b)
|
||||||
|
|
||||||
|
|
||||||
|
(**** Definitions to move or to rework *)
|
||||||
|
let serialize_pre
|
||||||
|
(d1: dT)
|
||||||
|
(coefficients: t_Array i16 (sz 16))
|
||||||
|
= forall i. i < 16 ==> bounded (Seq.index coefficients i) d1
|
||||||
|
|
||||||
|
// TODO: this is an alternative version of byte_encode
|
||||||
|
// rename to encoded bytes
|
||||||
|
#push-options "--z3rlimit 80 --split_queries always"
|
||||||
|
let serialize_post
|
||||||
|
(d1: dT)
|
||||||
|
(coefficients: t_Array i16 (sz 16) { serialize_pre d1 coefficients })
|
||||||
|
(output: t_Array u8 (sz (d1 * 2)))
|
||||||
|
= BitVecEq.int_t_array_bitwise_eq coefficients d1
|
||||||
|
output 8
|
||||||
|
|
||||||
|
// TODO: this is an alternative version of byte_decode
|
||||||
|
// rename to decoded bytes
|
||||||
|
let deserialize_post
|
||||||
|
(d1: dT)
|
||||||
|
(bytes: t_Array u8 (sz (d1 * 2)))
|
||||||
|
(output: t_Array i16 (sz 16))
|
||||||
|
= BitVecEq.int_t_array_bitwise_eq bytes 8
|
||||||
|
output d1 /\
|
||||||
|
forall (i:nat). i < 16 ==> bounded (Seq.index output i) d1
|
||||||
|
#pop-options
|
||||||
453 proofs/fstar/models/Spec.MLKEM.fst Normal file
@@ -0,0 +1,453 @@
|
||||||
|
module Spec.MLKEM
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 80"
|
||||||
|
open FStar.Mul
|
||||||
|
open Core
|
||||||
|
|
||||||
|
include Spec.Utils
|
||||||
|
include Spec.MLKEM.Math
|
||||||
|
|
||||||
|
(** ML-KEM Constants *)
|
||||||
|
let v_BITS_PER_COEFFICIENT: usize = sz 12
|
||||||
|
|
||||||
|
let v_COEFFICIENTS_IN_RING_ELEMENT: usize = sz 256
|
||||||
|
|
||||||
|
let v_BITS_PER_RING_ELEMENT: usize = sz 3072 // v_COEFFICIENTS_IN_RING_ELEMENT *! sz 12
|
||||||
|
|
||||||
|
let v_BYTES_PER_RING_ELEMENT: usize = sz 384 // v_BITS_PER_RING_ELEMENT /! sz 8
|
||||||
|
|
||||||
|
let v_CPA_KEY_GENERATION_SEED_SIZE: usize = sz 32
|
||||||
|
|
||||||
|
let v_H_DIGEST_SIZE: usize = sz 32
|
||||||
|
// same as Libcrux.Digest.digest_size (Libcrux.Digest.Algorithm_Sha3_256_ <: Libcrux.Digest.t_Algorithm)
|
||||||
|
|
||||||
|
let v_REJECTION_SAMPLING_SEED_SIZE: usize = sz 840 // sz 168 *! sz 5
|
||||||
|
|
||||||
|
let v_SHARED_SECRET_SIZE: usize = v_H_DIGEST_SIZE
|
||||||
|
|
||||||
|
val v_ETA1 (r:rank) : u:usize{u == sz 3 \/ u == sz 2}
|
||||||
|
let v_ETA1 (r:rank) : usize =
|
||||||
|
if r = sz 2 then sz 3 else
|
||||||
|
if r = sz 3 then sz 2 else
|
||||||
|
if r = sz 4 then sz 2 else (
|
||||||
|
assert (false);
|
||||||
|
sz 0)
|
||||||
|
|
||||||
|
|
||||||
|
let v_ETA2 (r:rank) : usize = sz 2
|
||||||
|
|
||||||
|
val v_VECTOR_U_COMPRESSION_FACTOR (r:rank) : u:usize{u == sz 10 \/ u == sz 11}
|
||||||
|
let v_VECTOR_U_COMPRESSION_FACTOR (r:rank) : usize =
|
||||||
|
if r = sz 2 then sz 10 else
|
||||||
|
if r = sz 3 then sz 10 else
|
||||||
|
if r = sz 4 then sz 11 else (
|
||||||
|
assert (false);
|
||||||
|
sz 0)
|
||||||
|
|
||||||
|
val v_VECTOR_V_COMPRESSION_FACTOR (r:rank) : u:usize{u == sz 4 \/ u == sz 5}
let v_VECTOR_V_COMPRESSION_FACTOR (r:rank) : usize =
  if r = sz 2 then sz 4 else
  if r = sz 3 then sz 4 else
  if r = sz 4 then sz 5 else (
    assert (false);
    sz 0)
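
For reference, these rank-indexed functions (together with v_ETA1 and v_ETA2 above) select the FIPS 203 parameter sets. A small Python summary of the values they return, for orientation only:

# rank k, eta1, eta2, d_u (vector-u compression), d_v (vector-v compression)
MLKEM_PARAMS = {
    "ML-KEM-512":  {"k": 2, "eta1": 3, "eta2": 2, "d_u": 10, "d_v": 4},
    "ML-KEM-768":  {"k": 3, "eta1": 2, "eta2": 2, "d_u": 10, "d_v": 4},
    "ML-KEM-1024": {"k": 4, "eta1": 2, "eta2": 2, "d_u": 11, "d_v": 5},
}
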
|
||||||
|
val v_ETA1_RANDOMNESS_SIZE (r:rank) : u:usize{u == sz 128 \/ u == sz 192}
|
||||||
|
let v_ETA1_RANDOMNESS_SIZE (r:rank) = v_ETA1 r *! sz 64
|
||||||
|
|
||||||
|
val v_ETA2_RANDOMNESS_SIZE (r:rank) : u:usize{u == sz 128}
|
||||||
|
let v_ETA2_RANDOMNESS_SIZE (r:rank) = v_ETA2 r *! sz 64
|
||||||
|
|
||||||
|
val v_RANKED_BYTES_PER_RING_ELEMENT (r:rank) : u:usize{u = sz 768 \/ u = sz 1152 \/ u = sz 1536}
|
||||||
|
let v_RANKED_BYTES_PER_RING_ELEMENT (r:rank) = r *! v_BYTES_PER_RING_ELEMENT
|
||||||
|
|
||||||
|
let v_T_AS_NTT_ENCODED_SIZE (r:rank) = v_RANKED_BYTES_PER_RING_ELEMENT r
|
||||||
|
let v_CPA_PRIVATE_KEY_SIZE (r:rank) = v_RANKED_BYTES_PER_RING_ELEMENT r
|
||||||
|
|
||||||
|
val v_CPA_PUBLIC_KEY_SIZE (r:rank) : u:usize{u = sz 800 \/ u = sz 1184 \/ u = sz 1568}
|
||||||
|
let v_CPA_PUBLIC_KEY_SIZE (r:rank) = v_RANKED_BYTES_PER_RING_ELEMENT r +! sz 32
|
||||||
|
|
||||||
|
val v_CCA_PRIVATE_KEY_SIZE (r:rank) : u:usize{u = sz 1632 \/ u = sz 2400 \/ u = sz 3168}
|
||||||
|
let v_CCA_PRIVATE_KEY_SIZE (r:rank) =
|
||||||
|
(v_CPA_PRIVATE_KEY_SIZE r +! v_CPA_PUBLIC_KEY_SIZE r +! v_H_DIGEST_SIZE +! v_SHARED_SECRET_SIZE)
|
||||||
|
|
||||||
|
let v_CCA_PUBLIC_KEY_SIZE (r:rank) = v_CPA_PUBLIC_KEY_SIZE r
|
||||||
|
|
||||||
|
val v_C1_BLOCK_SIZE (r:rank): u:usize{(u = sz 320 \/ u = sz 352) /\ v u == 32 * v (v_VECTOR_U_COMPRESSION_FACTOR r)}
|
||||||
|
let v_C1_BLOCK_SIZE (r:rank) = sz 32 *! v_VECTOR_U_COMPRESSION_FACTOR r
|
||||||
|
|
||||||
|
val v_C1_SIZE (r:rank) : u:usize{(u >=. sz 640 /\ u <=. sz 1448) /\
|
||||||
|
v u == v (v_C1_BLOCK_SIZE r) * v r}
|
||||||
|
let v_C1_SIZE (r:rank) = v_C1_BLOCK_SIZE r *! r
|
||||||
|
|
||||||
|
val v_C2_SIZE (r:rank) : u:usize{(u = sz 128 \/ u = sz 160) /\ v u == 32 * v (v_VECTOR_V_COMPRESSION_FACTOR r)}
|
||||||
|
let v_C2_SIZE (r:rank) = sz 32 *! v_VECTOR_V_COMPRESSION_FACTOR r
|
||||||
|
|
||||||
|
val v_CPA_CIPHERTEXT_SIZE (r:rank) : u:usize {v u = v (v_C1_SIZE r) + v (v_C2_SIZE r)}
|
||||||
|
let v_CPA_CIPHERTEXT_SIZE (r:rank) = v_C1_SIZE r +! v_C2_SIZE r
|
||||||
|
|
||||||
|
let v_CCA_CIPHERTEXT_SIZE (r:rank) = v_CPA_CIPHERTEXT_SIZE r
|
||||||
|
|
||||||
|
val v_IMPLICIT_REJECTION_HASH_INPUT_SIZE (r:rank): u:usize{v u == v v_SHARED_SECRET_SIZE +
|
||||||
|
v (v_CPA_CIPHERTEXT_SIZE r)}
|
||||||
|
let v_IMPLICIT_REJECTION_HASH_INPUT_SIZE (r:rank) =
|
||||||
|
v_SHARED_SECRET_SIZE +! v_CPA_CIPHERTEXT_SIZE r
|
||||||
|
|
||||||
|
val v_KEY_GENERATION_SEED_SIZE: u:usize{u = sz 64}
|
||||||
|
let v_KEY_GENERATION_SEED_SIZE: usize =
|
||||||
|
v_CPA_KEY_GENERATION_SEED_SIZE +!
|
||||||
|
v_SHARED_SECRET_SIZE
|
||||||
|
|
||||||
|
|
||||||
|
(** ML-KEM Types *)
|
||||||
|
|
||||||
|
type t_MLKEMPublicKey (r:rank) = t_Array u8 (v_CPA_PUBLIC_KEY_SIZE r)
|
||||||
|
type t_MLKEMPrivateKey (r:rank) = t_Array u8 (v_CCA_PRIVATE_KEY_SIZE r)
|
||||||
|
type t_MLKEMKeyPair (r:rank) = t_MLKEMPrivateKey r & t_MLKEMPublicKey r
|
||||||
|
|
||||||
|
type t_MLKEMCPAPrivateKey (r:rank) = t_Array u8 (v_CPA_PRIVATE_KEY_SIZE r)
|
||||||
|
type t_MLKEMCPAKeyPair (r:rank) = t_MLKEMCPAPrivateKey r & t_MLKEMPublicKey r
|
||||||
|
|
||||||
|
type t_MLKEMCiphertext (r:rank) = t_Array u8 (v_CPA_CIPHERTEXT_SIZE r)
|
||||||
|
type t_MLKEMSharedSecret = t_Array u8 (v_SHARED_SECRET_SIZE)
|
||||||
|
|
||||||
|
|
||||||
|
assume val sample_max: n:usize{v n < pow2 32 /\ v n >= 128 * 3 /\ v n % 3 = 0}
|
||||||
|
|
||||||
|
val sample_polynomial_ntt: seed:t_Array u8 (sz 34) -> (polynomial & bool)
|
||||||
|
let sample_polynomial_ntt seed =
|
||||||
|
let randomness = v_XOF sample_max seed in
|
||||||
|
let bv = bytes_to_bits randomness in
|
||||||
|
assert (v sample_max * 8 == (((v sample_max / 3) * 2) * 12));
|
||||||
|
let bv: bit_vec ((v (sz ((v sample_max / 3) * 2))) * 12) = retype_bit_vector bv in
|
||||||
|
let i16s = bit_vec_to_nat_array #(sz ((v sample_max / 3) * 2)) 12 bv in
|
||||||
|
assert ((v sample_max / 3) * 2 >= 256);
|
||||||
|
let poly0: polynomial = Seq.create 256 0 in
|
||||||
|
let index_t = n:nat{n <= 256} in
|
||||||
|
let (sampled, poly1) =
|
||||||
|
repeati #(index_t & polynomial) (sz ((v sample_max / 3) * 2))
|
||||||
|
(fun i (sampled,acc) ->
|
||||||
|
if sampled < 256 then
|
||||||
|
let sample = Seq.index i16s (v i) in
|
||||||
|
if sample < 3329 then
|
||||||
|
(sampled+1, Rust_primitives.Hax.update_at acc (sz sampled) sample)
|
||||||
|
else (sampled, acc)
|
||||||
|
else (sampled, acc))
|
||||||
|
(0,poly0) in
|
||||||
|
if sampled < 256 then poly0, false else poly1, true
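
The loop above keeps 12-bit candidates that are below q until 256 coefficients are filled, and reports failure if the XOF output runs out. A rough Python sketch of the same rejection step; xof_bytes is a hypothetical stand-in for v_XOF (SHAKE-128 in ML-KEM), not an API from this repository:

Q = 3329

def sample_polynomial_ntt(seed, xof_bytes, n_bytes=840):
    randomness = xof_bytes(seed, n_bytes)
    coeffs = []
    for i in range(0, n_bytes, 3):                    # 3 bytes -> two 12-bit candidates
        b0, b1, b2 = randomness[i:i + 3]
        for cand in (b0 | ((b1 & 0x0F) << 8), (b1 >> 4) | (b2 << 4)):
            if cand < Q and len(coeffs) < 256:
                coeffs.append(cand)
    return (coeffs, True) if len(coeffs) == 256 else ([0] * 256, False)
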
|
||||||
|
|
||||||
|
let sample_polynomial_ntt_at_index (seed:t_Array u8 (sz 32)) (i j: (x:usize{v x <= 4})) : polynomial & bool =
|
||||||
|
let seed34 = Seq.append seed (Seq.create 2 (mk_u8 0)) in
|
||||||
|
let seed34 = Rust_primitives.Hax.update_at seed34 (sz 32) (mk_int #u8_inttype (v i)) in
|
||||||
|
let seed34 = Rust_primitives.Hax.update_at seed34 (sz 33) (mk_int #u8_inttype (v j)) in
|
||||||
|
sample_polynomial_ntt seed34
|
||||||
|
|
||||||
|
val sample_matrix_A_ntt: #r:rank -> seed:t_Array u8 (sz 32) -> (matrix r & bool)
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let sample_matrix_A_ntt #r seed =
|
||||||
|
let m =
|
||||||
|
createi r (fun i ->
|
||||||
|
createi r (fun j ->
|
||||||
|
let (p,b) = sample_polynomial_ntt_at_index seed i j in
|
||||||
|
p))
|
||||||
|
in
|
||||||
|
let sufficient_randomness =
|
||||||
|
repeati r (fun i b ->
|
||||||
|
repeati r (fun j b ->
|
||||||
|
let (p,v) = sample_polynomial_ntt_at_index seed i j in
|
||||||
|
b && v) b) true in
|
||||||
|
(m, sufficient_randomness)
|
||||||
|
|
||||||
|
assume val sample_poly_cbd: v_ETA:usize{v v_ETA == 2 \/ v v_ETA == 3} -> t_Array u8 (v_ETA *! sz 64) -> polynomial
|
||||||
|
|
||||||
|
open Rust_primitives.Integers
|
||||||
|
|
||||||
|
val sample_poly_cbd2: #r:rank -> seed:t_Array u8 (sz 32) -> domain_sep:usize{v domain_sep < 256} -> polynomial
|
||||||
|
let sample_poly_cbd2 #r seed domain_sep =
|
||||||
|
let prf_input = Seq.append seed (Seq.create 1 (mk_int #u8_inttype (v domain_sep))) in
|
||||||
|
let prf_output = v_PRF (v_ETA2_RANDOMNESS_SIZE r) prf_input in
|
||||||
|
sample_poly_cbd (v_ETA2 r) prf_output
|
||||||
|
|
||||||
|
let sample_vector_cbd1_prf_input (#r:rank) (seed:t_Array u8 (sz 32)) (domain_sep:usize{v domain_sep < 2 * v r}) (i:usize{i <. r}) : t_Array u8 (sz 33) =
|
||||||
|
Seq.append seed (Seq.create 1 (mk_int #u8_inttype (v domain_sep + v i)))
|
||||||
|
|
||||||
|
let sample_vector_cbd1_prf_output (#r:rank) (prf_output:t_Array (t_Array u8 (v_ETA1_RANDOMNESS_SIZE r)) r) (i:usize{i <. r}) : polynomial =
|
||||||
|
sample_poly_cbd (v_ETA1 r) prf_output.[i]
|
||||||
|
|
||||||
|
let sample_vector_cbd1 (#r:rank) (seed:t_Array u8 (sz 32)) (domain_sep:usize{v domain_sep < 2 * v r}) : vector r =
|
||||||
|
let prf_input = createi r (sample_vector_cbd1_prf_input #r seed domain_sep) in
|
||||||
|
let prf_output = v_PRFxN r (v_ETA1_RANDOMNESS_SIZE r) prf_input in
|
||||||
|
createi r (sample_vector_cbd1_prf_output #r prf_output)
|
||||||
|
|
||||||
|
let sample_vector_cbd2_prf_input (#r:rank) (seed:t_Array u8 (sz 32)) (domain_sep:usize{v domain_sep < 2 * v r}) (i:usize{i <. r}) : t_Array u8 (sz 33) =
|
||||||
|
Seq.append seed (Seq.create 1 (mk_int #u8_inttype (v domain_sep + v i)))
|
||||||
|
|
||||||
|
let sample_vector_cbd2_prf_output (#r:rank) (prf_output:t_Array (t_Array u8 (v_ETA2_RANDOMNESS_SIZE r)) r) (i:usize{i <. r}) : polynomial =
|
||||||
|
sample_poly_cbd (v_ETA2 r) prf_output.[i]
|
||||||
|
|
||||||
|
let sample_vector_cbd2 (#r:rank) (seed:t_Array u8 (sz 32)) (domain_sep:usize{v domain_sep < 2 * v r}) : vector r =
|
||||||
|
let prf_input = createi r (sample_vector_cbd2_prf_input #r seed domain_sep) in
|
||||||
|
let prf_output = v_PRFxN r (v_ETA2_RANDOMNESS_SIZE r) prf_input in
|
||||||
|
createi r (sample_vector_cbd2_prf_output #r prf_output)
|
||||||
|
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let sample_vector_cbd_then_ntt (#r:rank) (seed:t_Array u8 (sz 32)) (domain_sep:usize{v domain_sep < 2 * v r}) : vector r =
|
||||||
|
vector_ntt (sample_vector_cbd1 #r seed domain_sep)
|
||||||
|
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let vector_encode_12 (#r:rank) (v: vector r) : t_Array u8 (v_T_AS_NTT_ENCODED_SIZE r)
|
||||||
|
= let s: t_Array (t_Array _ (sz 384)) r = map_array (byte_encode 12) (coerce_vector_12 v) in
|
||||||
|
flatten s
|
||||||
|
|
||||||
|
let vector_decode_12 (#r:rank) (arr: t_Array u8 (v_T_AS_NTT_ENCODED_SIZE r)): vector r
|
||||||
|
= createi r (fun block ->
|
||||||
|
let block_size = (sz (32 * 12)) in
|
||||||
|
let slice = Seq.slice arr (v block * v block_size)
|
||||||
|
(v block * v block_size + v block_size) in
|
||||||
|
byte_decode 12 slice
|
||||||
|
)
|
||||||
|
|
||||||
|
let compress_then_encode_message (p:polynomial) : t_Array u8 v_SHARED_SECRET_SIZE
|
||||||
|
= compress_then_byte_encode 1 p
|
||||||
|
|
||||||
|
let decode_then_decompress_message (b:t_Array u8 v_SHARED_SECRET_SIZE): polynomial
|
||||||
|
= byte_decode_then_decompress 1 b
|
||||||
|
|
||||||
|
let compress_then_encode_u (#r:rank) (vec: vector r): t_Array u8 (v_C1_SIZE r)
|
||||||
|
= let d = v (v_VECTOR_U_COMPRESSION_FACTOR r) in
|
||||||
|
flatten (map_array (compress_then_byte_encode d) vec)
|
||||||
|
|
||||||
|
let decode_then_decompress_u (#r:rank) (arr: t_Array u8 (v_C1_SIZE r)): vector r
|
||||||
|
= let d = v_VECTOR_U_COMPRESSION_FACTOR r in
|
||||||
|
createi r (fun block ->
|
||||||
|
let block_size = v_C1_BLOCK_SIZE r in
|
||||||
|
let slice = Seq.slice arr (v block * v block_size)
|
||||||
|
(v block * v block_size + v block_size) in
|
||||||
|
byte_decode_then_decompress (v d) slice
|
||||||
|
)
|
||||||
|
|
||||||
|
let compress_then_encode_v (#r:rank): polynomial -> t_Array u8 (v_C2_SIZE r)
|
||||||
|
= compress_then_byte_encode (v (v_VECTOR_V_COMPRESSION_FACTOR r))
|
||||||
|
|
||||||
|
let decode_then_decompress_v (#r:rank): t_Array u8 (v_C2_SIZE r) -> polynomial
|
||||||
|
= byte_decode_then_decompress (v (v_VECTOR_V_COMPRESSION_FACTOR r))
|
||||||
|
|
||||||
|
(** IND-CPA Functions *)
|
||||||
|
|
||||||
|
val ind_cpa_generate_keypair_unpacked (r:rank) (randomness:t_Array u8 v_CPA_KEY_GENERATION_SEED_SIZE) :
|
||||||
|
(((((vector r) & (t_Array u8 (sz 32))) & (matrix r)) & (vector r)) & bool)
|
||||||
|
let ind_cpa_generate_keypair_unpacked r randomness =
|
||||||
|
let hashed = v_G (Seq.append randomness (Seq.create 1 (cast r <: u8))) in
|
||||||
|
let (seed_for_A, seed_for_secret_and_error) = split hashed (sz 32) in
|
||||||
|
let (matrix_A_as_ntt, sufficient_randomness) = sample_matrix_A_ntt #r seed_for_A in
|
||||||
|
let secret_as_ntt = sample_vector_cbd_then_ntt #r seed_for_secret_and_error (sz 0) in
|
||||||
|
let error_as_ntt = sample_vector_cbd_then_ntt #r seed_for_secret_and_error r in
|
||||||
|
let t_as_ntt = compute_As_plus_e_ntt #r matrix_A_as_ntt secret_as_ntt error_as_ntt in
|
||||||
|
(((t_as_ntt,seed_for_A), matrix_A_as_ntt), secret_as_ntt), sufficient_randomness
|
||||||
|
|
||||||
|
/// This function implements most of <strong>Algorithm 12</strong> of the
|
||||||
|
/// NIST FIPS 203 specification; this is the MLKEM CPA-PKE key generation algorithm.
|
||||||
|
///
|
||||||
|
/// We say "most of" since Algorithm 12 samples the required randomness within
|
||||||
|
/// the function itself, whereas this implementation expects it to be provided
|
||||||
|
/// through the `key_generation_seed` parameter.
|
||||||
|
|
||||||
|
val ind_cpa_generate_keypair (r:rank) (randomness:t_Array u8 v_CPA_KEY_GENERATION_SEED_SIZE) :
|
||||||
|
(t_MLKEMCPAKeyPair r & bool)
|
||||||
|
let ind_cpa_generate_keypair r randomness =
|
||||||
|
let ((((t_as_ntt,seed_for_A), _), secret_as_ntt), sufficient_randomness) =
|
||||||
|
ind_cpa_generate_keypair_unpacked r randomness in
|
||||||
|
let public_key_serialized = Seq.append (vector_encode_12 #r t_as_ntt) seed_for_A in
|
||||||
|
let secret_key_serialized = vector_encode_12 #r secret_as_ntt in
|
||||||
|
((secret_key_serialized,public_key_serialized), sufficient_randomness)
|
||||||
|
|
||||||
|
val ind_cpa_encrypt_unpacked (r:rank)
|
||||||
|
(message: t_Array u8 v_SHARED_SECRET_SIZE)
|
||||||
|
(randomness:t_Array u8 v_SHARED_SECRET_SIZE)
|
||||||
|
(t_as_ntt:vector r)
|
||||||
|
(matrix_A_as_ntt:matrix r) :
|
||||||
|
t_MLKEMCiphertext r
|
||||||
|
|
||||||
|
#push-options "--z3rlimit 500 --ext context_pruning"
|
||||||
|
let ind_cpa_encrypt_unpacked r message randomness t_as_ntt matrix_A_as_ntt =
|
||||||
|
let r_as_ntt = sample_vector_cbd_then_ntt #r randomness (sz 0) in
|
||||||
|
let error_1 = sample_vector_cbd2 #r randomness r in
|
||||||
|
let error_2 = sample_poly_cbd2 #r randomness (r +! r) in
|
||||||
|
let u = vector_add (vector_inv_ntt (matrix_vector_mul_ntt matrix_A_as_ntt r_as_ntt)) error_1 in
|
||||||
|
let mu = decode_then_decompress_message message in
|
||||||
|
let v = poly_add (poly_add (vector_dot_product_ntt t_as_ntt r_as_ntt) error_2) mu in
|
||||||
|
let c1 = compress_then_encode_u #r u in
|
||||||
|
let c2 = compress_then_encode_v #r v in
|
||||||
|
concat c1 c2
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
/// This function implements <strong>Algorithm 13</strong> of the
|
||||||
|
/// NIST FIPS 203 specification; this is the MLKEM CPA-PKE encryption algorithm.
|
||||||
|
|
||||||
|
val ind_cpa_encrypt (r:rank) (public_key: t_MLKEMPublicKey r)
|
||||||
|
(message: t_Array u8 v_SHARED_SECRET_SIZE)
|
||||||
|
(randomness:t_Array u8 v_SHARED_SECRET_SIZE) :
|
||||||
|
(t_MLKEMCiphertext r & bool)
|
||||||
|
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let ind_cpa_encrypt r public_key message randomness =
|
||||||
|
let (t_as_ntt_bytes, seed_for_A) = split public_key (v_T_AS_NTT_ENCODED_SIZE r) in
|
||||||
|
let t_as_ntt = vector_decode_12 #r t_as_ntt_bytes in
|
||||||
|
let matrix_A_as_ntt, sufficient_randomness = sample_matrix_A_ntt #r seed_for_A in
|
||||||
|
let c = ind_cpa_encrypt_unpacked r message randomness t_as_ntt (matrix_transpose matrix_A_as_ntt) in
|
||||||
|
(c, sufficient_randomness)
|
||||||
|
|
||||||
|
val ind_cpa_decrypt_unpacked (r:rank)
|
||||||
|
(ciphertext: t_MLKEMCiphertext r) (secret_as_ntt:vector r):
|
||||||
|
t_MLKEMSharedSecret
|
||||||
|
|
||||||
|
let ind_cpa_decrypt_unpacked r ciphertext secret_as_ntt =
|
||||||
|
let (c1,c2) = split ciphertext (v_C1_SIZE r) in
|
||||||
|
let u = decode_then_decompress_u #r c1 in
|
||||||
|
let v = decode_then_decompress_v #r c2 in
|
||||||
|
let w = poly_sub v (poly_inv_ntt (vector_dot_product_ntt secret_as_ntt (vector_ntt u))) in
|
||||||
|
compress_then_encode_message w
|
||||||
|
|
||||||
|
/// This function implements <strong>Algorithm 14</strong> of the
|
||||||
|
/// NIST FIPS 203 specification; this is the MLKEM CPA-PKE decryption algorithm.
|
||||||
|
|
||||||
|
val ind_cpa_decrypt (r:rank) (secret_key: t_MLKEMCPAPrivateKey r)
|
||||||
|
(ciphertext: t_MLKEMCiphertext r):
|
||||||
|
t_MLKEMSharedSecret
|
||||||
|
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let ind_cpa_decrypt r secret_key ciphertext =
|
||||||
|
let secret_as_ntt = vector_decode_12 #r secret_key in
|
||||||
|
ind_cpa_decrypt_unpacked r ciphertext secret_as_ntt
|
||||||
|
|
||||||
|
(** IND-CCA Functions *)
|
||||||
|
|
||||||
|
|
||||||
|
/// This function implements most of Algorithm 15 of the
|
||||||
|
/// NIST FIPS 203 specification; this is the MLKEM CCA-KEM key generation algorithm.
|
||||||
|
///
|
||||||
|
/// We say "most of" since Algorithm 15 samples the required randomness within
|
||||||
|
/// the function itself, whereas this implementation expects it to be provided
|
||||||
|
/// through the `randomness` parameter.
|
||||||
|
///
|
||||||
|
/// TODO: input validation
|
||||||
|
|
||||||
|
val ind_cca_generate_keypair (r:rank) (randomness:t_Array u8 v_KEY_GENERATION_SEED_SIZE) :
|
||||||
|
t_MLKEMKeyPair r & bool
|
||||||
|
let ind_cca_generate_keypair p randomness =
|
||||||
|
let (ind_cpa_keypair_randomness, implicit_rejection_value) =
|
||||||
|
split randomness v_CPA_KEY_GENERATION_SEED_SIZE in
|
||||||
|
|
||||||
|
let (ind_cpa_secret_key,ind_cpa_public_key), sufficient_randomness = ind_cpa_generate_keypair p ind_cpa_keypair_randomness in
|
||||||
|
let ind_cca_secret_key = Seq.append ind_cpa_secret_key (
|
||||||
|
Seq.append ind_cpa_public_key (
|
||||||
|
Seq.append (v_H ind_cpa_public_key) implicit_rejection_value)) in
|
||||||
|
(ind_cca_secret_key, ind_cpa_public_key), sufficient_randomness
|
||||||
|
|
||||||
|
/// This function implements most of Algorithm 16 of the
|
||||||
|
/// NIST FIPS 203 specification; this is the MLKEM CCA-KEM encapsulation algorithm.
|
||||||
|
///
|
||||||
|
/// We say "most of" since Algorithm 16 samples the required randomness within
|
||||||
|
/// the function itself, whereas this implementation expects it to be provided
|
||||||
|
/// through the `randomness` parameter.
|
||||||
|
///
|
||||||
|
/// TODO: input validation
|
||||||
|
|
||||||
|
val ind_cca_encapsulate (r:rank) (public_key: t_MLKEMPublicKey r)
|
||||||
|
(randomness:t_Array u8 v_SHARED_SECRET_SIZE) :
|
||||||
|
(t_MLKEMCiphertext r & t_MLKEMSharedSecret) & bool
|
||||||
|
let ind_cca_encapsulate p public_key randomness =
|
||||||
|
let to_hash = concat randomness (v_H public_key) in
|
||||||
|
let hashed = v_G to_hash in
|
||||||
|
let (shared_secret, pseudorandomness) = split hashed v_SHARED_SECRET_SIZE in
|
||||||
|
let ciphertext, sufficient_randomness = ind_cpa_encrypt p public_key randomness pseudorandomness in
|
||||||
|
(ciphertext,shared_secret), sufficient_randomness
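
For orientation, the encapsulation above follows the FIPS 203 flow: hash the message together with H(ek) under G, split the result into the shared secret and the encryption randomness, then run the CPA encryption. A schematic Python sketch; hash_G, hash_H and cpa_encrypt are hypothetical stand-ins for v_G, v_H and ind_cpa_encrypt:

def encapsulate(public_key, randomness, hash_G, hash_H, cpa_encrypt):
    hashed = hash_G(randomness + hash_H(public_key))   # G(m || H(ek)), 64 bytes
    shared_secret, pseudorandomness = hashed[:32], hashed[32:]
    ciphertext = cpa_encrypt(public_key, randomness, pseudorandomness)
    return ciphertext, shared_secret
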
|
||||||
|
|
||||||
|
|
||||||
|
/// This function implements Algorithm 17 of the
|
||||||
|
/// NIST FIPS 203 specification; this is the MLKEM CCA-KEM decapsulation algorithm.
|
||||||
|
|
||||||
|
val ind_cca_decapsulate (r:rank) (secret_key: t_MLKEMPrivateKey r)
|
||||||
|
(ciphertext: t_MLKEMCiphertext r):
|
||||||
|
t_MLKEMSharedSecret & bool
|
||||||
|
let ind_cca_decapsulate p secret_key ciphertext =
|
||||||
|
let (ind_cpa_secret_key,rest) = split secret_key (v_CPA_PRIVATE_KEY_SIZE p) in
|
||||||
|
let (ind_cpa_public_key,rest) = split rest (v_CPA_PUBLIC_KEY_SIZE p) in
|
||||||
|
let (ind_cpa_public_key_hash,implicit_rejection_value) = split rest v_H_DIGEST_SIZE in
|
||||||
|
|
||||||
|
let decrypted = ind_cpa_decrypt p ind_cpa_secret_key ciphertext in
|
||||||
|
let to_hash = concat decrypted ind_cpa_public_key_hash in
|
||||||
|
let hashed = v_G to_hash in
|
||||||
|
let (success_shared_secret, pseudorandomness) = split hashed v_SHARED_SECRET_SIZE in
|
||||||
|
|
||||||
|
assert (Seq.length implicit_rejection_value = 32);
|
||||||
|
let to_hash = concat implicit_rejection_value ciphertext in
|
||||||
|
let rejection_shared_secret = v_J to_hash in
|
||||||
|
|
||||||
|
let reencrypted, sufficient_randomness = ind_cpa_encrypt p ind_cpa_public_key decrypted pseudorandomness in
|
||||||
|
if reencrypted = ciphertext
|
||||||
|
then success_shared_secret, sufficient_randomness
|
||||||
|
else rejection_shared_secret, sufficient_randomness
|
||||||
|
|
||||||
|
val ind_cca_unpack_public_key (r:rank) (public_key: t_MLKEMPublicKey r) :
|
||||||
|
t_Array u8 (sz 32) & (t_Array u8 (sz 32) & (vector r & (matrix r & bool)))
|
||||||
|
let ind_cca_unpack_public_key p public_key =
|
||||||
|
let (ring_elements, seed) = split public_key (v_T_AS_NTT_ENCODED_SIZE p) in
|
||||||
|
let deserialized_pk = vector_decode_12 #p ring_elements in
|
||||||
|
let (matrix_A, sufficient_randomness) = sample_matrix_A_ntt seed in
|
||||||
|
let matrix_A = matrix_transpose #p matrix_A in
|
||||||
|
let public_key_hash = v_H public_key in
|
||||||
|
public_key_hash, (seed, (deserialized_pk, (matrix_A, sufficient_randomness)))
|
||||||
|
|
||||||
|
let matrix_A_as_ntt_j (#r:rank) (matrix_A_as_ntt:matrix r) (i:usize{i <. r}) (j:usize{j <. r}) : polynomial =
|
||||||
|
Seq.index (Seq.index matrix_A_as_ntt (v j)) (v i)
|
||||||
|
|
||||||
|
let matrix_A_as_ntt_i (#r:rank) (matrix_A_as_ntt:matrix r) (i:usize{i <. r}) : vector r =
|
||||||
|
createi r (matrix_A_as_ntt_j matrix_A_as_ntt i)
|
||||||
|
|
||||||
|
val ind_cca_unpack_generate_keypair (r:rank) (randomness:t_Array u8 v_KEY_GENERATION_SEED_SIZE) :
|
||||||
|
((matrix r & t_Array u8 (sz 32)) & t_Array u8 (sz 32)) & bool
|
||||||
|
let ind_cca_unpack_generate_keypair p randomness =
|
||||||
|
let (ind_cpa_keypair_randomness, implicit_rejection_value) = split randomness v_CPA_KEY_GENERATION_SEED_SIZE in
|
||||||
|
let ((((t_as_ntt,seed_for_A), matrix_A_as_ntt), secret_as_ntt), sufficient_randomness) =
|
||||||
|
ind_cpa_generate_keypair_unpacked p ind_cpa_keypair_randomness in
|
||||||
|
// let m_A =
|
||||||
|
// createi p (fun i ->
|
||||||
|
// createi p (fun j ->
|
||||||
|
// Seq.index (Seq.index matrix_A_as_ntt j) i
|
||||||
|
// ))
|
||||||
|
// in
|
||||||
|
let m_A = createi p (matrix_A_as_ntt_i matrix_A_as_ntt) in
|
||||||
|
let pk_serialized = Seq.append (vector_encode_12 t_as_ntt) seed_for_A in
|
||||||
|
let public_key_hash = v_H pk_serialized in
|
||||||
|
((m_A, public_key_hash), implicit_rejection_value), sufficient_randomness
|
||||||
|
|
||||||
|
val ind_cca_unpack_encapsulate (r:rank) (public_key_hash:t_Array u8 (sz 32))
|
||||||
|
(t_as_ntt:vector r)
|
||||||
|
(matrix_A_as_ntt:matrix r)
|
||||||
|
(randomness:t_Array u8 v_SHARED_SECRET_SIZE) :
|
||||||
|
(t_MLKEMCiphertext r & t_Array u8 v_SHARED_SECRET_SIZE)
|
||||||
|
let ind_cca_unpack_encapsulate r public_key_hash t_as_ntt matrix_A_as_ntt randomness =
|
||||||
|
let to_hash = concat randomness public_key_hash in
|
||||||
|
let hashed = v_G to_hash in
|
||||||
|
let (shared_secret, pseudorandomness) = split hashed v_SHARED_SECRET_SIZE in
|
||||||
|
let ciphertext = ind_cpa_encrypt_unpacked r randomness pseudorandomness t_as_ntt matrix_A_as_ntt in
|
||||||
|
ciphertext, shared_secret
|
||||||
|
|
||||||
|
val ind_cca_unpack_decapsulate (r:rank) (public_key_hash:t_Array u8 (sz 32))
|
||||||
|
(implicit_rejection_value:t_Array u8 (sz 32))
|
||||||
|
(ciphertext: t_MLKEMCiphertext r)
|
||||||
|
(secret_as_ntt:vector r)
|
||||||
|
(t_as_ntt:vector r)
|
||||||
|
(matrix_A_as_ntt:matrix r) :
|
||||||
|
t_Array u8 v_SHARED_SECRET_SIZE
|
||||||
|
let ind_cca_unpack_decapsulate r public_key_hash implicit_rejection_value ciphertext secret_as_ntt t_as_ntt matrix_A_as_ntt =
|
||||||
|
let decrypted = ind_cpa_decrypt_unpacked r ciphertext secret_as_ntt in
|
||||||
|
let to_hash = concat decrypted public_key_hash in
|
||||||
|
let hashed = v_G to_hash in
|
||||||
|
let (shared_secret, pseudorandomness) = split hashed v_SHARED_SECRET_SIZE in
|
||||||
|
let to_hash:t_Array u8 (v_IMPLICIT_REJECTION_HASH_INPUT_SIZE r) = concat implicit_rejection_value ciphertext in
|
||||||
|
let implicit_rejection_shared_secret = v_PRF v_SHARED_SECRET_SIZE to_hash in
|
||||||
|
let expected_ciphertext = ind_cpa_encrypt_unpacked r decrypted pseudorandomness t_as_ntt matrix_A_as_ntt in
|
||||||
|
if ciphertext = expected_ciphertext
|
||||||
|
then shared_secret
|
||||||
|
else implicit_rejection_shared_secret
|
||||||
248 proofs/fstar/models/Spec.Utils.fsti Normal file
@@ -0,0 +1,248 @@
|
||||||
|
module Spec.Utils
|
||||||
|
#set-options "--fuel 0 --ifuel 1 --z3rlimit 100"
|
||||||
|
open FStar.Mul
|
||||||
|
open Core
|
||||||
|
|
||||||
|
(** Utils *)
|
||||||
|
let map_slice #a #b
|
||||||
|
(f:a -> b)
|
||||||
|
(s: t_Slice a)
|
||||||
|
= createi (length s) (fun i -> f (Seq.index s (v i)))
|
||||||
|
|
||||||
|
let map_array #a #b #len
|
||||||
|
(f:a -> b)
|
||||||
|
(s: t_Array a len)
|
||||||
|
= createi (length s) (fun i -> f (Seq.index s (v i)))
|
||||||
|
|
||||||
|
let map2 #a #b #c #len
|
||||||
|
(f:a -> b -> c)
|
||||||
|
(x: t_Array a len) (y: t_Array b len)
|
||||||
|
= createi (length x) (fun i -> f (Seq.index x (v i)) (Seq.index y (v i)))
|
||||||
|
|
||||||
|
let create len c = createi len (fun i -> c)
|
||||||
|
|
||||||
|
let repeati #acc (l:usize) (f:(i:usize{v i < v l}) -> acc -> acc) acc0 : acc = Lib.LoopCombinators.repeati (v l) (fun i acc -> f (sz i) acc) acc0
|
||||||
|
|
||||||
|
let createL len l = Rust_primitives.Hax.array_of_list len l
|
||||||
|
|
||||||
|
let create16 v15 v14 v13 v12 v11 v10 v9 v8 v7 v6 v5 v4 v3 v2 v1 v0 =
|
||||||
|
let l = [v15; v14; v13; v12; v11; v10; v9; v8; v7; v6; v5; v4; v3; v2; v1; v0] in
|
||||||
|
assert_norm (List.Tot.length l == 16);
|
||||||
|
createL 16 l
|
||||||
|
|
||||||
|
val lemma_createL_index #a len l i :
|
||||||
|
Lemma (Seq.index (createL #a len l) i == List.Tot.index l i)
|
||||||
|
[SMTPat (Seq.index (createL #a len l) i)]
|
||||||
|
|
||||||
|
val lemma_create16_index #a v15 v14 v13 v12 v11 v10 v9 v8 v7 v6 v5 v4 v3 v2 v1 v0 i :
|
||||||
|
Lemma (Seq.index (create16 #a v15 v14 v13 v12 v11 v10 v9 v8 v7 v6 v5 v4 v3 v2 v1 v0) i ==
|
||||||
|
(if i = 0 then v15 else
|
||||||
|
if i = 1 then v14 else
|
||||||
|
if i = 2 then v13 else
|
||||||
|
if i = 3 then v12 else
|
||||||
|
if i = 4 then v11 else
|
||||||
|
if i = 5 then v10 else
|
||||||
|
if i = 6 then v9 else
|
||||||
|
if i = 7 then v8 else
|
||||||
|
if i = 8 then v7 else
|
||||||
|
if i = 9 then v6 else
|
||||||
|
if i = 10 then v5 else
|
||||||
|
if i = 11 then v4 else
|
||||||
|
if i = 12 then v3 else
|
||||||
|
if i = 13 then v2 else
|
||||||
|
if i = 14 then v1 else
|
||||||
|
if i = 15 then v0))
|
||||||
|
[SMTPat (Seq.index (create16 #a v15 v14 v13 v12 v11 v10 v9 v8 v7 v6 v5 v4 v3 v2 v1 v0) i)]
|
||||||
|
|
||||||
|
val lemma_createi_index #a len f i :
|
||||||
|
Lemma (Seq.index (createi #a len f) i == f (sz i))
|
||||||
|
[SMTPat (Seq.index (createi #a len f) i)]
|
||||||
|
|
||||||
|
val lemma_create_index #a len c i:
|
||||||
|
Lemma (Seq.index (create #a len c) i == c)
|
||||||
|
[SMTPat (Seq.index (create #a len c) i)]
|
||||||
|
|
||||||
|
val lemma_bitand_properties #t (x:int_t t) :
|
||||||
|
Lemma ((x &. ones) == x /\ (x &. mk_int #t 0) == mk_int #t 0 /\ (ones #t &. x) == x /\ (mk_int #t 0 &. x) == mk_int #t 0)
|
||||||
|
|
||||||
|
#push-options "--z3rlimit 15"
|
||||||
|
let flatten #t #n
|
||||||
|
(#m: usize {range (v n * v m) usize_inttype})
|
||||||
|
(x: t_Array (t_Array t m) n)
|
||||||
|
: t_Array t (m *! n)
|
||||||
|
= createi (m *! n) (fun i -> Seq.index (Seq.index x (v i / v m)) (v i % v m))
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
type t_Error = | Error_RejectionSampling : t_Error
|
||||||
|
|
||||||
|
type t_Result a b =
|
||||||
|
| Ok: a -> t_Result a b
|
||||||
|
| Err: b -> t_Result a b
|
||||||
|
|
||||||
|
val v_G (input: t_Slice u8) : t_Array u8 (sz 64)
|
||||||
|
val v_H (input: t_Slice u8) : t_Array u8 (sz 32)
|
||||||
|
val v_PRF (v_LEN: usize{v v_LEN < pow2 32}) (input: t_Slice u8) : t_Array u8 v_LEN
|
||||||
|
|
||||||
|
val v_PRFxN (r:usize{v r == 2 \/ v r == 3 \/ v r == 4}) (v_LEN: usize{v v_LEN < pow2 32})
|
||||||
|
(input: t_Array (t_Array u8 (sz 33)) r) : t_Array (t_Array u8 v_LEN) r
|
||||||
|
|
||||||
|
val v_J (input: t_Slice u8) : t_Array u8 (sz 32)
|
||||||
|
|
||||||
|
val v_XOF (v_LEN: usize{v v_LEN < pow2 32}) (input: t_Slice u8) : t_Array u8 v_LEN
|
||||||
|
|
||||||
|
val update_at_range_lemma #n
|
||||||
|
(s: t_Slice 't)
|
||||||
|
(i: Core.Ops.Range.t_Range (int_t n) {(Core.Ops.Range.impl_index_range_slice 't n).f_index_pre s i})
|
||||||
|
(x: t_Slice 't)
|
||||||
|
: Lemma
|
||||||
|
(requires (Seq.length x == v i.f_end - v i.f_start))
|
||||||
|
(ensures (
|
||||||
|
let s' = Rust_primitives.Hax.Monomorphized_update_at.update_at_range s i x in
|
||||||
|
let len = v i.f_start in
|
||||||
|
forall (i: nat). i < len ==> Seq.index s i == Seq.index s' i
|
||||||
|
))
|
||||||
|
[SMTPat (Rust_primitives.Hax.Monomorphized_update_at.update_at_range s i x)]
|
||||||
|
|
||||||
|
/// Bounded integers
|
||||||
|
|
||||||
|
let is_intb (l:nat) (x:int) = (x <= l) && (x >= -l)
|
||||||
|
let is_i16b (l:nat) (x:i16) = is_intb l (v x)
|
||||||
|
let is_i16b_array (l:nat) (x:t_Slice i16) = forall i. i < Seq.length x ==> is_i16b l (Seq.index x i)
|
||||||
|
let is_i16b_vector (l:nat) (r:usize) (x:t_Array (t_Array i16 (sz 256)) r) = forall i. i < v r ==> is_i16b_array l (Seq.index x i)
|
||||||
|
let is_i16b_matrix (l:nat) (r:usize) (x:t_Array (t_Array (t_Array i16 (sz 256)) r) r) = forall i. i < v r ==> is_i16b_vector l r (Seq.index x i)
|
||||||
|
|
||||||
|
[@ "opaque_to_smt"]
|
||||||
|
let is_i16b_array_opaque (l:nat) (x:t_Slice i16) = is_i16b_array l x
|
||||||
|
|
||||||
|
let is_i32b (l:nat) (x:i32) = is_intb l (v x)
|
||||||
|
let is_i32b_array (l:nat) (x:t_Slice i32) = forall i. i < Seq.length x ==> is_i32b l (Seq.index x i)
|
||||||
|
|
||||||
|
let is_i64b (l:nat) (x:i64) = is_intb l (v x)
|
||||||
|
|
||||||
|
let nat_div_ceil (x:nat) (y:pos) : nat = if (x % y = 0) then x/y else (x/y)+1
|
||||||
|
|
||||||
|
val lemma_intb_le b b'
|
||||||
|
: Lemma (requires (b <= b'))
|
||||||
|
(ensures (forall n. is_intb b n ==> is_intb b' n))
|
||||||
|
|
||||||
|
#push-options "--z3rlimit 200"
|
||||||
|
val lemma_mul_intb (b1 b2: nat) (n1 n2: int)
|
||||||
|
: Lemma (requires (is_intb b1 n1 /\ is_intb b2 n2))
|
||||||
|
(ensures (is_intb (b1 * b2) (n1 * n2)))
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
#push-options "--z3rlimit 200"
|
||||||
|
val lemma_mul_i16b (b1 b2: nat) (n1 n2: i16)
|
||||||
|
: Lemma (requires (is_i16b b1 n1 /\ is_i16b b2 n2 /\ b1 * b2 < pow2 31))
|
||||||
|
(ensures (range (v n1 * v n2) i32_inttype /\
|
||||||
|
is_i32b (b1 * b2) ((cast n1 <: i32) *! (cast n2 <: i32)) /\
|
||||||
|
v ((cast n1 <: i32) *! (cast n2 <: i32)) == v n1 * v n2))
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
#push-options "--z3rlimit 200"
|
||||||
|
val lemma_mul_i32b (b1 b2: nat) (n1 n2: i32)
|
||||||
|
: Lemma (requires (is_i32b b1 n1 /\ is_i32b b2 n2 /\ b1 * b2 < pow2 63))
|
||||||
|
(ensures (range (v n1 * v n2) i64_inttype /\
|
||||||
|
is_i64b (b1 * b2) ((cast n1 <: i64) *! (cast n2 <: i64)) /\
|
||||||
|
v ((cast n1 <: i64) *! (cast n2 <: i64)) == v n1 * v n2))
|
||||||
|
#pop-options
|
||||||
|
|
||||||
|
val lemma_add_i16b (b1 b2:nat) (n1 n2:i16) :
|
||||||
|
Lemma (requires (is_i16b b1 n1 /\ is_i16b b2 n2 /\ b1 + b2 < pow2 15))
|
||||||
|
(ensures (range (v n1 + v n2) i16_inttype /\
|
||||||
|
is_i16b (b1 + b2) (n1 +! n2)))
|
||||||
|
|
||||||
|
val lemma_range_at_percent (v:int) (p:int{p>0/\ p%2=0 /\ v < p/2 /\ v >= -p / 2}):
|
||||||
|
Lemma (v @% p == v)
|
||||||
|
|
||||||
|
val lemma_sub_i16b (b1 b2:nat) (n1 n2:i16) :
|
||||||
|
Lemma (requires (is_i16b b1 n1 /\ is_i16b b2 n2 /\ b1 + b2 < pow2 15))
|
||||||
|
(ensures (range (v n1 - v n2) i16_inttype /\
|
||||||
|
is_i16b (b1 + b2) (n1 -. n2) /\
|
||||||
|
v (n1 -. n2) == v n1 - v n2))
|
||||||
|
|
||||||
|
let mont_mul_red_i16 (x:i16) (y:i16) : i16 =
  let vlow = x *. y in
  let k = vlow *. (neg (mk_i16 3327)) in
  let k_times_modulus = cast (((cast k <: i32) *. (mk_i32 3329)) >>! (mk_i32 16)) <: i16 in
  let vhigh = cast (((cast x <: i32) *. (cast y <: i32)) >>! (mk_i32 16)) <: i16 in
  vhigh -. k_times_modulus

let mont_red_i32 (x:i32) : i16 =
  let vlow = cast x <: i16 in
  let k = vlow *. (neg (mk_i16 3327)) in
  let k_times_modulus = cast (((cast k <: i32) *. (mk_i32 3329)) >>! (mk_i32 16)) <: i16 in
  let vhigh = cast (x >>! (mk_i32 16)) <: i16 in
  vhigh -. k_times_modulus
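
mont_red_i32 is a signed Montgomery reduction: the output is congruent to x * 2^-16 (mod 3329), since 3329 * 3327 == -1 (mod 2^16) makes -3327 the inverse of q modulo 2^16, and 169 == 2^-16 (mod 3329). A minimal Python sketch with exact integers standing in for the wrapping i16/i32 arithmetic:

Q = 3329
QINV = -3327                       # q^-1 mod 2^16, as a signed value

def i16(x):                        # signed 16-bit representative in [-2^15, 2^15)
    return (x + 2**15) % 2**16 - 2**15

def mont_red(x):                   # |x| < 3328 * 2^16
    k = i16(i16(x) * QINV)         # low half times q^-1, wrapped to i16
    return (x >> 16) - ((k * Q) >> 16)

assert mont_red(1234 * 5678) % Q == (1234 * 5678 * 169) % Q
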
val lemma_at_percent_mod (v:int) (p:int{p>0/\ p%2=0}):
|
||||||
|
Lemma ((v @% p) % p == v % p)
|
||||||
|
|
||||||
|
val lemma_div_at_percent (v:int) (p:int{p>0/\ p%2=0 /\ (v/p) < p/2 /\ (v/p) >= -p / 2}):
|
||||||
|
Lemma ((v / p) @% p == v / p)
|
||||||
|
|
||||||
|
val lemma_mont_red_i32 (x:i32): Lemma
|
||||||
|
(requires (is_i32b (3328 * pow2 16) x))
|
||||||
|
(ensures (
|
||||||
|
let result:i16 = mont_red_i32 x in
|
||||||
|
is_i16b (3328 + 1665) result /\
|
||||||
|
(is_i32b (3328 * pow2 15) x ==> is_i16b 3328 result) /\
|
||||||
|
v result % 3329 == (v x * 169) % 3329))
|
||||||
|
|
||||||
|
val lemma_mont_mul_red_i16_int (x y:i16): Lemma
|
||||||
|
(requires (is_intb (3326 * pow2 15) (v x * v y)))
|
||||||
|
(ensures (
|
||||||
|
let result:i16 = mont_mul_red_i16 x y in
|
||||||
|
is_i16b 3328 result /\
|
||||||
|
v result % 3329 == (v x * v y * 169) % 3329))
|
||||||
|
|
||||||
|
val lemma_mont_mul_red_i16 (x y:i16): Lemma
|
||||||
|
(requires (is_i16b 1664 y \/ is_intb (3326 * pow2 15) (v x * v y)))
|
||||||
|
(ensures (
|
||||||
|
let result:i16 = mont_mul_red_i16 x y in
|
||||||
|
is_i16b 3328 result /\
|
||||||
|
v result % 3329 == (v x * v y * 169) % 3329))
|
||||||
|
[SMTPat (mont_mul_red_i16 x y)]
|
||||||
|
|
||||||
|
let barrett_red (x:i16) =
  let t1 = cast (((cast x <: i32) *. (cast (mk_i16 20159) <: i32)) >>! (mk_i32 16)) <: i16 in
  let t2 = t1 +. (mk_i16 512) in
  let q = t2 >>! (mk_i32 10) in
  let qm = q *. (mk_i16 3329) in
  x -. qm

val lemma_barrett_red (x:i16) : Lemma
  (requires (is_i16b 28296 x))
  (ensures (let result = barrett_red x in
            is_i16b 3328 result /\
            v result % 3329 == v x % 3329))
  [SMTPat (barrett_red x)]
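
barrett_red estimates round(x / q) with the constant 20159 (approximately 2^26 / 3329) and subtracts that many multiples of q. A minimal Python sketch that exercises the bound stated in lemma_barrett_red:

Q = 3329

def barrett_red(x):                 # x: signed 16-bit value
    t = (x * 20159) >> 16           # ~ x * 2^10 / q
    quotient = (t + 512) >> 10      # ~ round(x / q)
    return x - quotient * Q

for x in range(-28296, 28297):
    r = barrett_red(x)
    assert r % Q == x % Q and -3328 <= r <= 3328
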
let cond_sub (x:i16) =
|
||||||
|
let xm = x -. (mk_i16 3329) in
|
||||||
|
let mask = xm >>! (mk_i32 15) in
|
||||||
|
let mm = mask &. (mk_i16 3329) in
|
||||||
|
xm +. mm
|
||||||
|
|
||||||
|
val lemma_cond_sub x:
|
||||||
|
Lemma (let r = cond_sub x in
|
||||||
|
if x >=. (mk_i16 3329) then r == x -! (mk_i16 3329) else r == x)
|
||||||
|
[SMTPat (cond_sub x)]
|
||||||
|
|
||||||
|
val lemma_shift_right_15_i16 (x:i16):
|
||||||
|
Lemma (if v x >= 0 then (x >>! (mk_i32 15)) == mk_i16 0 else (x >>! (mk_i32 15)) == (mk_i16 (-1)))
|
||||||
|
|
||||||
|
let ntt_spec #len (vec_in: t_Array i16 len) (zeta: int) (i: nat{i < v len}) (j: nat{j < v len})
             (vec_out: t_Array i16 len) : Type0 =
  ((v (Seq.index vec_out i) % 3329) ==
   ((v (Seq.index vec_in i) + (v (Seq.index vec_in j) * zeta * 169)) % 3329)) /\
  ((v (Seq.index vec_out j) % 3329) ==
   ((v (Seq.index vec_in i) - (v (Seq.index vec_in j) * zeta * 169)) % 3329))

let inv_ntt_spec #len (vec_in: t_Array i16 len) (zeta: int) (i: nat{i < v len}) (j: nat{j < v len})
                 (vec_out: t_Array i16 len) : Type0 =
  ((v (Seq.index vec_out i) % 3329) ==
   ((v (Seq.index vec_in j) + v (Seq.index vec_in i)) % 3329)) /\
  ((v (Seq.index vec_out j) % 3329) ==
   (((v (Seq.index vec_in j) - v (Seq.index vec_in i)) * zeta * 169) % 3329))
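
Both predicates describe a single butterfly; the factor 169 = 2^-16 mod 3329 accounts for the Montgomery form in which the implementation multiplies by zeta. A small Python sketch of the relations they constrain:

Q = 3329

def ct_butterfly(a_i, a_j, zeta):        # forward step, as in ntt_spec
    t = a_j * zeta * 169 % Q
    return (a_i + t) % Q, (a_i - t) % Q

def gs_butterfly(a_i, a_j, zeta):        # inverse step, as in inv_ntt_spec
    return (a_j + a_i) % Q, (a_j - a_i) * zeta * 169 % Q
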
|
||||||
|
|
||||||
3 proofs/proverif/README.md Normal file
@@ -0,0 +1,3 @@
|
||||||
|
# ProVerif model for SPQR
|
||||||
|
|
||||||
|
This folder contains a ProVerif model for the SPQR protocol.
|
||||||
44 proofs/proverif/cryptolib.pvl Normal file
@@ -0,0 +1,44 @@
|
||||||
|
(* Minimal generic crypto library *)
|
||||||
|
|
||||||
|
free c: channel.
|
||||||
|
event Reachable.
|
||||||
|
type principal.
|
||||||
|
|
||||||
|
|
||||||
|
type symkey.
|
||||||
|
fun aead_enc(symkey,bitstring,bitstring): bitstring.
|
||||||
|
fun aead_dec(symkey,bitstring,bitstring): bitstring
|
||||||
|
reduc forall k:symkey, m:bitstring, ad:bitstring;
|
||||||
|
aead_dec(k, aead_enc(k,m,ad), ad) = m.
|
||||||
|
|
||||||
|
type seed.
|
||||||
|
type enckey.
|
||||||
|
type deckey.
|
||||||
|
type ctsecret.
|
||||||
|
fun dk2seed(deckey): seed.
|
||||||
|
fun dk2enckey(deckey): enckey.
|
||||||
|
fun pkenc1(ctsecret, seed, symkey): bitstring.
|
||||||
|
fun pkenc2(ctsecret, enckey): bitstring.
|
||||||
|
fun pkdec(deckey, bitstring, bitstring): symkey
|
||||||
|
reduc forall dk:deckey, sk:symkey, r: ctsecret;
|
||||||
|
pkdec(dk, pkenc1(r, dk2seed(dk), sk), pkenc2(r, dk2enckey(dk))) = sk.
|
||||||
|
|
||||||
|
fun extractsecret(ctsecret, bitstring): symkey
|
||||||
|
reduc forall sk: symkey, s: seed, r: ctsecret;
|
||||||
|
extractsecret(r, pkenc1(r, s, sk)) = sk.
|
||||||
|
|
||||||
|
letfun kem_keygen() =
|
||||||
|
new dk: deckey;
|
||||||
|
(dk, dk2seed(dk), dk2enckey(dk)).
|
||||||
|
|
||||||
|
letfun kem_decap(dk: deckey, ct1: bitstring, ct2: bitstring) =
|
||||||
|
pkdec(dk, ct1, ct2).
|
||||||
|
|
||||||
|
type authenticator.
|
||||||
|
fun mac(authenticator, bitstring): bitstring.
|
||||||
|
fun auth_update(authenticator, symkey): authenticator.
|
||||||
|
|
||||||
|
(* hash function *)
|
||||||
|
fun h(bitstring): bitstring.
|
||||||
|
|
||||||
|
fun kdf(symkey, bitstring): symkey.
|
||||||
377 proofs/proverif/spqr-cka.pv Normal file
@@ -0,0 +1,377 @@
|
||||||
|
(* Protocol-specific crypto *)
|
||||||
|
|
||||||
|
free ct_label: bitstring.
|
||||||
|
free hdr_label: bitstring.
|
||||||
|
letfun mac_ct(auth: authenticator, ct1: bitstring, ct2: bitstring) =
|
||||||
|
mac(auth, (ct_label, ct1, ct2)).
|
||||||
|
letfun mac_header(auth: authenticator, ep: nat, ekseed: seed, ek_hash: bitstring) =
|
||||||
|
mac(auth, (hdr_label, ep, ekseed, ek_hash)).
|
||||||
|
|
||||||
|
free cka_label: bitstring.
|
||||||
|
|
||||||
|
(* Protocol Data structures *)
|
||||||
|
|
||||||
|
type opt_symkey.
|
||||||
|
fun SK_None(): opt_symkey [data].
|
||||||
|
fun SK(symkey): opt_symkey [data].
|
||||||
|
|
||||||
|
type opt_mac.
|
||||||
|
fun MAC_None(): opt_mac [data].
|
||||||
|
fun MAC(bitstring): opt_mac [data].
|
||||||
|
|
||||||
|
type opt_keypair.
|
||||||
|
fun KP_None(): opt_keypair [data].
|
||||||
|
fun KP(deckey, seed, enckey): opt_keypair [data].
|
||||||
|
|
||||||
|
(* Requestor/EK Generator States *)
|
||||||
|
type eksender.
|
||||||
|
fun NeedToSample(nat): eksender [data].
|
||||||
|
fun SentHeader(nat, deckey, seed, enckey): eksender [data].
|
||||||
|
fun ReceivedCt1(nat, deckey, seed, enckey, bitstring): eksender [data].
|
||||||
|
fun SentEk(nat, deckey, seed, enckey): eksender [data].
|
||||||
|
fun SentEkReceivedCt1(nat, deckey, seed, enckey, bitstring): eksender [data].
|
||||||
|
|
||||||
|
(* Responder/CT Generator States *)
|
||||||
|
type ctsender.
|
||||||
|
fun Waiting(nat): ctsender [data].
|
||||||
|
fun ReceivedHeader(nat, seed, bitstring): ctsender [data].
|
||||||
|
fun SentCt1(nat, ctsecret, seed, bitstring, bitstring, symkey): ctsender [data].
|
||||||
|
fun SentCt1ReceivedEk(nat, ctsecret, seed, enckey, bitstring, symkey): ctsender [data].
|
||||||
|
fun SentCt2(nat, symkey): ctsender [data].
|
||||||
|
|
||||||
|
reduc forall ep: nat; RequestorEpoch(NeedToSample(ep)) = ep;
|
||||||
|
forall ep: nat, dk: deckey, ekseed: seed, ek: enckey; RequestorEpoch(SentHeader(ep, dk, ekseed, ek)) = ep;
|
||||||
|
forall ep: nat, dk: deckey, ekseed: seed, ek: enckey, ct1: bitstring; RequestorEpoch(ReceivedCt1(ep, dk, ekseed, ek, ct1)) = ep;
|
||||||
|
forall ep: nat, dk: deckey, ekseed: seed, ek: enckey; RequestorEpoch(SentEk(ep, dk, ekseed, ek)) = ep;
|
||||||
|
forall ep: nat, dk: deckey, ekseed: seed, ek: enckey, ct1: bitstring; RequestorEpoch(SentEkReceivedCt1(ep, dk, ekseed, ek, ct1)) = ep
|
||||||
|
.
|
||||||
|
|
||||||
|
(* Requestor/EK Generator Functions *)
|
||||||
|
|
||||||
|
letfun sendHeader(req: eksender, auth: authenticator) =
|
||||||
|
let NeedToSample(ep) = req in
|
||||||
|
let (dk: deckey, ekseed: seed, ek: enckey) = kem_keygen() in
|
||||||
|
let ek_hash = h((ekseed, ek)) in
|
||||||
|
let header_mac = mac_header(auth, ep, ekseed, ek_hash) in
|
||||||
|
(SentHeader(ep, dk, ekseed, ek), (ekseed, ek_hash, header_mac))
|
||||||
|
.
|
||||||
|
|
||||||
|
letfun sendEK(req: eksender, auth: authenticator) =
|
||||||
|
let SentHeader(ep, dk, ekseed, ek) = req in
|
||||||
|
(SentEk(ep, dk, ekseed, ek), ek)
|
||||||
|
else let ReceivedCt1(ep, dk, ekseed, ek, ct1) = req in
|
||||||
|
(SentEkReceivedCt1(ep, dk, ekseed, ek, ct1), ek)
|
||||||
|
.
|
||||||
|
|
||||||
|
letfun recvCT1(req: eksender, ct1: bitstring) =
|
||||||
|
let SentEk(ep, dk, ekseed, ek) = req in
|
||||||
|
SentEkReceivedCt1(ep, dk, ekseed, ek, ct1)
|
||||||
|
else let SentHeader(ep, dk, ekseed, ek) = req in
|
||||||
|
ReceivedCt1(ep, dk, ekseed, ek, ct1)
|
||||||
|
.
|
||||||
|
|
||||||
|
letfun recvCT2(req: eksender, auth: authenticator, ct2: bitstring, ct_mac: bitstring) =
|
||||||
|
let SentEkReceivedCt1(ep, dk, ekseed, ek, ct1) = req in
|
||||||
|
let ss = kem_decap(dk, ct1, ct2) in
|
||||||
|
let k = kdf(ss, (h((ekseed,ek)), ep, cka_label)) in
|
||||||
|
let new_auth = auth_update(auth, k) in
|
||||||
|
if mac_ct(new_auth, ct1, ct2) = ct_mac then
|
||||||
|
(Waiting(ep+1), (new_auth, ep, k))
|
||||||
|
.
|
||||||
|
|
||||||
|
reduc forall ep: nat; ResponderEpoch(Waiting(ep)) = ep;
|
||||||
|
forall ep: nat, ekseed: seed, ek_hash: bitstring; ResponderEpoch(ReceivedHeader(ep, ekseed, ek_hash)) = ep;
|
||||||
|
forall ep: nat, r: ctsecret, ekseed: seed, ek_hash: bitstring, ct1: bitstring, k: symkey; ResponderEpoch(SentCt1(ep, r, ekseed, ek_hash, ct1, k)) = ep;
|
||||||
|
forall ep: nat, r: ctsecret, ekseed: seed, ek: enckey, ct1: bitstring, k: symkey; ResponderEpoch(SentCt1ReceivedEk(ep, r, ekseed, ek, ct1, k)) = ep;
|
||||||
|
forall ep: nat, k: symkey; ResponderEpoch(SentCt2(ep, k)) = ep
|
||||||
|
.
|
||||||
|
|
||||||
|
letfun recvHeader(rsp: ctsender, auth: authenticator, ekseed: seed, ek_hash: bitstring, header_mac: bitstring) =
|
||||||
|
let Waiting(ep) = rsp in
|
||||||
|
if mac_header(auth, ep, ekseed, ek_hash) = header_mac then
|
||||||
|
ReceivedHeader(ep, ekseed, ek_hash)
|
||||||
|
.
|
||||||
|
|
||||||
|
letfun sendCT1(rsp: ctsender, auth: authenticator) =
|
||||||
|
let ReceivedHeader(ep, ekseed, ek_hash) = rsp in
|
||||||
|
new r: ctsecret;
|
||||||
|
new sk: symkey;
|
||||||
|
let ct1 = pkenc1(r, ekseed, sk) in
|
||||||
|
let k = kdf(sk, (ek_hash, ep, cka_label)) in
|
||||||
|
let new_auth = auth_update(auth, k) in
|
||||||
|
(SentCt1(ep, r, ekseed, ek_hash, ct1, k), (new_auth, ct1))
|
||||||
|
.
|
||||||
|
|
||||||
|
(* we can probably get rid of SentCt1ReceivedEk and go straight to SentCt2 here *)
|
||||||
|
letfun recvEK(rsp: ctsender, auth: authenticator, ek: enckey) =
|
||||||
|
let SentCt1(ep, r, ekseed, ek_hash, ct1, k) = rsp in
|
||||||
|
if ek_hash = h((ekseed, ek)) then
|
||||||
|
SentCt1ReceivedEk(ep, r, ekseed, ek, ct1, k)
|
||||||
|
.
|
||||||
|
|
||||||
|
letfun sendCT2(rsp: ctsender, auth: authenticator) =
|
||||||
|
let SentCt1ReceivedEk(ep, r, ekseed, ek, ct1, k) = rsp in
|
||||||
|
let ct2: bitstring = pkenc2(r, ek) in
|
||||||
|
let ct_mac = mac_ct(auth, ct1, ct2) in
|
||||||
|
(SentCt2(ep, k), (ct2, ct_mac))
|
||||||
|
.
|
||||||
|
|
||||||
|
letfun takeResponderKey(rsp: ctsender) =
|
||||||
|
let SentCt2(ep, k) = rsp in
|
||||||
|
(NeedToSample(ep+1), (ep,k))
|
||||||
|
.
|
||||||
|
|
||||||
|
(* Main processes *)
|
||||||
|
|
||||||
|
free A: principal.
|
||||||
|
free B: principal.
|
||||||
|
|
||||||
|
table AStates(principal, principal, eksender, authenticator).
|
||||||
|
table BStates(principal, principal, ctsender, authenticator).
|
||||||
|
|
||||||
|
event StartedA(principal, principal, nat, seed).
|
||||||
|
event CompletedA(principal, principal, nat, symkey).
|
||||||
|
|
||||||
|
event StartedB(principal, principal, nat, seed).
|
||||||
|
event CompletedB(principal, principal, nat, symkey).
|
||||||
|
|
||||||
|
letfun max_epoch() = 5.
|
||||||
|
|
||||||
|
let SendEk0() =
|
||||||
|
get AStates(a, b, req, auth) in
|
||||||
|
let (req': eksender, (ekseed: seed, ek_hash: bitstring, header_mac: bitstring)) = sendHeader(req, auth) in
|
||||||
|
let ep = RequestorEpoch(req') in
|
||||||
|
event StartedA(a, b, ep, ekseed);
|
||||||
|
out(c, (ekseed, ek_hash, header_mac));
|
||||||
|
insert AStates(a, b, req', auth).
|
||||||
|
|
||||||
|
let SendEk1a() =
|
||||||
|
get AStates(a, b, req, auth) in
|
||||||
|
in (c, ct1: bitstring);
|
||||||
|
let req' = recvCT1(req, ct1) in
|
||||||
|
insert AStates(a, b, req', auth)
|
||||||
|
.
|
||||||
|
|
||||||
|
let SendEk1b() =
|
||||||
|
get AStates(a, b, req, auth) in
|
||||||
|
let (req': eksender, ek: enckey) = sendEK(req, auth) in
|
||||||
|
out(c, ek);
|
||||||
|
insert AStates(a, b, req', auth)
|
||||||
|
.
|
||||||
|
|
||||||
|
let SendEk2() =
|
||||||
|
get AStates(a, b, req, auth) in
|
||||||
|
in (c, (ct2: bitstring, ct_mac: bitstring));
|
||||||
|
let (req': ctsender,
|
||||||
|
(new_auth: authenticator,
|
||||||
|
ep: nat,
|
||||||
|
k: symkey)) = recvCT2(req, auth, ct2, ct_mac) in
|
||||||
|
event CompletedA(a, b, ep, k);
|
||||||
|
if ep < max_epoch() then insert BStates(a, b, req', new_auth).
|
||||||
|
|
||||||
|
let SendEkProc() =
|
||||||
|
SendEk0() | SendEk1a() | SendEk1b() | SendEk2()
|
||||||
|
.
|
||||||
|
|
||||||
|
let SendCt0() =
|
||||||
|
get BStates(b, a, rsp, auth) in
|
||||||
|
let ep = ResponderEpoch(rsp) in
|
||||||
|
in(c, (ekseed: seed, ek_hash: bitstring, header_mac: bitstring));
|
||||||
|
event StartedB(b, a, ep, ekseed);
|
||||||
|
let rsp' = recvHeader(rsp, auth, ekseed, ek_hash, header_mac) in
|
||||||
|
insert BStates(b, a, rsp', auth)
|
||||||
|
.
|
||||||
|
|
||||||
|
let SendCt1() =
|
||||||
|
get BStates(b, a, rsp, auth) in
|
||||||
|
let (rsp': ctsender, (new_auth: authenticator, ct1: bitstring)) = sendCT1(rsp, auth) in
|
||||||
|
out (c, ct1);
|
||||||
|
insert BStates(b, a, rsp', new_auth)
|
||||||
|
.
|
||||||
|
|
||||||
|
let SendCt2() =
|
||||||
|
get BStates(b, a, rsp, auth) in
|
||||||
|
in(c, ek: enckey);
|
||||||
|
let rsp': ctsender = recvEK(rsp, auth, ek) in
|
||||||
|
insert BStates(b, a, rsp', auth)
|
||||||
|
.
|
||||||
|
|
||||||
|
let SendCt3() =
|
||||||
|
get BStates(b, a, rsp, auth) in
|
||||||
|
let (rsp': ctsender, (ct2: bitstring, ct_mac: bitstring)) = sendCT2(rsp, auth) in
|
||||||
|
out(c, (ct2, ct_mac));
|
||||||
|
let (rsp'': eksender, (ep: nat, k: symkey)) = takeResponderKey(rsp') in
|
||||||
|
event CompletedB(b, a, ep, k);
|
||||||
|
if ep < max_epoch() then insert AStates(b, a, rsp'', auth)
|
||||||
|
.
|
||||||
|
|
||||||
|
let SendCtProc() =
|
||||||
|
SendCt0() | SendCt1() | SendCt2() | SendCt3()
|
||||||
|
.
|
||||||
|
|
||||||
|
(* Compromise Scenarios *)
|
||||||
|
event CompromisedKeysA(principal, principal, nat).
|
||||||
|
let CompromiseKeysA(a: principal, b:principal, ep:nat) =
|
||||||
|
(get AStates(=a, =b, req, auth) in
|
||||||
|
let SentHeader(=ep, dk, ekseed, ek) = req in
|
||||||
|
event CompromisedKeysA(a,b,ep);
|
||||||
|
out(c,dk)
|
||||||
|
else let ReceivedCt1(=ep, dk, ekseed, ek, ct1) = req in
|
||||||
|
event CompromisedKeysA(a,b,ep);
|
||||||
|
out(c,dk)
|
||||||
|
else let SentEk(=ep, dk, ekseed, ek) = req in
|
||||||
|
event CompromisedKeysA(a,b,ep);
|
||||||
|
out(c,dk)
|
||||||
|
else let SentEkReceivedCt1(=ep, dk, ekseed, ek, ct1) = req in
|
||||||
|
event CompromisedKeysA(a,b,ep);
|
||||||
|
out(c,dk))
|
||||||
|
.
|
||||||
|
|
||||||
|
event CompromisedAuthA(principal, principal, nat).
|
||||||
|
let CompromiseAuthA(a: principal, b:principal, ep:nat) =
|
||||||
|
(get AStates(=a, =b, req, auth) in
|
||||||
|
if ep = RequestorEpoch(req) then (
|
||||||
|
event CompromisedAuthA(a,b,ep);
|
||||||
|
out(c,auth)))
|
||||||
|
.
|
||||||
|
|
||||||
|
event CompromisedKeysB(principal, principal, nat).
|
||||||
|
let CompromiseKeysB(a: principal, b:principal, ep:nat) =
|
||||||
|
(get BStates(=a, =b, rsp, auth) in
|
||||||
|
let SentCt1(=ep, r, ekseed, ek_hash, ct1, k) = rsp in
|
||||||
|
event CompromisedKeysB(a,b,ep);
|
||||||
|
out(c,(r,k))
|
||||||
|
else let SentCt1ReceivedEk(=ep, r, ekseed, ek, ct1, k) = rsp in
|
||||||
|
event CompromisedKeysB(a,b,ep);
|
||||||
|
out(c,(r,k))
|
||||||
|
else let SentCt2(=ep, k) = rsp in
|
||||||
|
event CompromisedKeysB(a,b,ep);
|
||||||
|
out(c,k))
|
||||||
|
.
|
||||||
|
|
||||||
|
event CompromisedAuthB(principal, principal, nat).
|
||||||
|
let CompromiseAuthB(a: principal, b: principal, ep: nat) =
|
||||||
|
(get BStates(=a, =b, rsp, auth) in
|
||||||
|
if ep = ResponderEpoch(rsp) then (
|
||||||
|
event CompromisedAuthB(a,b,ep);
|
||||||
|
out(c,auth)))
|
||||||
|
.
|
||||||
|
|
||||||
|
(* Security Queries *)
|
||||||
|
|
||||||
|
(* Reachability Queries *)
|
||||||
|
|
||||||
|
query ep:nat, ek:seed;
|
||||||
|
event(StartedA(A,B,4,ek));
|
||||||
|
event(StartedB(B,A,4,ek))
|
||||||
|
.
|
||||||
|
|
||||||
|
query ep:nat, sk:symkey;
|
||||||
|
event(CompletedA(A,B,4,sk));
|
||||||
|
event(CompletedB(B,A,4,sk))
|
||||||
|
.
|
||||||
|
|
||||||
|
query ep:nat, ek:seed;
|
||||||
|
event(StartedA(B,A,3,ek));
|
||||||
|
event(StartedB(A,B,3,ek))
|
||||||
|
.
|
||||||
|
|
||||||
|
query ep:nat, sk:symkey;
|
||||||
|
event(CompletedA(B,A,3,sk));
|
||||||
|
event(CompletedB(A,B,3,sk))
|
||||||
|
.
|
||||||
|
|
||||||
|
(* Confidentiality Queries *)
|
||||||
|
|
||||||
|
query ep:nat, sk:symkey, ep_:nat, x:principal, y:principal;
|
||||||
|
event(CompletedA(A,B,0,sk)) && attacker(sk);
|
||||||
|
event(CompletedB(B,A,0,sk)) && attacker(sk);
|
||||||
|
event(CompletedA(B,A,1,sk)) && attacker(sk);
|
||||||
|
event(CompletedB(A,B,1,sk)) && attacker(sk);
|
||||||
|
event(CompletedA(A,B,2,sk)) && attacker(sk);
|
||||||
|
event(CompletedB(B,A,2,sk)) && attacker(sk);
|
||||||
|
event(CompletedA(B,A,3,sk)) && attacker(sk);
|
||||||
|
event(CompletedB(A,B,3,sk)) && attacker(sk);
|
||||||
|
|
||||||
|
event(CompletedA(x,y,ep,sk)) && attacker(sk) ==>
|
||||||
|
event(CompromisedKeysB(y,x,ep));
|
||||||
|
event(CompletedA(x,y,ep,sk)) && attacker(sk) ==>
|
||||||
|
event(CompromisedKeysA(x,y,ep));
|
||||||
|
event(CompletedB(x,y,ep,sk)) && attacker(sk) ==>
|
||||||
|
event(CompromisedKeysB(x,y,ep));
|
||||||
|
event(CompletedB(x,y,ep,sk)) && attacker(sk) ==>
|
||||||
|
event(CompromisedKeysA(y,x,ep));
|
||||||
|
|
||||||
|
event(CompletedA(x,y,ep,sk)) && attacker(sk) ==>
|
||||||
|
(event(CompromisedKeysA(x,y,ep)) || event(CompromisedKeysB(y,x,ep)));
|
||||||
|
event(CompletedB(x,y,ep,sk)) && attacker(sk) ==>
|
||||||
|
(event(CompromisedKeysB(x,y,ep)) || event(CompromisedKeysA(y,x,ep)));
|
||||||
|
|
||||||
|
(* An epoch key can be known to the attacker if either the states in that
|
||||||
|
epoch were compromised, or the MAC key or some prior epoch was compromised.
|
||||||
|
Compromising later keys has no effect. This encodes Forward Secrecy. *)
|
||||||
|
(* Furthermore, since we compromise all authentication keys in phase 1,
|
||||||
|
this also encodes post-compromise security *)
|
||||||
|
|
||||||
|
|
||||||
|
  event(CompletedA(x,y,ep,sk))@i && attacker(sk) ==>
    (event(CompromisedKeysA(x,y,ep)) || event(CompromisedKeysB(y,x,ep)) ||
     (ep_ <= ep && event(CompromisedAuthA(y,x,ep_))@j && j < i) ||
     (ep_ <= ep && event(CompromisedAuthA(x,y,ep_))@j && j < i) ||
     (ep_ <= ep && event(CompromisedAuthB(x,y,ep_))@j && j < i) ||
     (ep_ <= ep && event(CompromisedAuthB(y,x,ep_))@j && j < i));
  event(CompletedB(x,y,ep,sk))@i && attacker(sk) ==>
    (event(CompromisedKeysB(x,y,ep)) || event(CompromisedKeysA(y,x,ep)) ||
     (ep_ <= ep && event(CompromisedAuthA(x,y,ep_))@j && j < i
      (* && event(CompletedA(A,B,ep-1,sk')) && attacker(sk') *)) ||
     (ep_ <= ep && event(CompromisedAuthA(y,x,ep_))@j && j < i
      (* && event(CompletedA(A,B,ep-1,sk')) && attacker(sk') *)) ||
     (ep_ <= ep && event(CompromisedAuthB(x,y,ep_))@j && j < i) ||
     (ep_ <= ep && event(CompromisedAuthB(y,x,ep_))@j && j < i))
  .

(* Authentication Queries *)

query x: principal, y: principal, ep, ep_:nat, ek:seed, sk:symkey;
  event(CompletedB(y,x,ep,sk)) ==> event(StartedA(x,y,ep,ek));
  event(CompletedB(y,x,ep,sk)) ==>
    (event(StartedA(x,y,ep,ek)) ||
     (ep_ <= ep && (event(CompromisedAuthA(x,y,ep_)) || event(CompromisedAuthA(y,x,ep_))
       || event(CompromisedAuthB(y,x,ep_)) || event(CompromisedAuthB(x,y,ep_)))));
  event(CompletedA(x,y,ep,sk)) ==> event(StartedB(y,x,ep,ek));
  event(CompletedA(x,y,ep,sk)) ==>
    (event(StartedB(y,x,ep,ek)) ||
     (ep_ <= ep && (event(CompromisedAuthA(x,y,ep_)) || event(CompromisedAuthA(y,x,ep_))
       || event(CompromisedAuthB(y,x,ep_)) || event(CompromisedAuthB(x,y,ep_)))))
  .

process
  new authAB: authenticator;
  insert AStates(A, B, NeedToSample(0), authAB);
  insert BStates(B, A, Waiting(0), authAB);
  (!SendEkProc() | !SendCtProc() |
   (* Compromise Scenarios: comment out different options below to experiment *)

   (* Compromise Private Keys *)

   CompromiseKeysA(A,B,0) | CompromiseKeysB(B,A,0) |
   CompromiseKeysA(B,A,1) | CompromiseKeysB(A,B,1) |
   CompromiseKeysA(A,B,2) | CompromiseKeysB(B,A,2) |
   CompromiseKeysA(B,A,3) | CompromiseKeysB(A,B,3) |
   CompromiseKeysA(B,A,4) | CompromiseKeysB(A,B,4) |

   (* Compromise MAC Keys *)

   CompromiseAuthA(A,B,0) | CompromiseAuthB(B,A,0) |
   CompromiseAuthA(B,A,1) | CompromiseAuthB(A,B,1) |
   CompromiseAuthA(A,B,2) | CompromiseAuthB(B,A,2) |
   CompromiseAuthA(B,A,3) | CompromiseAuthB(A,B,3) |
   CompromiseAuthA(B,A,4) | CompromiseAuthB(A,B,4) |

   (* Post-Compromise Secrecy: Passively Compromise MAC Keys *after* all epochs are done *)
   (phase 1; (out(c,authAB) |
     (in (c,(x:principal, y:principal, ep:nat));
      (CompromiseAuthA(x,y,ep) | CompromiseAuthB(x,y,ep))))))
162
proofs/proverif/spqr-dr.pv
Normal file

@ -0,0 +1,162 @@
type dir.
free a2b:dir.
free b2a:dir.

table SharedKeys(principal, principal, dir, nat, symkey). (* a,b,dir,epoch,k: if dir is a2b then a as initiator established k at epoch ep with b using SPQR *)
table RootKeys(principal, principal, dir, nat, symkey).       (* a <-> b: dir, epoch, rk *)
table ChainKeys(principal, principal, dir, nat, nat, symkey). (* a <-> b: dir, epoch, ctr, ck *)
table MsgKeys(principal, principal, dir, nat, nat, symkey).   (* a <-> b: dir, epoch, ctr, mk *)

letfun max_epoch() = 3.
letfun max_ctr() = 3.

free root_key_label: bitstring.
free send_chain_key_label: bitstring.
free recv_chain_key_label: bitstring.

event CompromisedSharedKey(principal, principal, dir, nat).

let CKA_Key0(a:principal, b:principal) =
  (new k:symkey;
   insert SharedKeys(a, b, a2b, 0, k);
   insert SharedKeys(b, a, b2a, 0, k))
  (* We should allow attacker to choose 2 different keys *)
  | (in (c, k:symkey);
     event CompromisedSharedKey(a, b, a2b, 0);
     insert SharedKeys(a, b, a2b, 0, k);
     insert SharedKeys(b, a, b2a, 0, k)).

let CKA_KeyN(a:principal, b:principal) =
  get SharedKeys(=a, =b, =a2b, ep, oldk) in
  if ep+1 <= max_epoch() then (
    (new k:symkey;
     insert SharedKeys(a, b, a2b, ep+1, k);
     insert SharedKeys(b, a, b2a, ep+1, k))
    (* We should allow attacker to choose 2 different keys *)
    | (in (c, k:symkey);
       event CompromisedSharedKey(a, b, a2b, ep+1);
       insert SharedKeys(a, b, a2b, ep+1, k);
       insert SharedKeys(b, a, b2a, ep+1, k))).

event RootKey(principal, principal, dir, nat, symkey).

letfun SR_InitState(a:principal, b:principal, d:dir) =
  get SharedKeys(=a, =b, =d, 0, k) in
  let rk = kdf(k, root_key_label) in
  let cks = kdf(k, send_chain_key_label) in
  let ckr = kdf(k, recv_chain_key_label) in
  event RootKey(a, b, d, 0, rk);
  insert RootKeys(a, b, d, 0, rk);
  if d = a2b then (
    insert ChainKeys(a, b, a2b, 0, 0, cks);
    insert ChainKeys(a, b, b2a, 0, 0, ckr);
    0)
  else (
    insert ChainKeys(a, b, a2b, 0, 0, ckr);
    insert ChainKeys(a, b, b2a, 0, 0, cks);
    0).

letfun SR_NextEpoch(a:principal, b:principal, ep_:nat) =
  get RootKeys(=a, =b, d, ep, rk) in
  get SharedKeys(=a, =b, =d, key_epoch, k) in
  if ep + 1 = key_epoch && key_epoch <= max_epoch() then (
    let nrk = kdf(rk, (k,root_key_label)) in
    let cks = kdf(rk, (k,send_chain_key_label)) in
    let ckr = kdf(rk, (k,recv_chain_key_label)) in
    event RootKey(a, b, d, key_epoch, nrk);
    insert RootKeys(a, b, d, key_epoch, nrk);
    if d = a2b then (
      insert ChainKeys(a, b, a2b, key_epoch, 0, cks);
      insert ChainKeys(a, b, b2a, key_epoch, 0, ckr);
      0)
    else (
      insert ChainKeys(a, b, a2b, key_epoch, 0, ckr);
      insert ChainKeys(a, b, b2a, key_epoch, 0, cks);
      0))
  else 0.

free chain_key_ratchet_label: bitstring.
free msg_key_label: bitstring.

event MsgKey(principal, principal, dir, nat, nat, symkey).

letfun SR_NextCtr(a:principal, b:principal, d:dir, key_epoch:nat, ctr:nat) =
  get ChainKeys(=a, =b, =d, =key_epoch, =ctr, ck) in
  if ctr + 1 <= max_ctr() then (
    let nck = kdf(ck, chain_key_ratchet_label) in
    let mk = kdf(ck, msg_key_label) in
    insert ChainKeys(a, b, d, key_epoch, ctr+1, nck);
    event MsgKey(a, b, d, key_epoch, ctr, mk);
    insert MsgKeys(a, b, d, key_epoch, ctr, mk);
    0)
  else 0.

let SR_Init(a:principal, b:principal, d:dir) =
  let r = SR_InitState(a, b, d) in
  0.

let SR_AddEpoch(a:principal, b:principal) =
  get RootKeys(=a, =b, d, ep, rk) in
  let r = SR_NextEpoch(a, b, ep) in
  0.

let SR_NextKey(a:principal, b:principal) =
  get ChainKeys(=a, =b, d, ep, ctr, ck) in
  let s0 = SR_NextCtr(a, b, d, ep, ctr) in
  0.

event CompromisedRootKey(principal, principal, dir, nat, symkey).
event CompromisedChainKey(principal, principal, dir, nat, nat, symkey).

let CompromiseState(a:principal) =
  (get RootKeys(=a, b, d, ep, rk) in
   event CompromisedRootKey(a,b,d,ep,rk);
   out (c,rk))
  | (get ChainKeys(=a, b, d, ep, ctr, ck) in
     event CompromisedChainKey(a,b,d,ep,ctr,ck);
     out (c,ck)).

free A:principal.
free B:principal.

(* Reachability Queries *)

query a:principal, b:principal, ep:nat, ctr:nat, k:symkey;
  event(MsgKey(a,b,a2b,0,0,k));
  event(MsgKey(a,b,b2a,0,0,k));
  event(MsgKey(a,b,a2b,2,2,k));
  event(MsgKey(a,b,b2a,2,2,k)).

(* Confidentiality Queries *)

query a:principal, b:principal, ep:nat, ctr:nat, k:symkey, kk:symkey, ep_:nat;
  event(MsgKey(a,b,a2b,0,0,k)) && attacker(k);
  event(MsgKey(a,b,b2a,0,0,k)) && attacker(k);

  (* Confidentiality for first epoch *)
  (* Forward secrecy: Compromising later keys makes no difference *)
  event(MsgKey(a,b,a2b,0,0,k)) && attacker(k) ==>
    (event(CompromisedSharedKey(a,b,a2b,0)) ||
     event(CompromisedSharedKey(b,a,a2b,0)));

  (* Confidentiality for later epochs *)
  (* Forward secrecy: Compromising later keys makes no difference *)
  (* Post-Compromise Security: Compromising earlier keys makes no difference *)
  event(MsgKey(a,b,a2b,ep+1,0,k)) && attacker(k) ==>
    (event(CompromisedSharedKey(a,b,a2b,ep+1)) ||
     event(CompromisedSharedKey(b,a,a2b,ep+1))).

process
  CKA_Key0(A,B) |
  !CKA_KeyN(A,B) |
  !SR_Init(A,B,a2b) |
  !SR_Init(B,A,b2a) |
  !SR_AddEpoch(A,B) |
  !SR_AddEpoch(B,A) |
  !SR_NextKey(A,B) |
  !SR_NextKey(B,A) (* |
  !CompromiseState(A) |
  !CompromiseState(B) *)
105
src/authenticator.rs
Normal file

@ -0,0 +1,105 @@

// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

use libcrux_hmac::hmac;

use crate::{kdf, util::compare, Epoch};
pub mod serialize;
pub type Mac = Vec<u8>;

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("Ciphertext MAC is invalid")]
    InvalidCtMac,
    #[error("Encapsulation key MAC is invalid")]
    InvalidHdrMac,
    #[error("Authenticator previous root key present when should be erased")]
    AuthenticatorRootKeyPresent,
    #[error("Authenticator previous root key missing")]
    AuthenticatorRootKeyMissing,
    #[error("Authenticator previous MAC key present when should be erased")]
    AuthenticatorMacKeyPresent,
    #[error("Authenticator previous MAC key missing")]
    AuthenticatorMacKeyMissing,
}

#[cfg_attr(test, derive(Clone))]
pub struct Authenticator {
    root_key: Mac,
    mac_key: Mac,
}

#[hax_lib::attributes]
impl Authenticator {
    pub const MACSIZE: usize = 32usize;
    pub fn new(root_key: Vec<u8>, ep: Epoch) -> Self {
        let mut result = Self {
            root_key: vec![0u8; 32],
            mac_key: vec![0u8; 32],
        };
        result.update(ep, &root_key);
        result
    }

    pub fn update(&mut self, ep: Epoch, k: &[u8]) {
        let ikm = [self.root_key.as_slice(), k].concat();
        let info = [
            b"Signal_PQCKA_V1_MLKEM768:Authenticator Update".as_slice(),
            &ep.to_be_bytes(),
        ]
        .concat();
        let kdf_out = kdf::hkdf_to_vec(&[0u8; 32], &ikm, &info, 64);
        self.root_key = kdf_out[..32].to_vec();
        self.mac_key = kdf_out[32..].to_vec();
    }

    #[hax_lib::requires(expected_mac.len() == Authenticator::MACSIZE)]
    pub fn verify_ct(&self, ep: Epoch, ct: &[u8], expected_mac: &[u8]) -> Result<(), Error> {
        if compare(expected_mac, &self.mac_ct(ep, ct)) != 0 {
            Err(Error::InvalidCtMac)
        } else {
            Ok(())
        }
    }

    #[hax_lib::ensures(|res| res.len() == Authenticator::MACSIZE)]
    pub fn mac_ct(&self, ep: Epoch, ct: &[u8]) -> Mac {
        let ct_mac_data = [
            b"Signal_PQCKA_V1_MLKEM768:ciphertext".as_slice(),
            &ep.to_be_bytes(),
            ct,
        ]
        .concat();
        hmac(
            libcrux_hmac::Algorithm::Sha256,
            &self.mac_key,
            &ct_mac_data,
            Some(Self::MACSIZE),
        )
    }

    #[hax_lib::requires(expected_mac.len() == Authenticator::MACSIZE)]
    pub fn verify_hdr(&self, ep: Epoch, hdr: &[u8], expected_mac: &[u8]) -> Result<(), Error> {
        if compare(expected_mac, &self.mac_hdr(ep, hdr)) != 0 {
            Err(Error::InvalidHdrMac)
        } else {
            Ok(())
        }
    }

    #[hax_lib::ensures(|res| res.len() == Authenticator::MACSIZE)]
    pub fn mac_hdr(&self, ep: Epoch, hdr: &[u8]) -> Mac {
        let ct_mac_data = [
            b"Signal_PQCKA_V1_MLKEM768:ekheader".as_slice(),
            &ep.to_be_bytes(),
            hdr,
        ]
        .concat();
        hmac(
            libcrux_hmac::Algorithm::Sha256,
            &self.mac_key,
            &ct_mac_data,
            Some(Self::MACSIZE),
        )
    }
}
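A minimal usage sketch for this module (not part of the crate; the key bytes and epoch value are placeholders): both peers construct an Authenticator from the same root key and epoch, so a MAC produced by one side's mac_ct verifies on the other side with verify_ct.

    let alice = Authenticator::new(vec![7u8; 32], 3);
    let bob = Authenticator::new(vec![7u8; 32], 3);
    let mac = alice.mac_ct(3, b"ciphertext bytes");
    assert!(bob.verify_ct(3, b"ciphertext bytes", &mac).is_ok());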
40
src/authenticator/serialize.rs
Normal file

@ -0,0 +1,40 @@

// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

use crate::proto;

use super::Authenticator;

impl Authenticator {
    pub fn into_pb(self) -> proto::pq_ratchet::Authenticator {
        proto::pq_ratchet::Authenticator {
            root_key: self.root_key,
            mac_key: self.mac_key,
        }
    }

    pub fn from_pb(pb: &proto::pq_ratchet::Authenticator) -> Self {
        Self {
            root_key: pb.root_key.clone(),
            mac_key: pb.mac_key.clone(),
        }
    }
}

#[cfg(test)]
mod test {
    use crate::authenticator::Authenticator;

    #[test]
    fn round_trip() {
        let auth = Authenticator::new(vec![42u8; 32], 1);
        let ack = auth.mac_ct(1, b"123");

        let pb_auth = auth.into_pb();

        let new_auth = Authenticator::from_pb(&pb_auth);
        let new_mac = new_auth.mac_ct(1, b"123");

        assert_eq!(ack, new_mac);
    }
}
406
src/chain.rs
Normal file

@ -0,0 +1,406 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use super::{Direction, Epoch, EpochSecret, Error};
|
||||||
|
use crate::kdf;
|
||||||
|
use crate::proto::pq_ratchet as pqrpb;
|
||||||
|
use std::cmp::Ordering;
|
||||||
|
use std::collections::VecDeque;
|
||||||
|
|
||||||
|
struct KeyHistory {
|
||||||
|
// Keys are stored as [u8; 4][u8; 32], where the first is the index as a BE32
|
||||||
|
// and the second is the key.
|
||||||
|
// data.len() <= KEY_SIZE*TRIM_SIZE
|
||||||
|
data: Vec<u8>,
|
||||||
|
}
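A small sketch of the packed layout described in the comment above (unpack_entry is a hypothetical helper, not part of the crate): each entry is a 4-byte big-endian counter followed by a 32-byte key, so entry i occupies data[i * 36 .. (i + 1) * 36].

    fn unpack_entry(data: &[u8], i: usize) -> (u32, [u8; 32]) {
        // 36 = KeyHistory::KEY_SIZE (4-byte index + 32-byte key).
        let off = i * 36;
        let idx = u32::from_be_bytes(data[off..off + 4].try_into().unwrap());
        let key: [u8; 32] = data[off + 4..off + 36].try_into().unwrap();
        (idx, key)
    }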
|
||||||
|
|
||||||
|
/// ChainEpochDirection keeps track of keys related to either half of send/recv.
|
||||||
|
struct ChainEpochDirection {
|
||||||
|
ctr: u32,
|
||||||
|
// next.len() == 32
|
||||||
|
next: Vec<u8>,
|
||||||
|
prev: KeyHistory,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// ChainEpoch keeps state on a single epoch's keys.
|
||||||
|
struct ChainEpoch {
|
||||||
|
send: ChainEpochDirection,
|
||||||
|
recv: ChainEpochDirection,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Chain keeps track of keys for all epochs.
|
||||||
|
#[hax_lib::fstar::verification_status(lax)]
|
||||||
|
pub struct Chain {
|
||||||
|
dir: Direction,
|
||||||
|
current_epoch: Epoch,
|
||||||
|
// links.len() <= EPOCHS_TO_KEEP
|
||||||
|
links: VecDeque<ChainEpoch>, // stores [link[current_epoch-N] .. link[current_epoch]]
|
||||||
|
// next_root.len() == 32
|
||||||
|
next_root: Vec<u8>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Disallow requesting a key that is more than MAX_JUMP ahead of `ctr`.
|
||||||
|
pub const MAX_JUMP: usize = 25_000; // from libsignal/rust/protocol/src/consts.rs
|
||||||
|
/// Keep around keys back to at least `ctr - MAX_OOO_KEYS`, in case an out-of-order
|
||||||
|
/// message comes in. Messages older than this that arrive out-of-order
|
||||||
|
/// will not be able to be decrypted and will return Error::KeyTrimmed.
|
||||||
|
pub(crate) const MAX_OOO_KEYS: usize = 2000;
|
||||||
|
/// When the size of our key history exceeds this amount, we run a
|
||||||
|
/// garbage collection on it.
|
||||||
|
pub(crate) const TRIM_SIZE: usize = MAX_OOO_KEYS * 11 / 10;
|
||||||
|
/// We keep around this many epochs (including the current one) for out-of-order
|
||||||
|
/// messages. Currently, since there's O(10s) of messages required to exit a
|
||||||
|
/// single epoch and our MAX_OOO_KEYS is close to that number as well, we only
|
||||||
|
/// keep one additional epoch around.
|
||||||
|
const EPOCHS_TO_KEEP: usize = 2;
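Worked numbers for the constants above (derived directly from the definitions, added as a reading aid): with MAX_OOO_KEYS = 2000, TRIM_SIZE = 2000 * 11 / 10 = 2200, so KeyHistory::gc only starts trimming once at least 2200 out-of-order keys (2200 * 36 = 79,200 bytes of packed history) have accumulated, and EPOCHS_TO_KEEP = 2 means only the current and the immediately preceding epoch retain any key history at all.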
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl KeyHistory {
|
||||||
|
/// Size in bytes of a single key stored within a KeyHistory.
|
||||||
|
const KEY_SIZE: usize = 4 + 32;
|
||||||
|
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
data: Vec::with_capacity(Self::KEY_SIZE * 2),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(self.data.len() <= KeyHistory::KEY_SIZE * TRIM_SIZE)]
|
||||||
|
fn add(&mut self, k: (u32, [u8; 32])) {
|
||||||
|
self.data.extend_from_slice(&k.0.to_be_bytes()[..]);
|
||||||
|
self.data.extend_from_slice(&k.1[..]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque] // ordering of slices needed
|
||||||
|
fn gc(&mut self, current_key: u32) {
|
||||||
|
if self.data.len() >= TRIM_SIZE * Self::KEY_SIZE {
|
||||||
|
// We assume that current_key is the highest key index we've ever seen, and base
|
||||||
|
// our trimming on that.
|
||||||
|
assert!(current_key >= MAX_OOO_KEYS as u32);
|
||||||
|
let trim_horizon = &(current_key - MAX_OOO_KEYS as u32).to_be_bytes()[..];
|
||||||
|
|
||||||
|
// This does a single O(n) pass over our list, dropping all keys less than
|
||||||
|
// our computed trim horizon.
|
||||||
|
let mut i: usize = 0;
|
||||||
|
while i < self.data.len() {
|
||||||
|
if matches!(
|
||||||
|
trim_horizon.cmp(&self.data[i..i + 4]),
|
||||||
|
std::cmp::Ordering::Greater
|
||||||
|
) {
|
||||||
|
self.remove(i);
|
||||||
|
// Don't advance i here; we could have replaced the value therein
|
||||||
|
// with another old key.
|
||||||
|
} else {
|
||||||
|
i += Self::KEY_SIZE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn clear(&mut self) {
|
||||||
|
self.data.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(my_array_index <= self.data.len() && self.data.len() <= KeyHistory::KEY_SIZE * TRIM_SIZE)]
|
||||||
|
fn remove(&mut self, mut my_array_index: usize) {
|
||||||
|
if my_array_index + Self::KEY_SIZE < self.data.len() {
|
||||||
|
let new_end = self.data.len() - Self::KEY_SIZE;
|
||||||
|
self.data.copy_within(new_end.., my_array_index);
|
||||||
|
my_array_index = new_end;
|
||||||
|
}
|
||||||
|
self.data.truncate(my_array_index);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque] // needs a model of step_by loop with return
|
||||||
|
fn get(&mut self, at: u32, current_ctr: u32) -> Result<Vec<u8>, Error> {
|
||||||
|
assert_eq!(self.data.len() % Self::KEY_SIZE, 0);
|
||||||
|
let want = at.to_be_bytes();
|
||||||
|
for i in (0..self.data.len()).step_by(Self::KEY_SIZE) {
|
||||||
|
if self.data[i..i + 4] == want {
|
||||||
|
let out = self.data[i + 4..i + Self::KEY_SIZE].to_vec();
|
||||||
|
self.remove(i);
|
||||||
|
return Ok(out);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if at + (MAX_OOO_KEYS as u32) < current_ctr {
|
||||||
|
// We've already discarded this because it's too old.
|
||||||
|
Err(Error::KeyTrimmed(at))
|
||||||
|
} else {
|
||||||
|
// This is a key we should have and we don't, so it must have already
|
||||||
|
// been requested.
|
||||||
|
Err(Error::KeyAlreadyRequested(at))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ChainEpochDirection {
|
||||||
|
fn new(k: &[u8]) -> Self {
|
||||||
|
Self {
|
||||||
|
ctr: 0,
|
||||||
|
prev: KeyHistory::new(),
|
||||||
|
next: k.to_vec(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn next_key(&mut self) -> (u32, Vec<u8>) {
|
||||||
|
let (idx, key) = Self::next_key_internal(&mut self.next, &mut self.ctr);
|
||||||
|
(idx, key.to_vec())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn next_key_internal(next: &mut [u8], ctr: &mut u32) -> (u32, [u8; 32]) {
|
||||||
|
hax_lib::fstar!("admit()");
|
||||||
|
*ctr += 1;
|
||||||
|
let mut gen = [0u8; 64];
|
||||||
|
kdf::hkdf_to_slice(
|
||||||
|
&[0u8; 32], // 32 is the hash output length
|
||||||
|
&*next,
|
||||||
|
&[
|
||||||
|
ctr.to_be_bytes().as_slice(),
|
||||||
|
b"Signal PQ Ratchet V1 Chain Next",
|
||||||
|
]
|
||||||
|
.concat(),
|
||||||
|
&mut gen,
|
||||||
|
);
|
||||||
|
next.copy_from_slice(&gen[..32]);
|
||||||
|
(*ctr, gen[32..].try_into().expect("correct size"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn key(&mut self, at: u32) -> Result<Vec<u8>, Error> {
|
||||||
|
hax_lib::fstar!("admit()");
|
||||||
|
match at.cmp(&self.ctr) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
if at - self.ctr > MAX_JUMP as u32 {
|
||||||
|
return Err(Error::KeyJump(self.ctr, at));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ordering::Less => {
|
||||||
|
return self.prev.get(at, self.ctr);
|
||||||
|
}
|
||||||
|
Ordering::Equal => {
|
||||||
|
// We've already returned this key once, we won't do it again.
|
||||||
|
return Err(Error::KeyAlreadyRequested(at));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if at > self.ctr + (MAX_OOO_KEYS as u32) {
|
||||||
|
// We're about to make all currently-held keys obsolete - just remove
|
||||||
|
// them all.
|
||||||
|
self.prev.clear();
|
||||||
|
}
|
||||||
|
while at > self.ctr + 1 {
|
||||||
|
let k = Self::next_key_internal(&mut self.next, &mut self.ctr);
|
||||||
|
// Only add keys into our history if we're not going to immediately GC them.
|
||||||
|
if self.ctr + (MAX_OOO_KEYS as u32) >= at {
|
||||||
|
self.prev.add(k);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// After we've potentially added some new keys, see if there's any we
|
||||||
|
// want to throw away.
|
||||||
|
self.prev.gc(self.ctr);
|
||||||
|
|
||||||
|
Ok(Self::next_key_internal(&mut self.next, &mut self.ctr)
|
||||||
|
.1
|
||||||
|
.to_vec())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn into_pb(self) -> pqrpb::chain::epoch::Direction {
|
||||||
|
pqrpb::chain::epoch::Direction {
|
||||||
|
ctr: self.ctr,
|
||||||
|
next: self.next,
|
||||||
|
prev: self.prev.data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_pb(pb: pqrpb::chain::epoch::Direction) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
ctr: pb.ctr,
|
||||||
|
next: pb.next,
|
||||||
|
prev: KeyHistory { data: pb.prev },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl Chain {
|
||||||
|
#[hax_lib::requires(gen.len() == 96)]
|
||||||
|
fn ced_for_direction(gen: &[u8], dir: &Direction) -> ChainEpochDirection {
|
||||||
|
ChainEpochDirection::new(match dir {
|
||||||
|
Direction::A2B => &gen[32..64],
|
||||||
|
Direction::B2A => &gen[64..96],
|
||||||
|
})
|
||||||
|
}
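Note on the 96-byte gen buffer used here and in new/add_epoch below (added as a reading aid): gen[0..32] becomes the next root key, gen[32..64] seeds the A-to-B chain, and gen[64..96] seeds the B-to-A chain. Each side picks its send and recv halves according to its own Direction (dir and dir.switch()), which is why both sides derive matching send/recv chains from the same HKDF output.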
|
||||||
|
|
||||||
|
pub fn new(initial_key: &[u8], dir: Direction) -> Self {
|
||||||
|
hax_lib::fstar!("admit ()");
|
||||||
|
let mut gen = [0u8; 96];
|
||||||
|
kdf::hkdf_to_slice(
|
||||||
|
&[0u8; 32],
|
||||||
|
initial_key,
|
||||||
|
b"Signal PQ Ratchet V1 Chain Start",
|
||||||
|
&mut gen,
|
||||||
|
);
|
||||||
|
Self {
|
||||||
|
dir,
|
||||||
|
current_epoch: 0,
|
||||||
|
links: VecDeque::from([ChainEpoch {
|
||||||
|
send: Self::ced_for_direction(&gen, &dir),
|
||||||
|
recv: Self::ced_for_direction(&gen, &dir.switch()),
|
||||||
|
}]),
|
||||||
|
next_root: gen[0..32].to_vec(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_epoch(&mut self, epoch_secret: EpochSecret) {
|
||||||
|
hax_lib::fstar!("admit ()");
|
||||||
|
assert!(epoch_secret.epoch == self.current_epoch + 1);
|
||||||
|
let mut gen = [0u8; 96];
|
||||||
|
kdf::hkdf_to_slice(
|
||||||
|
&self.next_root,
|
||||||
|
&epoch_secret.secret,
|
||||||
|
b"Signal PQ Ratchet V1 Chain Add Epoch",
|
||||||
|
&mut gen,
|
||||||
|
);
|
||||||
|
self.current_epoch = epoch_secret.epoch;
|
||||||
|
self.next_root = gen[0..32].to_vec();
|
||||||
|
self.links.push_back(ChainEpoch {
|
||||||
|
send: Self::ced_for_direction(&gen, &self.dir),
|
||||||
|
recv: Self::ced_for_direction(&gen, &self.dir.switch()),
|
||||||
|
});
|
||||||
|
if self.links.len() > EPOCHS_TO_KEEP {
|
||||||
|
self.links.pop_front();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn epoch_idx(&mut self, epoch: Epoch) -> Result<usize, Error> {
|
||||||
|
if epoch > self.current_epoch {
|
||||||
|
return Err(Error::EpochOutOfRange(epoch));
|
||||||
|
}
|
||||||
|
let back = (self.current_epoch - epoch) as usize;
|
||||||
|
let links = self.links.len();
|
||||||
|
if back >= links {
|
||||||
|
return Err(Error::EpochOutOfRange(epoch));
|
||||||
|
}
|
||||||
|
Ok(links - 1 - back)
|
||||||
|
}
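A worked example of the index arithmetic above (values are illustrative): with current_epoch = 5 and two retained links, epoch 5 gives back = 0 and index 1 (the newest entry), epoch 4 gives back = 1 and index 0, and epoch 3 gives back = 2 >= links, so it returns Error::EpochOutOfRange.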
|
||||||
|
|
||||||
|
pub fn send_key(&mut self, epoch: Epoch) -> Result<(u32, Vec<u8>), Error> {
|
||||||
|
hax_lib::fstar!("admit ()");
|
||||||
|
let epoch_index = self.epoch_idx(epoch)?;
|
||||||
|
Ok(self.links[epoch_index].send.next_key())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn recv_key(&mut self, epoch: Epoch, index: u32) -> Result<Vec<u8>, Error> {
|
||||||
|
hax_lib::fstar!("admit ()");
|
||||||
|
let epoch_index = self.epoch_idx(epoch)?;
|
||||||
|
self.links[epoch_index].recv.key(index)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque] // into_iter for vec_deque
|
||||||
|
pub fn into_pb(self) -> pqrpb::Chain {
|
||||||
|
pqrpb::Chain {
|
||||||
|
a2b: matches!(self.dir, Direction::A2B),
|
||||||
|
current_epoch: self.current_epoch,
|
||||||
|
links: self
|
||||||
|
.links
|
||||||
|
.into_iter()
|
||||||
|
.map(|link| pqrpb::chain::Epoch {
|
||||||
|
send: Some(link.send.into_pb()),
|
||||||
|
recv: Some(link.recv.into_pb()),
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
next_root: self.next_root,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque] // into_iter and map
|
||||||
|
pub fn from_pb(pb: pqrpb::Chain) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
dir: if pb.a2b {
|
||||||
|
Direction::A2B
|
||||||
|
} else {
|
||||||
|
Direction::B2A
|
||||||
|
},
|
||||||
|
current_epoch: pb.current_epoch,
|
||||||
|
next_root: pb.next_root,
|
||||||
|
links: pb
|
||||||
|
.links
|
||||||
|
.into_iter()
|
||||||
|
.map(|link| {
|
||||||
|
Ok::<ChainEpoch, Error>(ChainEpoch {
|
||||||
|
send: ChainEpochDirection::from_pb(link.send.ok_or(Error::StateDecode)?)?,
|
||||||
|
recv: ChainEpochDirection::from_pb(link.recv.ok_or(Error::StateDecode)?)?,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect::<Result<VecDeque<_>, _>>()?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::{Chain, MAX_JUMP, MAX_OOO_KEYS, TRIM_SIZE};
|
||||||
|
use crate::{Direction, EpochSecret, Error};
|
||||||
|
use rand::seq::SliceRandom;
|
||||||
|
use rand::TryRngCore;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn directions_match() {
|
||||||
|
let mut a2b = Chain::new(b"1", Direction::A2B);
|
||||||
|
let mut b2a = Chain::new(b"1", Direction::B2A);
|
||||||
|
let sk1 = a2b.send_key(0).unwrap();
|
||||||
|
assert_eq!(sk1.0, 1);
|
||||||
|
assert_eq!(sk1.1, b2a.recv_key(0, 1).unwrap());
|
||||||
|
a2b.add_epoch(EpochSecret {
|
||||||
|
epoch: 1,
|
||||||
|
secret: vec![2],
|
||||||
|
});
|
||||||
|
b2a.add_epoch(EpochSecret {
|
||||||
|
epoch: 1,
|
||||||
|
secret: vec![2],
|
||||||
|
});
|
||||||
|
let sk2 = a2b.send_key(1).unwrap();
|
||||||
|
assert_eq!(sk2.0, 1);
|
||||||
|
assert_eq!(sk2.1, b2a.recv_key(1, 1).unwrap());
|
||||||
|
for _i in 2..10 {
|
||||||
|
a2b.send_key(1).unwrap();
|
||||||
|
}
|
||||||
|
let sk3 = a2b.send_key(1).unwrap();
|
||||||
|
assert_eq!(sk3.0, 10);
|
||||||
|
assert_eq!(sk3.1, b2a.recv_key(1, 10).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn previously_returned_key() {
|
||||||
|
let mut a2b = Chain::new(b"1", Direction::A2B);
|
||||||
|
a2b.recv_key(0, 2).expect("should get key first time");
|
||||||
|
assert!(matches!(
|
||||||
|
a2b.recv_key(0, 2),
|
||||||
|
Err(Error::KeyAlreadyRequested(2))
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn very_old_keys_are_trimmed() {
|
||||||
|
let mut a2b = Chain::new(b"1", Direction::A2B);
|
||||||
|
let mut i = 1u32;
|
||||||
|
while (i as usize) < TRIM_SIZE + 1 {
|
||||||
|
i += MAX_JUMP as u32 - 1;
|
||||||
|
a2b.recv_key(0, i).expect("should allow this jump");
|
||||||
|
}
|
||||||
|
assert!(matches!(a2b.recv_key(0, 1), Err(Error::KeyTrimmed(1))));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn out_of_order_keys() {
|
||||||
|
let mut a2b = Chain::new(b"1", Direction::A2B);
|
||||||
|
let mut b2a = Chain::new(b"1", Direction::B2A);
|
||||||
|
let mut keys = Vec::with_capacity(MAX_OOO_KEYS);
|
||||||
|
for _i in 0..MAX_OOO_KEYS {
|
||||||
|
keys.push(a2b.send_key(0).unwrap());
|
||||||
|
}
|
||||||
|
let mut rng = rand::rngs::OsRng.unwrap_err();
|
||||||
|
keys.shuffle(&mut rng);
|
||||||
|
for (idx, key) in keys {
|
||||||
|
assert_eq!(b2a.recv_key(0, idx).unwrap(), key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
127
src/encoding.rs
Normal file

@ -0,0 +1,127 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
pub mod gf;
|
||||||
|
pub mod polynomial;
|
||||||
|
pub mod round_robin;
|
||||||
|
|
||||||
|
use crate::proto::pq_ratchet as pqrpb;
|
||||||
|
|
||||||
|
#[derive(Debug, thiserror::Error, Copy, Clone, PartialEq)]
|
||||||
|
pub enum EncodingError {
|
||||||
|
#[error("Polynomial error: {0}")]
|
||||||
|
PolynomialError(polynomial::PolynomialError),
|
||||||
|
#[error("Index decoding error")]
|
||||||
|
ChunkIndexDecodingError,
|
||||||
|
#[error("Data decoding error")]
|
||||||
|
ChunkDataDecodingError,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<polynomial::PolynomialError> for EncodingError {
|
||||||
|
fn from(value: polynomial::PolynomialError) -> Self {
|
||||||
|
Self::PolynomialError(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
|
pub struct Chunk {
|
||||||
|
pub index: u16,
|
||||||
|
pub data: [u8; 32],
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Chunk {
|
||||||
|
pub fn into_pb(self) -> pqrpb::Chunk {
|
||||||
|
pqrpb::Chunk {
|
||||||
|
index: self.index as u32,
|
||||||
|
data: self.data[..].to_vec(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::Chunk) -> Result<Self, EncodingError> {
|
||||||
|
Ok(Self {
|
||||||
|
index: pb
|
||||||
|
.index
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| EncodingError::ChunkIndexDecodingError)?,
|
||||||
|
data: pb
|
||||||
|
.data
|
||||||
|
.as_slice()
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| EncodingError::ChunkDataDecodingError)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait Encoder {
|
||||||
|
fn encode_bytes(msg: &[u8]) -> Result<Self, EncodingError>
|
||||||
|
where
|
||||||
|
Self: Sized;
|
||||||
|
fn next_chunk(&mut self) -> Chunk;
|
||||||
|
fn data(&self) -> &Vec<u8>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait Decoder {
|
||||||
|
fn new(len_bytes: usize) -> Result<Self, EncodingError>
|
||||||
|
where
|
||||||
|
Self: Sized;
|
||||||
|
fn add_chunk(&mut self, chunk: &Chunk);
|
||||||
|
fn decoded_message(&self) -> Option<Vec<u8>>;
|
||||||
|
//fn take_decoded_message(&mut self) -> Option<Vec<u8>>;
|
||||||
|
fn is_complete(&self) -> bool;
|
||||||
|
}
|
||||||
|
|
||||||
|
// XXX: For ease of formal verification with hax, we avoid using
|
||||||
|
// functions that return mutable references, such as Option::take.
|
||||||
|
// We therefore `take` the value out and store it back for the
|
||||||
|
// encoder and decoder.
|
||||||
|
#[hax_lib::opaque] // Needed for abstract precondition
|
||||||
|
impl<T: Encoder> Encoder for Option<T> {
|
||||||
|
fn encode_bytes(msg: &[u8]) -> Result<Self, EncodingError>
|
||||||
|
where
|
||||||
|
Self: Sized,
|
||||||
|
{
|
||||||
|
Ok(Some(T::encode_bytes(msg)?))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn next_chunk(&mut self) -> Chunk {
|
||||||
|
let mut tmp = self.take().unwrap();
|
||||||
|
let chunk = T::next_chunk(&mut tmp);
|
||||||
|
*self = Some(tmp);
|
||||||
|
chunk
|
||||||
|
}
|
||||||
|
|
||||||
|
fn data(&self) -> &Vec<u8> {
|
||||||
|
T::data(self.as_ref().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque] // Needed for abstract precondition
|
||||||
|
impl<T: Decoder> Decoder for Option<T> {
|
||||||
|
fn new(len_bytes: usize) -> Result<Self, EncodingError>
|
||||||
|
where
|
||||||
|
Self: Sized,
|
||||||
|
{
|
||||||
|
Ok(Some(T::new(len_bytes)?))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_chunk(&mut self, chunk: &Chunk) {
|
||||||
|
let mut tmp = self.take().unwrap();
|
||||||
|
T::add_chunk(&mut tmp, chunk);
|
||||||
|
*self = Some(tmp);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn decoded_message(&self) -> Option<Vec<u8>> {
|
||||||
|
T::decoded_message(self.as_ref().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
/* fn take_decoded_message(&mut self) -> Option<Vec<u8>> {
|
||||||
|
let mut tmp = self.take().unwrap();
|
||||||
|
let result = T::take_decoded_message(&mut tmp);
|
||||||
|
*self = Some(tmp);
|
||||||
|
result
|
||||||
|
} */
|
||||||
|
|
||||||
|
fn is_complete(&self) -> bool {
|
||||||
|
T::is_complete(self.as_ref().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
710
src/encoding/gf.rs
Normal file

@ -0,0 +1,710 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use std::ops;
|
||||||
|
|
||||||
|
// https://web.eecs.utk.edu/~jplank/plank/papers/CS-07-593/primitive-polynomial-table.txt
|
||||||
|
pub const POLY: u32 = 0x1100b; // 0o210013
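As a reading aid (added note, easily checked from the constant): 0x1100b has bits 16, 12, 3, 1 and 0 set, so POLY encodes the primitive polynomial x^16 + x^12 + x^3 + x + 1 over GF(2), and GF16 below implements GF(2^16) arithmetic modulo that polynomial.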
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
|
||||||
|
#[repr(transparent)]
|
||||||
|
#[hax_lib::fstar::after(
|
||||||
|
r#"
|
||||||
|
let to_gf (s: t_GF16) = Spec.GF16.to_bv s.f_value
|
||||||
|
"#
|
||||||
|
)]
|
||||||
|
pub struct GF16 {
|
||||||
|
pub value: u16,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::AddAssign<&GF16> for GF16 {
|
||||||
|
#[allow(clippy::suspicious_op_assign_impl)]
|
||||||
|
#[requires(true)]
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf self_e_future ==
|
||||||
|
Spec.GF16.gf_add (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn add_assign(&mut self, other: &Self) {
|
||||||
|
hax_lib::fstar!("Spec.GF16.xor_is_gf_add_lemma self.f_value other.f_value");
|
||||||
|
self.value ^= other.value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::AddAssign for GF16 {
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf self_e_future ==
|
||||||
|
Spec.GF16.gf_add (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn add_assign(&mut self, other: Self) {
|
||||||
|
hax_lib::fstar!("Spec.GF16.xor_is_gf_add_lemma self.f_value other.f_value");
|
||||||
|
self.add_assign(&other);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Add for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf result ==
|
||||||
|
Spec.GF16.gf_add (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn add(self, other: Self) -> Self {
|
||||||
|
let mut out = self;
|
||||||
|
out += &other;
|
||||||
|
out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Add<&GF16> for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf result ==
|
||||||
|
Spec.GF16.gf_add (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn add(self, other: &Self) -> Self {
|
||||||
|
let mut out = self;
|
||||||
|
out += other;
|
||||||
|
out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::SubAssign<&GF16> for GF16 {
|
||||||
|
#[allow(clippy::suspicious_op_assign_impl)]
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf self_e_future ==
|
||||||
|
Spec.GF16.gf_sub (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn sub_assign(&mut self, other: &Self) {
|
||||||
|
*self += other;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::SubAssign for GF16 {
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf self_e_future ==
|
||||||
|
Spec.GF16.gf_sub (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn sub_assign(&mut self, other: Self) {
|
||||||
|
self.sub_assign(&other);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Sub for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf result ==
|
||||||
|
Spec.GF16.gf_sub (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn sub(self, other: Self) -> Self {
|
||||||
|
let mut out = self;
|
||||||
|
out -= &other;
|
||||||
|
out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Sub<&GF16> for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
to_gf result ==
|
||||||
|
Spec.GF16.gf_sub (to_gf self_) (to_gf other)
|
||||||
|
"#))]
|
||||||
|
fn sub(self, other: &Self) -> Self {
|
||||||
|
let mut out = self;
|
||||||
|
out -= other;
|
||||||
|
out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::MulAssign<&GF16> for GF16 {
|
||||||
|
fn mul_assign(&mut self, other: &Self) {
|
||||||
|
#[cfg(all(
|
||||||
|
not(hax),
|
||||||
|
any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
|
||||||
|
))]
|
||||||
|
if check_accelerated::TOKEN.get() {
|
||||||
|
self.value = accelerated::mul(self.value, other.value);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
self.value = unaccelerated::mul(self.value, other.value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::MulAssign for GF16 {
|
||||||
|
fn mul_assign(&mut self, other: Self) {
|
||||||
|
self.mul_assign(&other);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Mul for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
fn mul(self, other: Self) -> Self {
|
||||||
|
let mut out = self;
|
||||||
|
out *= &other;
|
||||||
|
out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Mul<&GF16> for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
fn mul(self, other: &Self) -> Self {
|
||||||
|
let mut out = self;
|
||||||
|
out *= other;
|
||||||
|
out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::DivAssign<&GF16> for GF16 {
|
||||||
|
#[allow(clippy::suspicious_op_assign_impl)]
|
||||||
|
fn div_assign(&mut self, other: &Self) {
|
||||||
|
*self = self.div_impl(other);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::DivAssign for GF16 {
|
||||||
|
fn div_assign(&mut self, other: Self) {
|
||||||
|
*self = self.div_impl(&other);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Div for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
fn div(self, other: Self) -> Self {
|
||||||
|
self.div_impl(&other)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl ops::Div<&GF16> for GF16 {
|
||||||
|
type Output = Self;
|
||||||
|
fn div(self, other: &Self) -> Self {
|
||||||
|
self.div_impl(other)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
#[hax_lib::opaque]
|
||||||
|
pub fn parallel_mult(a: GF16, into: &mut [GF16]) {
|
||||||
|
let mut i = 0;
|
||||||
|
while i + 2 <= into.len() {
|
||||||
|
hax_lib::loop_decreases!(into.len() - i);
|
||||||
|
(into[i].value, into[i + 1].value) = mul2_u16(a.value, into[i].value, into[i + 1].value);
|
||||||
|
i += 2;
|
||||||
|
}
|
||||||
|
if i < into.len() {
|
||||||
|
into[i] *= a;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque]
|
||||||
|
fn mul2_u16(a: u16, b1: u16, b2: u16) -> (u16, u16) {
|
||||||
|
#[cfg(all(
|
||||||
|
not(hax),
|
||||||
|
any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
|
||||||
|
))]
|
||||||
|
if check_accelerated::TOKEN.get() {
|
||||||
|
return accelerated::mul2(a, b1, b2);
|
||||||
|
}
|
||||||
|
(unaccelerated::mul(a, b1), unaccelerated::mul(a, b2))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
|
||||||
|
mod accelerated {
|
||||||
|
#[cfg(target_arch = "x86")]
|
||||||
|
use core::arch::x86 as arch;
|
||||||
|
#[cfg(target_arch = "x86_64")]
|
||||||
|
use core::arch::x86_64 as arch;
|
||||||
|
|
||||||
|
pub fn mul(a: u16, b: u16) -> u16 {
|
||||||
|
mul2(a, b, 0).0
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
#[target_feature(enable = "pclmulqdq")]
|
||||||
|
unsafe fn mul2_unreduced(a: u16, b1: u16, b2: u16) -> (u32, u32) {
|
||||||
|
let a = arch::_mm_set_epi64x(0, a as i64);
|
||||||
|
let b = arch::_mm_set_epi64x(0, ((b2 as i64) << 32) | (b1 as i64));
|
||||||
|
let clmul = arch::_mm_clmulepi64_si128(a, b, 0);
|
||||||
|
|
||||||
|
// Some architectures have _mm_cvtsi128_si64, which pulls out the full 64
|
||||||
|
// bits at once. However, more architectures have _mm_cvtsi128_si32, which
|
||||||
|
// just pulls out 32, and it turns out that doing that twice appears to have
|
||||||
|
// about the same latency.
|
||||||
|
let b1out = arch::_mm_cvtsi128_si32(clmul) as u32;
|
||||||
|
// To pull out the higher bits (for b2), shift our result right by 4 bytes
|
||||||
|
// (32 bits), then pull out the lowest 32 bits.
|
||||||
|
let b2out = arch::_mm_cvtsi128_si32(arch::_mm_srli_si128(clmul, 4)) as u32;
|
||||||
|
(b1out, b2out)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn mul2(a: u16, b1: u16, b2: u16) -> (u16, u16) {
|
||||||
|
let unreduced_products = unsafe { mul2_unreduced(a, b1, b2) };
|
||||||
|
(
|
||||||
|
super::reduce::poly_reduce(unreduced_products.0),
|
||||||
|
super::reduce::poly_reduce(unreduced_products.1),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_arch = "aarch64")]
|
||||||
|
mod accelerated {
|
||||||
|
use core::arch::aarch64;
|
||||||
|
|
||||||
|
pub fn mul(a: u16, b: u16) -> u16 {
|
||||||
|
mul2(a, b, 0).0
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
#[target_feature(enable = "neon")]
|
||||||
|
unsafe fn mul2_unreduced(a: u16, b1: u16, b2: u16) -> u128 {
|
||||||
|
aarch64::vmull_p64(a as u64, ((b1 as u64) << 32) | (b2 as u64))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn mul2(a: u16, b1: u16, b2: u16) -> (u16, u16) {
|
||||||
|
let unreduced_product = unsafe { mul2_unreduced(a, b1, b2) };
|
||||||
|
(
|
||||||
|
super::reduce::poly_reduce((unreduced_product >> 32) as u32),
|
||||||
|
super::reduce::poly_reduce(unreduced_product as u32),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
#[cfg(target_arch = "arm")]
|
||||||
|
mod accelerated {
|
||||||
|
use core::arch::arm;
|
||||||
|
|
||||||
|
pub fn mul(a: u16, b: u16) -> u16 {
|
||||||
|
mul2(a, b, 0).0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn mul2(a: u16, b1: u16, b2: u16) -> (u16, u16) {
|
||||||
|
// 32-bit ARM only provides polynomial multiplication of 8-bit
|
||||||
|
// values, but it does provide _parallel_ multiplication of those values.
|
||||||
|
// We use this to implement simple long-multiplication, in the form:
|
||||||
|
// AB
|
||||||
|
// *CD
|
||||||
|
// ------
|
||||||
|
// BD // 0 shifts
|
||||||
|
// + BC // 1 shift
|
||||||
|
// + AD // 1 shift
|
||||||
|
// + AC // 2 shifts
|
||||||
|
//
|
||||||
|
// We use vmull_p8 to compute all sub-multiplications, then
|
||||||
|
// XOR the results together with appropriate shifts to get the final
|
||||||
|
// result.
|
||||||
|
|
||||||
|
let a_mul = [
|
||||||
|
(a >> 8) as u8,
|
||||||
|
(a >> 8) as u8,
|
||||||
|
(a & 0xff) as u8,
|
||||||
|
(a & 0xff) as u8,
|
||||||
|
(a >> 8) as u8,
|
||||||
|
(a >> 8) as u8,
|
||||||
|
(a & 0xff) as u8,
|
||||||
|
(a & 0xff) as u8,
|
||||||
|
];
|
||||||
|
let b_mul = [
|
||||||
|
(b1 >> 8) as u8,
|
||||||
|
(b1 & 0xff) as u8,
|
||||||
|
(b1 >> 8) as u8,
|
||||||
|
(b1 & 0xff) as u8,
|
||||||
|
(b2 >> 8) as u8,
|
||||||
|
(b2 & 0xff) as u8,
|
||||||
|
(b2 >> 8) as u8,
|
||||||
|
(b2 & 0xff) as u8,
|
||||||
|
];
|
||||||
|
let out = unsafe {
|
||||||
|
let a_p8 = arm::vld1_p8(&a_mul as *const u8);
|
||||||
|
let b_p8 = arm::vld1_p8(&b_mul as *const u8);
|
||||||
|
let ab_p16 = arm::vmull_p8(a_p8, b_p8);
|
||||||
|
let mut out = [0u16; 8];
|
||||||
|
arm::vst1q_p16(&mut out as *mut u16, ab_p16);
|
||||||
|
out
|
||||||
|
};
|
||||||
|
let (b1out, b2out) = (
|
||||||
|
((out[0] as u32) << 16)
|
||||||
|
^ ((out[1] as u32) << 8)
|
||||||
|
^ ((out[2] as u32) << 8)
|
||||||
|
^ (out[3] as u32),
|
||||||
|
((out[4] as u32) << 16)
|
||||||
|
^ ((out[5] as u32) << 8)
|
||||||
|
^ ((out[6] as u32) << 8)
|
||||||
|
^ (out[7] as u32),
|
||||||
|
);
|
||||||
|
(
|
||||||
|
super::reduce::poly_reduce(b1out),
|
||||||
|
super::reduce::poly_reduce(b2out),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
#[cfg(all(
|
||||||
|
not(hax),
|
||||||
|
any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")
|
||||||
|
))]
|
||||||
|
mod check_accelerated {
|
||||||
|
#[cfg(target_arch = "aarch64")]
|
||||||
|
cpufeatures::new!(use_accelerated, "aes"); // `aes` implies PMULL
|
||||||
|
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
|
||||||
|
cpufeatures::new!(use_accelerated, "pclmulqdq");
|
||||||
|
|
||||||
|
use std::sync::LazyLock;
|
||||||
|
|
||||||
|
pub(crate) static TOKEN: LazyLock<use_accelerated::InitToken> =
|
||||||
|
LazyLock::new(use_accelerated::init);
|
||||||
|
}
|
||||||
|
|
||||||
|
mod unaccelerated {
|
||||||
|
#[hax_lib::fstar::options("--fuel 2")]
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
let open Spec.GF16 in
|
||||||
|
to_bv result ==
|
||||||
|
poly_mul (to_bv a) (to_bv b)"#))]
|
||||||
|
const fn poly_mul(a: u16, b: u16) -> u32 {
|
||||||
|
let mut acc: u32 = 0;
|
||||||
|
let me = a as u32;
|
||||||
|
// Long multiplication.
|
||||||
|
let mut shift: u32 = 0;
|
||||||
|
hax_lib::fstar!(
|
||||||
|
r#"
|
||||||
|
let open Spec.GF16 in
|
||||||
|
assert_norm Spec.GF16.(poly_mul_i (to_bv a) (to_bv b) 0 == zero #32);
|
||||||
|
zero_lemma #U32;
|
||||||
|
up_cast_lemma #U16 #U32 a
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
while shift < 16 {
|
||||||
|
hax_lib::loop_invariant!(fstar!(
|
||||||
|
r#"
|
||||||
|
let open Spec.GF16 in
|
||||||
|
v shift <= 16 /\
|
||||||
|
to_bv acc == poly_mul_i (to_bv a) (to_bv b) (v shift)
|
||||||
|
"#
|
||||||
|
));
|
||||||
|
hax_lib::loop_decreases!(16 - shift);
|
||||||
|
hax_lib::fstar!(
|
||||||
|
r#"
|
||||||
|
let open Spec.GF16 in
|
||||||
|
shift_left_bit_select_lemma b shift;
|
||||||
|
up_cast_shift_left_lemma a shift;
|
||||||
|
lemma_add_lift #(16+v shift) #32 (poly_mul_x_k (to_bv a) (v shift)) (to_bv acc);
|
||||||
|
xor_is_gf_add_lemma acc (me <<! shift)
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
if 0 != b & (1 << shift) {
|
||||||
|
hax_lib::fstar!(
|
||||||
|
r#"
|
||||||
|
let open Spec.GF16 in
|
||||||
|
assert ((to_bv b).[v shift] == true);
|
||||||
|
assert (poly_mul_i (to_bv a) (to_bv b) (v shift + 1) ==
|
||||||
|
gf_add (poly_mul_i (to_bv a) (to_bv b) (v shift))
|
||||||
|
(poly_mul_x_k (to_bv a) (v shift)))
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
acc ^= me << shift;
|
||||||
|
}
|
||||||
|
shift += 1;
|
||||||
|
}
|
||||||
|
acc
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
let open Spec.GF16 in
|
||||||
|
let (r1, r2) = result in
|
||||||
|
to_bv r1 == gf16_mul (to_bv a) (to_bv b1) /\
|
||||||
|
to_bv r2 == gf16_mul (to_bv a) (to_bv b2)
|
||||||
|
"#))]
|
||||||
|
pub fn mul2(a: u16, b1: u16, b2: u16) -> (u16, u16) {
|
||||||
|
(mul(a, b1), mul(a, b2))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
let open Spec.GF16 in
|
||||||
|
to_bv result ==
|
||||||
|
gf16_mul (to_bv a) (to_bv b)"#))]
|
||||||
|
pub const fn mul(a: u16, b: u16) -> u16 {
|
||||||
|
super::reduce::poly_reduce(poly_mul(a, b))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mod reduce {
|
||||||
|
use super::POLY;
|
||||||
|
|
||||||
|
/// This is a somewhat optimized reduction that's architecture-agnostic.
|
||||||
|
/// When reducing, we look from the highest- to lowest-order bits in the
|
||||||
|
/// range 31..16, and we clear each 1 we find by XOR-ing with the POLY,
|
||||||
|
/// whose topmost bit (1<<16) is set. POLY is 17 bits long, meaning that
|
||||||
|
/// we clear the topmost bit while affecting the 16 bits below it.
|
||||||
|
/// Now, let's consider the topmost BYTE of a u32. Assuming that byte is
|
||||||
|
/// some constant C, we will always perform the same set of (up to) 8
|
||||||
|
/// XORs on the 2 bytes below C, and these can be precomputed as a 16-bit
|
||||||
|
/// XOR against those bytes. Also, once we're done processing C, we never
|
||||||
|
/// use its data again, as we (correctly) assume that it will be zero'd out.
|
||||||
|
/// So, given the (big-endian) u32 with bytes Cxyz, no matter what xyz are,
|
||||||
|
/// we will always do:
|
||||||
|
/// Cxyz ^ Cab0 -> 0Dwz
|
||||||
|
/// where the bytes `ab` are dependent entirely on C. Assume again that
|
||||||
|
/// we now have a new u32 with bytes 0Dwz. We will again XOR this based
|
||||||
|
/// entirely on the bits in D, and it will look something like:
|
||||||
|
/// 0Dwz ^ 0Def -> 00uv
|
||||||
|
/// where again, `ef` is dependent on D. And, if D==C, then ef==ab.
|
||||||
|
/// Note as well that since we never look at the high order bits again,
|
||||||
|
/// the XORs by C and D are unnecessary. Rather than:
|
||||||
|
/// Cxyz ^ Cab0 -> 0Dwz
|
||||||
|
/// we can do:
|
||||||
|
/// Cxyz ^ 0ab0 -> CDwz
|
||||||
|
/// then:
|
||||||
|
/// CDwz ^ 00ef -> CDuv
|
||||||
|
/// as we're just going to return the lowest 16 bits to the caller.
|
||||||
|
/// Given that we're doing this byte-by-byte and there's only 256 total
|
||||||
|
/// potential bytes, we precompute all XORs into the REDUCE_BYTES
|
||||||
|
/// buffer. In the above example:
|
||||||
|
/// REDUCE_BYTES[C] -> ab
|
||||||
|
/// REDUCE_BYTES[D] -> ef
|
||||||
|
/// Since we're mapping every byte to a u16, we take up 512B of space
|
||||||
|
/// to do this, and our reduction is just a couple of pipelined shifts/XORs.
|
||||||
|
#[hax_lib::fstar::verification_status(panic_free)]
|
||||||
|
#[hax_lib::ensures(|result| fstar!(r#"
|
||||||
|
Spec.GF16.(to_bv result == poly_reduce #gf16 (to_bv v))
|
||||||
|
"#))]
|
||||||
|
pub const fn poly_reduce(v: u32) -> u16 {
|
||||||
|
let mut v = v;
|
||||||
|
let i1 = (v >> 24) as usize;
|
||||||
|
v ^= (REDUCE_BYTES[i1] as u32) << 8;
|
||||||
|
let shifted_v = (v >> 16) as usize;
|
||||||
|
let i2 = shifted_v & 0xFF;
|
||||||
|
hax_lib::fstar!("logand_lemma $shifted_v (mk_usize 255)");
|
||||||
|
v ^= REDUCE_BYTES[i2] as u32;
|
||||||
|
v as u16
|
||||||
|
}
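To make the description above concrete, here is a hypothetical test sketch (not part of the crate) that checks the table-driven poly_reduce against the bit-by-bit reduction the comment describes; it assumes it sits inside this reduce module so it can see poly_reduce and POLY.

    #[cfg(test)]
    mod reduce_sketch {
        use super::{poly_reduce, POLY};

        // Clear bits 31..16 one at a time, from the top down, by XOR-ing in a
        // shifted copy of POLY, exactly as the comment on poly_reduce describes.
        fn naive_reduce(mut v: u32) -> u16 {
            for bit in (16..32).rev() {
                if v & (1 << bit) != 0 {
                    v ^= POLY << (bit - 16);
                }
            }
            v as u16
        }

        #[test]
        fn matches_naive() {
            for v in [0u32, 0x1_0000, 0xdead_beef, u32::MAX] {
                assert_eq!(poly_reduce(v), naive_reduce(v));
            }
        }
    }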
|
||||||
|
|
||||||
|
/// Compute the u16 reduction associated with u8 `a`. See the comment
|
||||||
|
/// in poly_reduce for more details.
|
||||||
|
const fn reduce_from_byte(mut a: u8) -> u32 {
|
||||||
|
let mut out = 0u32;
|
||||||
|
let mut i: u32 = 8;
|
||||||
|
while i > 0 {
|
||||||
|
hax_lib::loop_invariant!(i <= 8);
|
||||||
|
hax_lib::loop_decreases!(i);
|
||||||
|
i -= 1;
|
||||||
|
if (1 << i) & a != 0 {
|
||||||
|
out ^= POLY << i;
|
||||||
|
a ^= ((POLY << i) >> 16) as u8;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Compute the u16 reductions for all bytes. See the comment in
|
||||||
|
/// poly_reduce for more details.
|
||||||
|
const fn reduce_bytes() -> [u16; 256] {
|
||||||
|
let mut out = [0u16; 256];
|
||||||
|
let mut i = 0;
|
||||||
|
while i < 256 {
|
||||||
|
hax_lib::loop_invariant!(hax_lib::prop::constructors::and(
|
||||||
|
(i <= 256).into(),
|
||||||
|
hax_lib::forall(|j: usize| hax_lib::implies(
|
||||||
|
j < i,
|
||||||
|
out[j] == (reduce_from_byte(j as u8) as u16)
|
||||||
|
))
|
||||||
|
));
|
||||||
|
hax_lib::loop_decreases!(256 - i);
|
||||||
|
out[i] = reduce_from_byte(i as u8) as u16;
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
const REDUCE_BYTES: [u16; 256] = reduce_bytes();
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GF16 {
|
||||||
|
pub const ZERO: Self = Self { value: 0 };
|
||||||
|
pub const ONE: Self = Self { value: 1 };
|
||||||
|
|
||||||
|
pub fn new(value: u16) -> Self {
|
||||||
|
Self { value }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn inv(&self) -> GF16 {
|
||||||
|
GF16::ONE.div_impl(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn div_impl(&self, other: &Self) -> Self {
|
||||||
|
// Within GF(p^n), inv(a) == a^(p^n - 2). Here we are in GF(2^16) = GF(65536),
// so inv(other) == other^65534, which the square-and-multiply loop below computes.
|
||||||
|
let mut square = *other * *other;
|
||||||
|
let mut out = *self;
|
||||||
|
for _i in 1..16 {
|
||||||
|
(square.value, out.value) = mul2_u16(square.value, square.value, out.value);
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const fn const_mul(&self, other: &Self) -> Self {
|
||||||
|
Self {
|
||||||
|
value: unaccelerated::mul(self.value, other.value),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const fn const_sub(&self, other: &Self) -> Self {
|
||||||
|
Self {
|
||||||
|
value: self.value ^ other.value,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const fn const_add(&self, other: &Self) -> Self {
|
||||||
|
Self {
|
||||||
|
value: self.value ^ other.value,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const fn const_div(&self, other: &Self) -> Self {
|
||||||
|
// Within GF(p^n), inv(a) == a^(p^n - 2). Here we are in GF(2^16) = GF(65536),
// so inv(other) == other^65534, computed by the square-and-multiply loop below.
|
||||||
|
let mut square = *other;
|
||||||
|
let mut out = *self;
|
||||||
|
{
|
||||||
|
// const for loop
|
||||||
|
let mut i: usize = 1;
|
||||||
|
while i < 16 {
|
||||||
|
hax_lib::loop_invariant!(i <= 16);
|
||||||
|
hax_lib::loop_decreases!(16 - i);
|
||||||
|
square = square.const_mul(&square);
|
||||||
|
out = out.const_mul(&square);
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const fn const_inv(&self) -> GF16 {
|
||||||
|
GF16::ONE.const_div(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use galois_field_2pm::gf2::GFu16;
|
||||||
|
use galois_field_2pm::GaloisField;
|
||||||
|
use rand::RngCore;
|
||||||
|
|
||||||
|
// https://web.eecs.utk.edu/~jplank/plank/papers/CS-07-593/primitive-polynomial-table.txt
|
||||||
|
type ExternalGF16 = GFu16<{ POLY as u128 }>;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn add() {
|
||||||
|
let mut rng = rand::rng();
|
||||||
|
for _i in 0..100 {
|
||||||
|
let x = rng.next_u32() as u16;
|
||||||
|
let y = rng.next_u32() as u16;
|
||||||
|
assert_eq!(
|
||||||
|
(GF16 { value: x } + GF16 { value: y }).value,
|
||||||
|
(ExternalGF16::new(x) + ExternalGF16::new(y)).value
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
fn mul() {
|
||||||
|
let mut rng = rand::rng();
|
||||||
|
for _i in 0..100 {
|
||||||
|
let x = rng.next_u32() as u16;
|
||||||
|
let y = rng.next_u32() as u16;
|
||||||
|
let a = (GF16 { value: x } * GF16 { value: y }).value;
|
||||||
|
let b = (ExternalGF16::new(x) * ExternalGF16::new(y)).value;
|
||||||
|
println!("{x:04x} * {y:04x} = {b:04x}");
|
||||||
|
assert_eq!(a, b);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
fn inv() {
|
||||||
|
let mut rng = rand::rng();
|
||||||
|
for _i in 0..100 {
|
||||||
|
let x = rng.next_u32() as u16;
|
||||||
|
assert_eq!(
|
||||||
|
(GF16 { value: x } * GF16 { value: x }.inv()).value,
|
||||||
|
GF16::ONE.value
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
fn div() {
|
||||||
|
let mut rng = rand::rng();
|
||||||
|
for _i in 0..100 {
|
||||||
|
let x = rng.next_u32() as u16;
|
||||||
|
let y = rng.next_u32() as u16;
|
||||||
|
if y == 0 {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
assert_eq!(
|
||||||
|
(GF16 { value: x } / GF16 { value: y }).value,
|
||||||
|
(ExternalGF16::new(x) / ExternalGF16::new(y)).value
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
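    // Hypothetical extra check, not part of the original source: the const fn
    // inversion (`const_inv`, used when building compile-time tables) should
    // agree with the runtime `inv` path, since both raise to the
    // 2^16 - 2 = 65534 power by repeated squaring.
    #[test]
    fn const_inv_matches_inv() {
        let mut rng = rand::rng();
        for _i in 0..100 {
            let x = rng.next_u32() as u16;
            if x == 0 {
                continue;
            }
            let a = GF16 { value: x };
            assert_eq!(a.const_inv().value, a.inv().value);
            // Multiplying back should give the multiplicative identity.
            assert_eq!((a * a.const_inv()).value, GF16::ONE.value);
        }
    }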
|
||||||
|
#[cfg(all(target_arch = "x86_64", target_feature = "sse2",))]
|
||||||
|
#[test]
|
||||||
|
fn x86_barrett_reduction() {
|
||||||
|
// Barrett reduction. Not noticeably faster than noarch::poly_reduce,
|
||||||
|
// but left here for posterity.
|
||||||
|
|
||||||
|
use super::POLY;
|
||||||
|
const POLY_128: [u32; 4] = [POLY, 0, 0, 0]; // Little-endian
|
||||||
|
const POLY_M128I: core::arch::x86_64::__m128i = unsafe { core::mem::transmute(POLY_128) };
|
||||||
|
const POLY_BARRETT_REDUCTION: u32 = 0x1111a; // 2^32 / POLY with carryless division
|
||||||
|
const POLYB_128: [u32; 4] = [POLY_BARRETT_REDUCTION, 0, 0, 0]; // Little-endian
|
||||||
|
const POLYB_M128I: core::arch::x86_64::__m128i = unsafe { core::mem::transmute(POLYB_128) };
|
||||||
|
let (a, b) = (0xf5u16, 0x93u16);
|
||||||
|
|
||||||
|
// initial multiplication
|
||||||
|
let unreduced_product = unsafe {
|
||||||
|
let a = core::arch::x86_64::_mm_set_epi64x(0, a as i64);
|
||||||
|
let b = core::arch::x86_64::_mm_set_epi64x(0, b as i64);
|
||||||
|
core::arch::x86_64::_mm_clmulepi64_si128(a, b, 0)
|
||||||
|
};
|
||||||
|
let result = unsafe {
|
||||||
|
// We perform a Barrett reduction with the precomputed POLYB=2^32/POLY,
|
||||||
|
// manually computed via XOR-based long division.
|
||||||
|
let quotient =
|
||||||
|
core::arch::x86_64::_mm_clmulepi64_si128(unreduced_product, POLYB_M128I, 0);
|
||||||
|
// We need to shift the quotient down 32 bits, so we'll do a register
|
||||||
|
// shuffle. We know the highest 32 bits of the 128b register are zero,
|
||||||
|
// so we'll use those for the top 3 32-bit portions of the register.
|
||||||
|
// So, given a 128-bit register with u32 values [0, a, b, c],
|
||||||
|
// we end up with a register with [0, 0, 0, b].
|
||||||
|
let quotient_shifted = core::arch::x86_64::_mm_shuffle_epi32(quotient, 0xf9);
|
||||||
|
// Now that we have the quotient q=floor(a*b/POLY), we subtract
|
||||||
|
// POLY*q from a*b to get the remainder:
|
||||||
|
let subtrahend =
|
||||||
|
core::arch::x86_64::_mm_clmulepi64_si128(POLY_M128I, quotient_shifted, 0);
|
||||||
|
// Of course, our difference is computed using XOR
|
||||||
|
core::arch::x86_64::_mm_cvtsi128_si64(core::arch::x86_64::_mm_xor_si128(
|
||||||
|
unreduced_product,
|
||||||
|
subtrahend,
|
||||||
|
)) as u16
|
||||||
|
};
|
||||||
|
assert_eq!(
|
||||||
|
result,
|
||||||
|
super::reduce::poly_reduce(unsafe {
|
||||||
|
core::arch::x86_64::_mm_cvtsi128_si64(unreduced_product)
|
||||||
|
} as u32)
|
||||||
|
);
|
||||||
|
}
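    // A portable sketch of the same Barrett reduction as the SIMD test above,
    // not part of the original source. It assumes POLY is the degree-16
    // reduction polynomial of GF(2^16) (a 17-bit constant) and reuses the
    // precomputed 2^32 / POLY quotient from that test. Useful for seeing the
    // algebra without the register shuffling.
    #[test]
    fn software_barrett_reduction() {
        use super::POLY;
        const POLY_BARRETT_REDUCTION: u64 = 0x1111a; // floor(x^32 / POLY) over GF(2)

        // Carry-less (XOR-based) multiplication of two small GF(2) polynomials.
        fn clmul(a: u64, b: u64) -> u64 {
            let mut acc = 0u64;
            for i in 0..32 {
                if (b >> i) & 1 == 1 {
                    acc ^= a << i;
                }
            }
            acc
        }

        let mut rng = rand::rng();
        for _i in 0..100 {
            let a = rng.next_u32() as u16;
            let b = rng.next_u32() as u16;
            // Unreduced carry-less product, degree <= 30, so it fits in a u32.
            let t = clmul(a as u64, b as u64) as u32;
            // q = floor(t * floor(x^32/POLY) / x^32) equals floor(t / POLY) over GF(2).
            let q = clmul(t as u64, POLY_BARRETT_REDUCTION) >> 32;
            // r = t - q*POLY, with subtraction being XOR in characteristic 2.
            let r = ((t as u64) ^ clmul(POLY as u64, q)) as u16;
            assert_eq!(r, super::reduce::poly_reduce(t));
        }
    }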
|
||||||
|
}
|
||||||
987
src/encoding/polynomial.rs
Normal file
|
|
@ -0,0 +1,987 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use sorted_vec::SortedSet;
|
||||||
|
|
||||||
|
use super::{Chunk, Decoder, Encoder};
|
||||||
|
|
||||||
|
use crate::encoding::gf::{self, GF16};
|
||||||
|
use crate::proto;
|
||||||
|
use std::cmp::Ordering;
|
||||||
|
|
||||||
|
#[derive(Debug, thiserror::Error, Copy, Clone, PartialEq)]
|
||||||
|
pub enum PolynomialError {
|
||||||
|
#[error("Message length must be divisible by 2")]
|
||||||
|
MessageLengthEven,
|
||||||
|
#[error("Message length is too long")]
|
||||||
|
MessageLengthTooLong,
|
||||||
|
#[error("Serialization invalid")]
|
||||||
|
SerializationInvalid,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Eq)]
|
||||||
|
/// Pt is an internal cartesian point for a function in the GF16
|
||||||
|
/// space. It's mostly used to allow for lookup and ordering by
|
||||||
|
/// the X value in a SortedSet.
|
||||||
|
struct Pt {
|
||||||
|
x: GF16,
|
||||||
|
y: GF16,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Pt {
|
||||||
|
fn serialize(&self) -> [u8; 4] {
|
||||||
|
let mut out = [0u8; 4];
|
||||||
|
out[..2].clone_from_slice(&self.x.value.to_be_bytes()[..]);
|
||||||
|
out[2..].clone_from_slice(&self.y.value.to_be_bytes()[..]);
|
||||||
|
out
|
||||||
|
}
|
||||||
|
fn deserialize(s: [u8; 4]) -> Self {
|
||||||
|
Self {
|
||||||
|
x: GF16::new(u16::from_be_bytes(s[..2].try_into().unwrap())),
|
||||||
|
y: GF16::new(u16::from_be_bytes(s[2..].try_into().unwrap())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for Pt {
|
||||||
|
fn cmp(&self, other: &Self) -> Ordering {
|
||||||
|
self.partial_cmp(other).unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOrd for Pt {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||||
|
Some(self.x.value.cmp(&other.x.value))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for Pt {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
self.x.value == other.x.value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The highest degree polynomial that will be stored for Protocol V1
|
||||||
|
pub const MAX_STORED_POLYNOMIAL_DEGREE_V1: usize = 35;
|
||||||
|
|
||||||
|
// The highest degree polynomial that will be constructed in intermediate
|
||||||
|
// calculations for Protocol V1
|
||||||
|
pub const MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1: usize = 36;
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq)]
|
||||||
|
pub(crate) struct Poly {
|
||||||
|
// For Protocol V1 we interpolate at most 36 values, which produces a
|
||||||
|
// degree 35 polynomial (with 36 coefficients). In an intermediate calculation
|
||||||
|
// during Lagrange interpolation, we need to compute a polynomial one degree
|
||||||
|
// higher, thus we get the following constraint:
|
||||||
|
//
|
||||||
|
// coefficients.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1 + 1
|
||||||
|
pub coefficients: Vec<GF16>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl Poly {
|
||||||
|
//
|
||||||
|
// capacity <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1 + 1
|
||||||
|
fn zero(capacity: usize) -> Self {
|
||||||
|
Self {
|
||||||
|
coefficients: Vec::with_capacity(capacity),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Given a set of points with unique X values, return a Poly that
|
||||||
|
/// computes f(pts[i].x) == pts[i].y for all points.
|
||||||
|
///
|
||||||
|
/// This takes O(N^2) work and O(N) space, carefully allocated up
|
||||||
|
/// front to avoid overhead.
|
||||||
|
#[hax_lib::requires(pts.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1)]
|
||||||
|
fn lagrange_interpolate(pts: &[Pt]) -> Self {
|
||||||
|
let mut out = Self::zero(pts.len());
|
||||||
|
if pts.is_empty() {
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
let template = Self::lagrange_interpolate_prepare(pts);
|
||||||
|
let mut working = template.clone();
|
||||||
|
|
||||||
|
// Unroll the first loop to skip some unnecessary work.
|
||||||
|
working.lagrange_interpolate_complete(pts, 0);
|
||||||
|
// Note that `working` is `x * <the polynomial we need>`.
|
||||||
|
out.coefficients
|
||||||
|
.extend_from_slice(&working.coefficients[1..]);
|
||||||
|
|
||||||
|
let _w_l = working.coefficients.len();
|
||||||
|
for i in 1..pts.len() {
|
||||||
|
hax_lib::loop_invariant!(
|
||||||
|
|_: usize| out.coefficients.len() == _w_l - 1 && working.coefficients.len() == _w_l
|
||||||
|
);
|
||||||
|
working.coefficients.copy_from_slice(&template.coefficients);
|
||||||
|
working.lagrange_interpolate_complete(pts, i);
|
||||||
|
// We can't use `add_assign` because `working` is `x * <the polynomial we need>`.
|
||||||
|
// Removing the lowest coefficient would cost a memmove.
|
||||||
|
// So we just skip it in this loop to effectively "divide by x".
|
||||||
|
for j in 0..out.coefficients.len() {
|
||||||
|
hax_lib::loop_invariant!(|_: usize| out.coefficients.len() == _w_l - 1);
|
||||||
|
out.coefficients[j] += working.coefficients[j + 1];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Computes `PRODUCT(x - pi.x)` as part of Lagrange interpolation.
|
||||||
|
///
|
||||||
|
/// This takes O(N^2) work: for each of N points, we are multiplying every coefficient of a
|
||||||
|
/// 1..N-degree polynomial.
|
||||||
|
#[hax_lib::requires(pts.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1)]
|
||||||
|
fn lagrange_interpolate_prepare(pts: &[Pt]) -> Self {
|
||||||
|
// We're going to fill in the coefficients from largest to smallest, so we start by putting
|
||||||
|
// a 1 in the *highest* field rather than the lowest. This lets us avoid sliding
|
||||||
|
// coefficients as we go, but it also means we have to track the offset of which
|
||||||
|
// coefficients have been initialized manually.
|
||||||
|
let mut p = Self::zero(pts.len() + 1);
|
||||||
|
p.coefficients.resize(pts.len() + 1, GF16::ZERO);
|
||||||
|
let offset = pts.len();
|
||||||
|
p.coefficients[offset] = GF16::ONE;
|
||||||
|
|
||||||
|
#[allow(clippy::needless_range_loop)]
|
||||||
|
for i in 0..offset {
|
||||||
|
hax_lib::loop_invariant!(|_: usize| p.coefficients.len() == offset + 1);
|
||||||
|
let pi = pts[i];
|
||||||
|
p.mult_xdiff_assign_trailing(offset - i, pi.x);
|
||||||
|
}
|
||||||
|
#[cfg(not(hax))]
|
||||||
|
debug_assert_eq!(p.coefficients[pts.len()], GF16::ONE);
|
||||||
|
p
|
||||||
|
}
|
||||||
|
|
||||||
|
/// self[start..] *= (x - difference)
|
||||||
|
///
|
||||||
|
/// Interprets the trailing N coefficients as a smaller polynomial, and multiplies *that*
|
||||||
|
/// polynomial by `(x - difference)`, with the "carry" propagating into self[start-1]. This
|
||||||
|
/// works because (x-d)(poly) = x*poly - d*poly.
|
||||||
|
///
|
||||||
|
/// This allows us to build up a polynomial from its *largest* coefficient, and thus avoid
|
||||||
|
/// sliding coefficients in the vector as we go.
|
||||||
|
#[hax_lib::requires(0 < start && start <= self.coefficients.len())]
|
||||||
|
fn mult_xdiff_assign_trailing(&mut self, start: usize, difference: GF16) {
|
||||||
|
let l = self.coefficients.len();
|
||||||
|
for i in start..l {
|
||||||
|
hax_lib::loop_invariant!(|_: usize| self.coefficients.len() == l);
|
||||||
|
let delta = self.coefficients[i] * difference;
|
||||||
|
self.coefficients[i - 1] -= delta;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Given `PRODUCT(x - pi.x)`, creates something very close to the Lagrange poly for `pts[i]` in
|
||||||
|
/// `pts`.
|
||||||
|
///
|
||||||
|
/// This computes `f(pts[i].x) == pts[i].y`, and `f(pts[*].x) == 0` for all other points. It
|
||||||
|
/// does so by first dividing out the specific `(x - pi.x)` we care about, then scaling so that
|
||||||
|
/// the remaining polynomial produces `pi.y` at `pi.x`.
|
||||||
|
///
|
||||||
|
/// However, due to our representation, the result we actually end up with is scaled by `x`.
|
||||||
|
/// It's cheaper to have callers deal with that manually than to adjust it here, though.
|
||||||
|
///
|
||||||
|
/// This does O(N) work: one loop over the coefficients to divide out the term we need, one loop
|
||||||
|
/// over the points to calculate the scaling factor, and then one final loop to scale the
|
||||||
|
/// coefficients.
|
||||||
|
#[hax_lib::requires(i < pts.len())]
|
||||||
|
fn lagrange_interpolate_complete(&mut self, pts: &[Pt], i: usize) {
|
||||||
|
let pi = &pts[i];
|
||||||
|
|
||||||
|
// Compute the scaling factor.
|
||||||
|
let mut denominator = GF16::ONE;
|
||||||
|
for pj in pts {
|
||||||
|
if pi.x == pj.x {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
denominator *= pi.x - pj.x;
|
||||||
|
}
|
||||||
|
let scale = pi.y / denominator;
|
||||||
|
|
||||||
|
// Divide out (x - pi.x) using plain old long division, and scale as we go.
|
||||||
|
// This avoids having to reload the same value from memory twice.
|
||||||
|
// Remember our coefficients are in little-endian order, so we start from the end.
|
||||||
|
let _init_l = self.coefficients.len();
|
||||||
|
for j in 1..self.coefficients.len() {
|
||||||
|
hax_lib::loop_invariant!(|_: usize| self.coefficients.len() == _init_l);
|
||||||
|
let i = self.coefficients.len() - j;
|
||||||
|
let negative_delta = self.coefficients[i] * pi.x;
|
||||||
|
self.coefficients[i] *= scale;
|
||||||
|
self.coefficients[i - 1] += negative_delta;
|
||||||
|
}
|
||||||
|
#[cfg(not(hax))]
|
||||||
|
debug_assert_eq!(self.coefficients[0], GF16::ZERO, "should divide cleanly");
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create the Lagrange poly for `pts[i]` in `pts`, which computes
|
||||||
|
/// f(pts[i].x) == pts[i].y, and f(pts[*].x) == 0 for all other points.
|
||||||
|
///
|
||||||
|
/// This interface is used only as a fallback for encoding; we do not rely
|
||||||
|
/// on it for speed, so it's okay that it's doing a bit of extra work.
|
||||||
|
#[hax_lib::requires(pts.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1 && i < pts.len())]
|
||||||
|
fn lagrange_interpolate_pt(pts: &[Pt], i: usize) -> Self {
|
||||||
|
let mut result = Self::lagrange_interpolate_prepare(pts);
|
||||||
|
result.lagrange_interpolate_complete(pts, i);
|
||||||
|
result.coefficients.remove(0);
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// self += other
|
||||||
|
fn add_assign(&mut self, other: &Self) {
|
||||||
|
for (i, v) in other.coefficients.iter().enumerate() {
|
||||||
|
if i < self.coefficients.len() {
|
||||||
|
self.coefficients[i] += *v;
|
||||||
|
} else {
|
||||||
|
self.coefficients.push(*v);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// self *= m
|
||||||
|
fn mult_assign(&mut self, m: GF16) {
|
||||||
|
gf::parallel_mult(m, &mut self.coefficients);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn compute_at(&self, x: GF16) -> GF16 {
|
||||||
|
// Compute x^0 .. x^N
|
||||||
|
let mut xs = Vec::with_capacity(self.coefficients.len());
|
||||||
|
xs.push(GF16::ONE);
|
||||||
|
xs.push(x);
|
||||||
|
for i in 2..self.coefficients.len() {
|
||||||
|
hax_lib::loop_invariant!(|i: usize| i == xs.len() && i / 2 < i && i >= 2);
|
||||||
|
let a = xs[i / 2];
|
||||||
|
let b = xs[(i / 2) + (i % 2)];
|
||||||
|
xs.push(a * b);
|
||||||
|
}
|
||||||
|
// Multiply and sum
|
||||||
|
let mut out = GF16::ZERO;
|
||||||
|
for (a, b) in self.coefficients.iter().zip(xs.iter()) {
|
||||||
|
out += *a * *b;
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Internal function for lagrange_polynomial_from_complete_points.
|
||||||
|
fn lagrange_sum(pts: &[Pt], polys: &[Poly]) -> Poly {
|
||||||
|
let mut out = Poly::zero(pts.len());
|
||||||
|
for (pt, poly) in pts.iter().zip(polys.iter()) {
|
||||||
|
let mut p = poly.clone();
|
||||||
|
p.mult_assign(pt.y);
|
||||||
|
out.add_assign(&p);
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Given a set of "complete" points with x values that fully fill the
|
||||||
|
/// range [0..pts.len()), return a polynomial that computes those points.
|
||||||
|
#[hax_lib::requires(pts.len() == 0 || pts.len() == 1 || pts.len() == 3 || pts.len() == 5
|
||||||
|
|| pts.len() == 30 || pts.len() == 34 || pts.len() == 36)]
|
||||||
|
fn from_complete_points(pts: &[Pt]) -> Result<Poly, ()> {
|
||||||
|
for (i, pt) in pts.iter().enumerate() {
|
||||||
|
if pt.x.value != i as u16 {
|
||||||
|
return Err(());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// The `as u64` is for hax.
|
||||||
|
// The following constraint holds for Protocol V1
|
||||||
|
// polys.len() <= MAX_STORED_POLYNOMIAL_DEGREE_V1 + 1
|
||||||
|
let polys = match pts.len() as u64 {
|
||||||
|
0 => vec![],
|
||||||
|
1 => const_polys_to_polys(&COMPLETE_POINTS_POLYS_1),
|
||||||
|
3 => const_polys_to_polys(&COMPLETE_POINTS_POLYS_3),
|
||||||
|
5 => const_polys_to_polys(&COMPLETE_POINTS_POLYS_5),
|
||||||
|
30 => const_polys_to_polys(&COMPLETE_POINTS_POLYS_30),
|
||||||
|
34 => const_polys_to_polys(&COMPLETE_POINTS_POLYS_34),
|
||||||
|
36 => const_polys_to_polys(&COMPLETE_POINTS_POLYS_36),
|
||||||
|
_ => {
|
||||||
|
debug_assert!(false, "missing precomputed poly of size {}", pts.len());
|
||||||
|
let ones = pts
|
||||||
|
.iter()
|
||||||
|
.map(|pt| Pt {
|
||||||
|
x: pt.x,
|
||||||
|
y: GF16::ONE,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
pts.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(i, _pt)| Self::lagrange_interpolate_pt(&ones, i))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Ok(Self::lagrange_sum(pts, &polys))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(self.coefficients.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1)]
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
// For Protocol V1 the polynomials that get serialized will always have
|
||||||
|
// coefficients.len() <= MAX_STORED_POLYNOMIAL_DEGREE_V1 + 1
|
||||||
|
let mut out = Vec::<u8>::with_capacity(self.coefficients.len() * 2);
|
||||||
|
for i in 0..self.coefficients.len() {
|
||||||
|
hax_lib::loop_invariant!(|i: usize| out.len() == 2 * i);
|
||||||
|
let c = self.coefficients[i];
|
||||||
|
out.extend_from_slice(&c.value.to_be_bytes()[..]);
|
||||||
|
}
|
||||||
|
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deserialize(serialized: &[u8]) -> Result<Self, PolynomialError> {
|
||||||
|
if serialized.is_empty() || serialized.len() % 2 == 1 {
|
||||||
|
return Err(PolynomialError::SerializationInvalid);
|
||||||
|
}
|
||||||
|
let mut coefficients = Vec::<GF16>::with_capacity(serialized.len() / 2);
|
||||||
|
for coeff in serialized.chunks_exact(2) {
|
||||||
|
coefficients.push(GF16::new(u16::from_be_bytes(coeff.try_into().unwrap())));
|
||||||
|
}
|
||||||
|
Ok(Self { coefficients })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// For Protocol V1 with MLKEM-768: N <= MAX_STORED_POLYNOMIAL_DEGREE_V1 + 1
|
||||||
|
struct PolyConst<const N: usize> {
|
||||||
|
coefficients: [GF16; N],
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl<const N: usize> PolyConst<N> {
|
||||||
|
const ZEROS: Self = Self {
|
||||||
|
coefficients: [GF16::ZERO; N],
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Create the Lagrange poly for `pts[i]` in `pts`, which computes
|
||||||
|
/// f(pts[i].x) == pts[i].y, and f(pts[*].x) == 0 for all other points.
|
||||||
|
#[hax_lib::requires(i < N && pts.len() >= N && N > 0)]
|
||||||
|
const fn lagrange_interpolate_pt(pts: &[Pt], i: usize) -> Self {
|
||||||
|
let pi = &pts[i];
|
||||||
|
let mut p = Self {
|
||||||
|
coefficients: [GF16::ZERO; N],
|
||||||
|
};
|
||||||
|
p.coefficients[0] = GF16::ONE;
|
||||||
|
let mut denominator = GF16::ONE;
|
||||||
|
{
|
||||||
|
// const for loop
|
||||||
|
let mut j: usize = 0;
|
||||||
|
while j < N {
|
||||||
|
hax_lib::loop_invariant!(j <= N);
|
||||||
|
hax_lib::loop_decreases!(N - j);
|
||||||
|
let pj = &pts[j];
|
||||||
|
j += 1;
|
||||||
|
if pi.x.value == pj.x.value {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// p.coefficients[N - 1].value == 0
|
||||||
|
p = p.mult_xdiff(pj.x);
|
||||||
|
denominator = denominator.const_mul(&pi.x.const_sub(&pj.x));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// mul_assign(pi.y / denominator)
|
||||||
|
p.mult(pi.y.const_div(&denominator))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// self * m
|
||||||
|
const fn mult(&self, m: GF16) -> Self {
|
||||||
|
let mut i: usize = 0;
|
||||||
|
let mut out = Self {
|
||||||
|
coefficients: self.coefficients,
|
||||||
|
};
|
||||||
|
while i < N {
|
||||||
|
hax_lib::loop_invariant!(i <= N);
|
||||||
|
hax_lib::loop_decreases!(N - i);
|
||||||
|
out.coefficients[i] = out.coefficients[i].const_mul(&m);
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
/// self * (x - difference)
|
||||||
|
// #[hax_lib::requires(N > 0 && self.coefficients[N - 1].value == 0)]
|
||||||
|
#[hax_lib::opaque] // The precondition above is needed to prove panic freedom here but hard to prove for calls
|
||||||
|
const fn mult_xdiff(&self, difference: GF16) -> Self {
|
||||||
|
// Because we're constant-sized, we can't overflow, so check in advance
|
||||||
|
// that we won't.
|
||||||
|
if self.coefficients[N - 1].value != 0 {
|
||||||
|
panic!("overflow in const mult_xdiff");
|
||||||
|
}
|
||||||
|
// We're multiplying (x-d)(poly), so we distribute that
|
||||||
|
// into two operations: x*poly - d*poly.
|
||||||
|
// We'll store the first in xp and the second in dp.
|
||||||
|
let mut xp = [GF16::ZERO; N];
|
||||||
|
let mut dp = [GF16::ZERO; N];
|
||||||
|
|
||||||
|
{
|
||||||
|
// const for loop
|
||||||
|
let mut i: usize = 0;
|
||||||
|
while i < N {
|
||||||
|
hax_lib::loop_invariant!(i <= N);
|
||||||
|
hax_lib::loop_decreases!(N - i);
|
||||||
|
// First, we make xp[*] into x*poly. This simply shifts the coefficients over by one.
|
||||||
|
if i < N - 1 {
|
||||||
|
xp[i + 1] = self.coefficients[i];
|
||||||
|
}
|
||||||
|
// Then, we make dp[*] into d*poly.
|
||||||
|
dp[i] = self.coefficients[i].const_mul(&difference);
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Finally, we subtract: x*poly - d*poly -> xp[*] - dp[*]
|
||||||
|
{
|
||||||
|
// const for loop
|
||||||
|
let mut i: usize = 0;
|
||||||
|
while i < N {
|
||||||
|
hax_lib::loop_invariant!(i <= N);
|
||||||
|
hax_lib::loop_decreases!(N - i);
|
||||||
|
xp[i] = xp[i].const_sub(&dp[i]);
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Self { coefficients: xp }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_poly(&self) -> Poly {
|
||||||
|
Poly {
|
||||||
|
coefficients: self.coefficients.to_vec(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// For Protocol V1 N <= 36
|
||||||
|
fn const_polys_to_polys<const N: usize>(cps: &[PolyConst<N>; N]) -> Vec<Poly> {
|
||||||
|
cps.iter().map(|x| x.to_poly()).collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
|
||||||
|
const fn lagrange_polys_for_complete_points<const N: usize>() -> [PolyConst<N>; N] {
|
||||||
|
let mut ones = [Pt {
|
||||||
|
x: GF16::ZERO,
|
||||||
|
y: GF16::ONE,
|
||||||
|
}; N];
|
||||||
|
{
|
||||||
|
// const for loop
|
||||||
|
let mut i: usize = 0;
|
||||||
|
while i < N {
|
||||||
|
hax_lib::loop_invariant!(i <= N);
|
||||||
|
hax_lib::loop_decreases!(N - i);
|
||||||
|
ones[i].x.value = i as u16;
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut out = [PolyConst::<N>::ZEROS; N];
|
||||||
|
{
|
||||||
|
// const for loop
|
||||||
|
let mut i: usize = 0;
|
||||||
|
while i < N {
|
||||||
|
hax_lib::loop_invariant!(i <= N);
|
||||||
|
hax_lib::loop_decreases!(N - i);
|
||||||
|
out[i] = PolyConst::<N>::lagrange_interpolate_pt(&ones, i);
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
// Precompute Lagrange polynomials for each message size we need when running
|
||||||
|
// the v1 protocol using MLKEM-768 and when running tests.
|
||||||
|
const COMPLETE_POINTS_POLYS_1: [PolyConst<1>; 1] = lagrange_polys_for_complete_points::<1>();
|
||||||
|
const COMPLETE_POINTS_POLYS_3: [PolyConst<3>; 3] = lagrange_polys_for_complete_points::<3>();
|
||||||
|
const COMPLETE_POINTS_POLYS_5: [PolyConst<5>; 5] = lagrange_polys_for_complete_points::<5>();
|
||||||
|
const COMPLETE_POINTS_POLYS_30: [PolyConst<30>; 30] = lagrange_polys_for_complete_points::<30>();
|
||||||
|
const COMPLETE_POINTS_POLYS_34: [PolyConst<34>; 34] = lagrange_polys_for_complete_points::<34>();
|
||||||
|
const COMPLETE_POINTS_POLYS_36: [PolyConst<36>; 36] = lagrange_polys_for_complete_points::<36>();
|
||||||
|
|
||||||
|
// Size of a chunk in bytes
|
||||||
|
const CHUNK_SIZE: usize = 32;
|
||||||
|
// Number of polys or points that need to be tracked when using GF(2^16) with 2-byte elements
|
||||||
|
const NUM_POLYS: usize = CHUNK_SIZE / 2;
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub(crate) enum EncoderState {
|
||||||
|
// For 32B chunks the outer vector has length 16.
|
||||||
|
// Using MLKEM-768 the inner vector has length <= MAX_STORED_POLYNOMIAL_DEGREE_V1 + 1
|
||||||
|
Points([Vec<GF16>; NUM_POLYS]),
|
||||||
|
// For 32B chunks this vector has length 16.
|
||||||
|
Polys([Poly; NUM_POLYS]),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct PolyEncoder {
|
||||||
|
idx: u32,
|
||||||
|
s: EncoderState,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl PolyEncoder {
|
||||||
|
#[allow(dead_code)] // used in hax annotations
|
||||||
|
pub(crate) fn get_encoder_state(&self) -> &EncoderState {
|
||||||
|
&self.s
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(match self.s {
|
||||||
|
EncoderState::Points(points) => hax_lib::Prop::from(points.len() == 16).and(hax_lib::prop::forall(|pts: &Vec<GF16>|
|
||||||
|
hax_lib::prop::implies(points.contains(pts), pts.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1))),
|
||||||
|
EncoderState::Polys(polys) => hax_lib::Prop::from(polys.len() == 16).and(hax_lib::prop::forall(|poly: &Poly|
|
||||||
|
hax_lib::prop::implies(polys.contains(poly), poly.coefficients.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1)))
|
||||||
|
})]
|
||||||
|
pub fn into_pb(self) -> proto::pq_ratchet::PolynomialEncoder {
|
||||||
|
let mut out = proto::pq_ratchet::PolynomialEncoder {
|
||||||
|
idx: self.idx,
|
||||||
|
pts: Vec::with_capacity(16),
|
||||||
|
polys: Vec::with_capacity(16),
|
||||||
|
};
|
||||||
|
match self.s {
|
||||||
|
EncoderState::Points(ref points) =>
|
||||||
|
{
|
||||||
|
#[allow(clippy::needless_range_loop)]
|
||||||
|
for j in 0..points.len() {
|
||||||
|
hax_lib::loop_invariant!(|j: usize| out.pts.len() == j);
|
||||||
|
let pts = &points[j];
|
||||||
|
let mut v = Vec::<u8>::with_capacity(2 * pts.len());
|
||||||
|
#[allow(clippy::needless_range_loop)]
|
||||||
|
for i in 0..pts.len() {
|
||||||
|
hax_lib::loop_invariant!(|i: usize| v.len() == 2 * i);
|
||||||
|
let pt = pts[i];
|
||||||
|
v.extend_from_slice(&pt.value.to_be_bytes()[..]);
|
||||||
|
}
|
||||||
|
out.pts.push(v);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EncoderState::Polys(ref polys) => {
|
||||||
|
for poly in polys.iter() {
|
||||||
|
out.polys.push(poly.serialize());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: proto::pq_ratchet::PolynomialEncoder) -> Result<Self, PolynomialError> {
|
||||||
|
let s = if !pb.pts.is_empty() {
|
||||||
|
if !pb.polys.is_empty() {
|
||||||
|
return Err(PolynomialError::SerializationInvalid);
|
||||||
|
}
|
||||||
|
if pb.pts.len() != NUM_POLYS {
|
||||||
|
return Err(PolynomialError::SerializationInvalid);
|
||||||
|
}
|
||||||
|
let mut out = core::array::from_fn(|_| Vec::<GF16>::new());
|
||||||
|
|
||||||
|
#[allow(clippy::needless_range_loop)]
|
||||||
|
for i in 0..NUM_POLYS {
|
||||||
|
hax_lib::loop_invariant!(|_: usize| hax_lib::prop::forall(|pts: &Vec<GF16>| {
|
||||||
|
hax_lib::prop::implies(
|
||||||
|
out.contains(pts),
|
||||||
|
pts.len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1,
|
||||||
|
)
|
||||||
|
}));
|
||||||
|
let pts = &pb.pts[i];
|
||||||
|
if pts.len() % 2 != 0 {
|
||||||
|
return Err(PolynomialError::SerializationInvalid);
|
||||||
|
}
|
||||||
|
let mut v = Vec::<GF16>::with_capacity(pts.len());
|
||||||
|
for pt in pts.chunks_exact(2) {
|
||||||
|
v.push(GF16::new(u16::from_be_bytes(pt.try_into().unwrap())));
|
||||||
|
}
|
||||||
|
out[i] = v;
|
||||||
|
}
|
||||||
|
EncoderState::Points(out)
|
||||||
|
} else if pb.polys.len() == NUM_POLYS {
|
||||||
|
let mut out: [Poly; NUM_POLYS] = core::array::from_fn(|_| Poly::zero(1));
|
||||||
|
for (i, poly) in pb.polys.iter().enumerate() {
|
||||||
|
out[i] = Poly::deserialize(poly)?;
|
||||||
|
}
|
||||||
|
EncoderState::Polys(out)
|
||||||
|
} else {
|
||||||
|
return Err(PolynomialError::SerializationInvalid);
|
||||||
|
};
|
||||||
|
Ok(Self { idx: pb.idx, s })
|
||||||
|
}
|
||||||
|
|
||||||
|
#[requires(poly < 16)]
|
||||||
|
fn point_at(&mut self, poly: usize, idx: usize) -> GF16 {
|
||||||
|
if let EncoderState::Points(ref pts) = self.s {
|
||||||
|
hax_lib::assume!(pts.len() == 16);
|
||||||
|
if idx < pts[poly].len() {
|
||||||
|
return pts[poly][idx];
|
||||||
|
}
|
||||||
|
// If we reach here, we've come to the first point we want to
|
||||||
|
// find that wasn't part of the original set of points. We
|
||||||
|
// assume that from here on, we're always going to have to compute
|
||||||
|
// points, so we replace our set of points with an associated
|
||||||
|
// set of polys that allow us to compute any point.
|
||||||
|
let mut polys: [Poly; NUM_POLYS] = core::array::from_fn(|_| Poly::zero(1));
|
||||||
|
for i in 0..NUM_POLYS {
|
||||||
|
let pt_vec = pts[i]
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(x, y)| Pt {
|
||||||
|
x: GF16::new(x as u16),
|
||||||
|
y: *y,
|
||||||
|
})
|
||||||
|
.collect::<Vec<Pt>>();
|
||||||
|
hax_lib::assume!(
|
||||||
|
pt_vec.len() == 0
|
||||||
|
|| pt_vec.len() == 1
|
||||||
|
|| pt_vec.len() == 3
|
||||||
|
|| pt_vec.len() == 5
|
||||||
|
|| pt_vec.len() == 30
|
||||||
|
|| pt_vec.len() == 34
|
||||||
|
|| pt_vec.len() == 36
|
||||||
|
);
|
||||||
|
let res = Poly::from_complete_points(&pt_vec);
|
||||||
|
hax_lib::assume!(res.is_ok());
|
||||||
|
polys[i] = res.expect("pt_vec should be complete")
|
||||||
|
}
|
||||||
|
self.s = EncoderState::Polys(polys);
|
||||||
|
}
|
||||||
|
if let EncoderState::Polys(ref polys) = self.s {
|
||||||
|
hax_lib::assume!(polys.len() == 16);
|
||||||
|
polys[poly].compute_at(GF16::new(idx as u16))
|
||||||
|
} else {
|
||||||
|
panic!("if we reach here, we should have polys");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn encode_bytes_base(msg: &[u8]) -> Result<Self, super::EncodingError> {
|
||||||
|
if msg.len() % 2 != 0 {
|
||||||
|
return Err(PolynomialError::MessageLengthEven.into());
|
||||||
|
} else if msg.len() > (1 << 16) * NUM_POLYS {
|
||||||
|
return Err(PolynomialError::MessageLengthTooLong.into());
|
||||||
|
}
|
||||||
|
let mut pts: [Vec<GF16>; NUM_POLYS] =
|
||||||
|
core::array::from_fn(|_| Vec::<GF16>::with_capacity(msg.len() / 2));
|
||||||
|
for (i, c) in msg.chunks_exact(2).enumerate() {
|
||||||
|
hax_lib::loop_invariant!(|_: usize| pts.len() >= NUM_POLYS);
|
||||||
|
let poly = i % pts.len();
|
||||||
|
pts[poly].push(GF16::new(((c[0] as u16) << 8) + (c[1] as u16)));
|
||||||
|
}
|
||||||
|
Ok(Self {
|
||||||
|
idx: 0,
|
||||||
|
s: EncoderState::Points(pts),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// public for benchmarking
|
||||||
|
pub fn chunk_at(&mut self, idx: u16) -> Chunk {
|
||||||
|
let mut out = Vec::with_capacity(32);
|
||||||
|
let _p = 16;
|
||||||
|
for i in 0..16 {
|
||||||
|
hax_lib::loop_invariant!(|i: usize| _p == 16 && out.len() == 2 * i);
|
||||||
|
let total_idx = (idx as usize) * 16 + i;
|
||||||
|
let poly = total_idx % 16;
|
||||||
|
let poly_idx = total_idx / 16;
|
||||||
|
let p = self.point_at(poly, poly_idx).value;
|
||||||
|
out.push((p >> 8) as u8);
|
||||||
|
out.push(p as u8);
|
||||||
|
}
|
||||||
|
Chunk {
|
||||||
|
index: idx,
|
||||||
|
data: (&out[..]).try_into().expect("should be exactly 32 bytes"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl Encoder for PolyEncoder {
|
||||||
|
fn encode_bytes(msg: &[u8]) -> Result<Self, super::EncodingError> {
|
||||||
|
Self::encode_bytes_base(msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn next_chunk(&mut self) -> Chunk {
|
||||||
|
let out = self.chunk_at(self.idx as u16);
|
||||||
|
self.idx = self.idx.wrapping_add(1);
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(false)]
|
||||||
|
fn data(&self) -> &Vec<u8> {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct PolyDecoder {
|
||||||
|
// When using MLKEM-768 pts_needed <= 576
|
||||||
|
pub pts_needed: usize,
|
||||||
|
// polys == (size of an encoding chunk)/(size of a field element)
|
||||||
|
//polys: usize,
|
||||||
|
|
||||||
|
// A set of points ordered and equality-checked by the X value. When using
|
||||||
|
// MLKEM-768, the size of the sorted set will not exceed
|
||||||
|
// 2*MAX_STORED_POLYNOMIAL_DEGREE_V1 + 1
|
||||||
|
//
|
||||||
|
// It can get this large because we will only add a new chunk if it has
|
||||||
|
// index less than the degree of the polynomial plus 1 (to allow decoding
|
||||||
|
// without interpolation) or if we do not have enough chunks yet. Thus it is
|
||||||
|
// possible for us to receive MAX_STORED_POLYNOMIAL_DEGREE_V1 chunks with
|
||||||
|
// index > MAX_STORED_POLYNOMIAL_DEGREE_V1+1 and also receive all
|
||||||
|
// MAX_STORED_POLYNOMIAL_DEGREE_V1 + 1 chunks with index below
|
||||||
|
// MAX_STORED_POLYNOMIAL_DEGREE_V1+1 before decoding the message.
|
||||||
|
pts: [SortedSet<Pt>; 16],
|
||||||
|
is_complete: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl PolyDecoder {
|
||||||
|
pub fn get_pts_needed(&self) -> usize {
|
||||||
|
self.pts_needed
|
||||||
|
}
|
||||||
|
|
||||||
|
fn necessary_points(&self, poly: usize) -> usize {
|
||||||
|
let points_per_poly = self.pts_needed / 16;
|
||||||
|
let points_remaining = self.pts_needed % 16;
|
||||||
|
if poly < points_remaining {
|
||||||
|
points_per_poly + 1
|
||||||
|
} else {
|
||||||
|
points_per_poly
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new_with_poly_count(len_bytes: usize, _polys: usize) -> Result<Self, super::EncodingError> {
|
||||||
|
if len_bytes % 2 != 0 {
|
||||||
|
return Err(PolynomialError::MessageLengthEven.into());
|
||||||
|
}
|
||||||
|
Ok(Self {
|
||||||
|
pts_needed: len_bytes / 2,
|
||||||
|
pts: core::array::from_fn(|_| SortedSet::new()),
|
||||||
|
is_complete: false,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_pb(self) -> proto::pq_ratchet::PolynomialDecoder {
|
||||||
|
let mut out = proto::pq_ratchet::PolynomialDecoder {
|
||||||
|
pts_needed: self.pts_needed as u32,
|
||||||
|
polys: 16,
|
||||||
|
is_complete: self.is_complete,
|
||||||
|
pts: Vec::with_capacity(self.pts.len()),
|
||||||
|
};
|
||||||
|
for pts in self.pts.iter() {
|
||||||
|
hax_lib::assume!(pts.len() <= 2 * MAX_STORED_POLYNOMIAL_DEGREE_V1 + 1);
|
||||||
|
let mut v = Vec::<u8>::with_capacity(4 * pts.len());
|
||||||
|
for i in 0..pts.len() {
|
||||||
|
hax_lib::loop_invariant!(|i: usize| (v.len() == i * 4));
|
||||||
|
let pt = &pts[i];
|
||||||
|
v.extend_from_slice(&pt.serialize()[..]);
|
||||||
|
}
|
||||||
|
out.pts.push(v);
|
||||||
|
}
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: proto::pq_ratchet::PolynomialDecoder) -> Result<Self, PolynomialError> {
|
||||||
|
if pb.pts.len() != 16 {
|
||||||
|
return Err(PolynomialError::SerializationInvalid);
|
||||||
|
}
|
||||||
|
let mut out = Self {
|
||||||
|
pts_needed: pb.pts_needed as usize,
|
||||||
|
is_complete: pb.is_complete,
|
||||||
|
pts: core::array::from_fn(|_| SortedSet::new()),
|
||||||
|
};
|
||||||
|
for i in 0..16 {
|
||||||
|
let pts = &pb.pts[i];
|
||||||
|
if pts.len() % 4 != 0 {
|
||||||
|
return Err(PolynomialError::SerializationInvalid);
|
||||||
|
}
|
||||||
|
let mut v = SortedSet::with_capacity(pts.len() / 4);
|
||||||
|
for pt in pts.chunks_exact(4) {
|
||||||
|
v.push(Pt::deserialize(pt.try_into().unwrap()));
|
||||||
|
}
|
||||||
|
out.pts[i] = v;
|
||||||
|
}
|
||||||
|
Ok(out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl Decoder for PolyDecoder {
|
||||||
|
fn new(len_bytes: usize) -> Result<Self, super::EncodingError> {
|
||||||
|
Self::new_with_poly_count(len_bytes, 16)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(self.pts.len() == 16)]
|
||||||
|
fn add_chunk(&mut self, chunk: &Chunk) {
|
||||||
|
for i in 0usize..16 {
|
||||||
|
hax_lib::loop_invariant!(|_: usize| self.pts.len() == 16);
|
||||||
|
let total_idx = (chunk.index as usize) * 16 + i;
|
||||||
|
let poly = total_idx % 16;
|
||||||
|
let poly_idx = total_idx / 16;
|
||||||
|
let x = GF16::new(poly_idx as u16);
|
||||||
|
let y1 = chunk.data[i * 2] as u16;
|
||||||
|
let y2 = chunk.data[i * 2 + 1] as u16;
|
||||||
|
let y = GF16::new((y1 << 8) + y2);
|
||||||
|
// Only add a point if it is needed or if it has a small index
|
||||||
|
// so it may help us decode without interpolating
|
||||||
|
if poly_idx < self.necessary_points(i)
|
||||||
|
|| self.pts[poly].len() < self.necessary_points(i)
|
||||||
|
{
|
||||||
|
// This will discard new points whose X value matches a previous
|
||||||
|
// old point, since we've implemented equality for the Pt object
|
||||||
|
// to only care about the X value.
|
||||||
|
self.pts[poly].push(Pt { x, y });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(self.pts_needed < MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1)]
|
||||||
|
fn decoded_message(&self) -> Option<Vec<u8>> {
|
||||||
|
if self.is_complete {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let mut points_vecs = Vec::with_capacity(self.pts.len());
|
||||||
|
for i in 0..(self.pts.len()) {
|
||||||
|
let pts = &self.pts[i];
|
||||||
|
if pts.len() < self.necessary_points(i) {
|
||||||
|
return None;
|
||||||
|
} else {
|
||||||
|
points_vecs.push(&pts[..self.necessary_points(i)]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// We may or may not need these vectors of points (only if we need
|
||||||
|
// to do a lagrange_interpolate call). For now, we just create
|
||||||
|
// them regardless. However, we could optimize to lazily create them
|
||||||
|
// only when it's proven necessary.
|
||||||
|
let mut polys: [Option<Poly>; 16] = core::array::from_fn(|_| None);
|
||||||
|
let mut out: Vec<u8> = Vec::with_capacity(self.pts_needed * 2);
|
||||||
|
for i in 0..self.pts_needed {
|
||||||
|
let poly = i % 16;
|
||||||
|
let poly_idx = i / 16;
|
||||||
|
let pt = Pt {
|
||||||
|
x: GF16::new(poly_idx as u16),
|
||||||
|
y: GF16::ZERO,
|
||||||
|
};
|
||||||
|
let y = if let Ok(i) = self.pts[poly].binary_search(&pt) {
|
||||||
|
hax_lib::assume!(i < self.pts[poly].len()); // TODO Needs a postcondition on binary_search
|
||||||
|
self.pts[poly][i].y
|
||||||
|
} else {
|
||||||
|
hax_lib::assume!(poly < polys.len());
|
||||||
|
if polys[poly].is_none() {
|
||||||
|
hax_lib::assume!(poly < points_vecs.len());
|
||||||
|
hax_lib::assume!(
|
||||||
|
points_vecs[poly].len() <= MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
);
|
||||||
|
polys[poly] = Some(Poly::lagrange_interpolate(points_vecs[poly]));
|
||||||
|
}
|
||||||
|
polys[poly]
|
||||||
|
.as_ref()
|
||||||
|
.expect("already computed lazily")
|
||||||
|
.compute_at(pt.x)
|
||||||
|
};
|
||||||
|
out.push((y.value >> 8) as u8);
|
||||||
|
out.push(y.value as u8);
|
||||||
|
}
|
||||||
|
Some(out)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_complete(&self) -> bool {
|
||||||
|
self.is_complete
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use rand::RngCore;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encode_and_decode_small() {
|
||||||
|
let mut encoder = PolyEncoder::encode_bytes(b"abcdefghij").expect("should work");
|
||||||
|
let mut decoder = PolyDecoder::new(10).expect("should work");
|
||||||
|
decoder.add_chunk(&encoder.chunk_at(1));
|
||||||
|
decoder.add_chunk(&encoder.chunk_at(2));
|
||||||
|
let msg = decoder.decoded_message();
|
||||||
|
assert_eq!(msg.expect("decode should succeed"), b"abcdefghij");
|
||||||
|
}
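    // Hypothetical sanity check, not in the original source: for a message
    // that is exactly one 32-byte chunk, every polynomial holds a single raw
    // data point at poly_idx 0, so chunk 0 must reproduce the message
    // verbatim. This illustrates that low-index chunks are systematic.
    #[test]
    fn first_chunk_is_systematic() {
        let msg: Vec<u8> = (0u8..32u8).collect();
        let mut encoder = PolyEncoder::encode_bytes(&msg).expect("should work");
        let chunk = encoder.chunk_at(0);
        assert_eq!(&chunk.data[..], &msg[..]);
    }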
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encode_and_decode_large() {
|
||||||
|
let mut chunks = Vec::<Chunk>::new();
|
||||||
|
// chunk 0 is missing
|
||||||
|
let chunks_needed = 1088 / 32 + 1;
|
||||||
|
|
||||||
|
// Provide a set of chunks, none of which contain the initial data.
|
||||||
|
// This provides the worst-case scenario of both the encoder and decoder
|
||||||
|
// needing to compute all actual data.
|
||||||
|
{
|
||||||
|
let mut encoder = PolyEncoder::encode_bytes(&[3u8; 1088]).expect("should work");
|
||||||
|
for i in chunks_needed..chunks_needed * 2 + 1 {
|
||||||
|
chunks.push(encoder.chunk_at(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut decoder = PolyDecoder::new(1088).expect("should work");
|
||||||
|
for chunk in chunks {
|
||||||
|
decoder.add_chunk(&chunk);
|
||||||
|
let msg = decoder.decoded_message();
|
||||||
|
if let Some(m) = msg {
|
||||||
|
assert_eq!(m, &[3u8; 1088]);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
panic!("should have already decoded by here");
|
||||||
|
}
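    // Hypothetical extra check, not in the original source: `necessary_points`
    // spreads `pts_needed` across the 16 per-poly point sets, giving the first
    // (pts_needed % 16) polys one extra point, so the per-poly counts must sum
    // back to pts_needed.
    #[test]
    fn necessary_points_sum() {
        for len_bytes in [10usize, 1088, 2176] {
            let decoder = PolyDecoder::new(len_bytes).expect("should work");
            let total: usize = (0..16usize).map(|i| decoder.necessary_points(i)).sum();
            assert_eq!(total, decoder.get_pts_needed());
        }
    }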
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn poly_lagrange_interpolate() {
|
||||||
|
let mut pts = Vec::<Pt>::new();
|
||||||
|
let mut rng = rand::rng();
|
||||||
|
for i in 0..30 {
|
||||||
|
pts.push(Pt {
|
||||||
|
x: GF16::new(i as u16),
|
||||||
|
y: GF16::new(rng.next_u32() as u16),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let p = Poly::lagrange_interpolate(&pts);
|
||||||
|
for pt in pts.iter() {
|
||||||
|
assert_eq!(pt.y, p.compute_at(pt.x));
|
||||||
|
}
|
||||||
|
let mut pts2 = Vec::<Pt>::new();
|
||||||
|
for i in 0..30 {
|
||||||
|
let x = GF16::new((i + 30) as u16);
|
||||||
|
pts2.push(Pt {
|
||||||
|
x,
|
||||||
|
y: p.compute_at(x),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let p2 = Poly::lagrange_interpolate(&pts2);
|
||||||
|
for pt in pts.iter() {
|
||||||
|
assert_eq!(pt.y, p2.compute_at(pt.x));
|
||||||
|
}
|
||||||
|
}
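    // Hypothetical cross-check, not in the original source: for a "complete"
    // point set (x = 0..n, with n one of the precomputed sizes), the
    // precomputed-polynomial path in `from_complete_points` should agree with
    // general Lagrange interpolation.
    #[test]
    fn complete_points_match_general_interpolation() {
        let mut rng = rand::rng();
        let pts: Vec<Pt> = (0..30)
            .map(|i| Pt {
                x: GF16::new(i as u16),
                y: GF16::new(rng.next_u32() as u16),
            })
            .collect();
        let direct = Poly::from_complete_points(&pts).expect("points are complete");
        let general = Poly::lagrange_interpolate(&pts);
        for pt in pts.iter() {
            assert_eq!(pt.y, direct.compute_at(pt.x));
            assert_eq!(pt.y, general.compute_at(pt.x));
        }
    }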
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn point_serialize_deserialize() {
|
||||||
|
let pt = Pt {
|
||||||
|
x: GF16::new(0x1234),
|
||||||
|
y: GF16::new(0x5678),
|
||||||
|
};
|
||||||
|
let s = pt.serialize();
|
||||||
|
let pt2 = Pt::deserialize(s);
|
||||||
|
assert_eq!(pt.x, pt2.x);
|
||||||
|
assert_eq!(pt.y, pt2.y);
|
||||||
|
}
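    // Hypothetical worked example, not in the original source: `compute_at`
    // evaluates c0 + c1*x + c2*x^2 + ... from a table of powers of x; spelling
    // the degree-2 case out by hand makes the convention (coefficients stored
    // lowest-degree first) explicit.
    #[test]
    fn compute_at_matches_manual_evaluation() {
        let c0 = GF16::new(0x1234);
        let c1 = GF16::new(0x00ff);
        let c2 = GF16::new(0xabcd);
        let p = Poly {
            coefficients: vec![c0, c1, c2],
        };
        let x = GF16::new(0x0042);
        let manual = c0 + c1 * x + c2 * x * x;
        assert_eq!(manual, p.compute_at(x));
    }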
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn to_and_from_pb() {
|
||||||
|
let chunks_needed = 1088 / 32;
|
||||||
|
|
||||||
|
let mut encoder = PolyEncoder::encode_bytes(&[3u8; 1088]).expect("should work");
|
||||||
|
let mut decoder = PolyDecoder::new(1088).expect("should work");
|
||||||
|
|
||||||
|
// 2 chunks remain after this.
|
||||||
|
for i in 2..chunks_needed {
|
||||||
|
decoder.add_chunk(&encoder.chunk_at(i));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Before receiving/processing remaining chunks, do a round-trip for
|
||||||
|
// both encoder/decoder to/from protobuf.
|
||||||
|
let mut encoder2 = PolyEncoder::from_pb(encoder.into_pb()).unwrap();
|
||||||
|
let mut decoder2 = PolyDecoder::from_pb(decoder.into_pb()).unwrap();
|
||||||
|
|
||||||
|
for i in 0..2 {
|
||||||
|
decoder2.add_chunk(&encoder2.chunk_at(i + chunks_needed));
|
||||||
|
}
|
||||||
|
let m = decoder2.decoded_message().unwrap();
|
||||||
|
assert_eq!(m, &[3u8; 1088]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn const_polys() {
|
||||||
|
lagrange_polys_for_complete_points::<35>();
|
||||||
|
}
|
||||||
|
}
|
||||||
107
src/encoding/round_robin.rs
Normal file
|
|
@ -0,0 +1,107 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
#![cfg(test)]
|
||||||
|
use super::{Chunk, Decoder, Encoder};
|
||||||
|
|
||||||
|
pub struct RoundRobinEncoder {
|
||||||
|
data: Vec<u8>,
|
||||||
|
next_idx: u16,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RoundRobinEncoder {
|
||||||
|
fn num_chunks(&self) -> usize {
|
||||||
|
self.data.len() / 32 + if self.data.len() % 32 != 0 { 1 } else { 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn chunk_at(&self, idx: u16) -> Chunk {
|
||||||
|
let index = (idx as usize) % self.num_chunks();
|
||||||
|
let lb = index * 32usize;
|
||||||
|
let ub = lb + 32usize;
|
||||||
|
|
||||||
|
// The slice below is exactly 32 bytes long, so the try_into should not fail.
|
||||||
|
Chunk {
|
||||||
|
index: idx,
|
||||||
|
data: self.data.as_slice()[lb..ub].try_into().unwrap(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Encoder for RoundRobinEncoder {
|
||||||
|
fn encode_bytes(msg: &[u8]) -> Result<Self, super::EncodingError> {
|
||||||
|
Ok(Self {
|
||||||
|
data: msg.to_vec(),
|
||||||
|
next_idx: 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn next_chunk(&mut self) -> Chunk {
|
||||||
|
let index = self.next_idx;
|
||||||
|
self.next_idx += 1;
|
||||||
|
self.chunk_at(index)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn data(&self) -> &Vec<u8> {
|
||||||
|
&self.data
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type ChunkData = [u8; 32];
|
||||||
|
pub struct RoundRobinDecoder {
|
||||||
|
chunks: Vec<Option<ChunkData>>,
|
||||||
|
is_complete: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RoundRobinDecoder {
|
||||||
|
fn can_reconstruct(&self) -> bool {
|
||||||
|
self.chunks.iter().all(|d| d.is_some())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Decoder for RoundRobinDecoder {
|
||||||
|
fn new(len_bytes: usize) -> Result<Self, super::EncodingError> {
|
||||||
|
let len_chunks = (len_bytes / 32) + if len_bytes % 32 != 0 { 1 } else { 0 };
|
||||||
|
let chunks = vec![None; len_chunks];
|
||||||
|
Ok(Self {
|
||||||
|
chunks,
|
||||||
|
is_complete: false,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_chunk(&mut self, chunk: &Chunk) {
|
||||||
|
let idx = (chunk.index as usize) % self.chunks.len();
|
||||||
|
if let Some(data) = self.chunks[idx] {
|
||||||
|
assert_eq!(data, chunk.data);
|
||||||
|
} else {
|
||||||
|
self.chunks[idx] = Some(chunk.data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn decoded_message(&self) -> Option<Vec<u8>> {
|
||||||
|
if self.is_complete {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
if self.can_reconstruct() {
|
||||||
|
let msg: Vec<u8> = self
|
||||||
|
.chunks
|
||||||
|
.iter()
|
||||||
|
.map(|data| data.unwrap())
|
||||||
|
.flat_map(|d| d.into_iter())
|
||||||
|
.collect();
|
||||||
|
Some(msg)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* fn take_decoded_message(&mut self) -> Option<Vec<u8>> {
|
||||||
|
let data = self.decoded_message();
|
||||||
|
if data.is_some() {
|
||||||
|
self.is_complete = true;
|
||||||
|
}
|
||||||
|
data
|
||||||
|
} */
|
||||||
|
|
||||||
|
fn is_complete(&self) -> bool {
|
||||||
|
self.is_complete
|
||||||
|
}
|
||||||
|
}
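// Hypothetical round-trip check, not in the original source: the round-robin
// encoder just replays the message 32 bytes at a time, so feeding the decoder
// one of each distinct chunk index must reproduce the message exactly.
mod round_robin_test {
    use super::*;

    #[test]
    fn round_trip() {
        let msg = [9u8; 64];
        let mut encoder = RoundRobinEncoder::encode_bytes(&msg).expect("should work");
        let mut decoder = RoundRobinDecoder::new(msg.len()).expect("should work");
        decoder.add_chunk(&encoder.next_chunk());
        assert_eq!(decoder.decoded_message(), None);
        decoder.add_chunk(&encoder.next_chunk());
        assert_eq!(decoder.decoded_message().expect("complete"), &msg[..]);
    }
}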
|
||||||
112
src/incremental_mlkem768.rs
Normal file
|
|
@ -0,0 +1,112 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use crate::Secret;
|
||||||
|
use libcrux_ml_kem::mlkem768::incremental;
|
||||||
|
use rand::{CryptoRng, Rng};
|
||||||
|
|
||||||
|
pub const CIPHERTEXT1_SIZE: usize = incremental::Ciphertext1::len();
|
||||||
|
pub type Ciphertext1 = Vec<u8>;
|
||||||
|
pub type EncapsulationState = Vec<u8>;
|
||||||
|
pub const CIPHERTEXT2_SIZE: usize = incremental::Ciphertext2::len();
|
||||||
|
pub type Ciphertext2 = Vec<u8>;
|
||||||
|
pub const HEADER_SIZE: usize = incremental::pk1_len();
|
||||||
|
pub type Header = Vec<u8>;
|
||||||
|
pub const ENCAPSULATION_KEY_SIZE: usize = incremental::pk2_len();
|
||||||
|
pub type EncapsulationKey = Vec<u8>;
|
||||||
|
pub type DecapsulationKey = Vec<u8>;
|
||||||
|
|
||||||
|
// pub const ENCAPSULATION_STATE_SIZE: usize = incremental::encaps_state_len();
|
||||||
|
// pub const DECAPSULATION_KEY_SIZE: usize = incremental::key_pair_compressed_len();
|
||||||
|
|
||||||
|
pub struct Keys {
|
||||||
|
pub ek: EncapsulationKey,
|
||||||
|
pub dk: DecapsulationKey,
|
||||||
|
pub hdr: Header,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn ek_matches_header(ek: &EncapsulationKey, hdr: &Header) -> bool {
|
||||||
|
incremental::validate_pk_bytes(hdr, ek).is_ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate a new keypair and associated header.
|
||||||
|
#[hax_lib::ensures(|result| result.hdr.len() == 64 && result.ek.len() == 1152 && result.dk.len() == 2400)]
|
||||||
|
pub fn generate<R: Rng + CryptoRng>(rng: &mut R) -> Keys {
|
||||||
|
let mut randomness = [0u8; libcrux_ml_kem::KEY_GENERATION_SEED_SIZE];
|
||||||
|
rng.fill_bytes(&mut randomness);
|
||||||
|
let k = incremental::KeyPairCompressedBytes::from_seed(randomness);
|
||||||
|
Keys {
|
||||||
|
hdr: k.pk1().to_vec(),
|
||||||
|
ek: k.pk2().to_vec(),
|
||||||
|
dk: k.sk().to_vec(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encapsulate with header to get initial ciphertext.
|
||||||
|
#[hax_lib::requires(hdr.len() == 64)]
|
||||||
|
#[hax_lib::ensures(|(ct1,es,ss)| ct1.len() == 960 && es.len() == 2080 && ss.len() == 32)]
|
||||||
|
pub fn encaps1<R: Rng + CryptoRng>(
|
||||||
|
hdr: &Header,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> (Ciphertext1, EncapsulationState, Secret) {
|
||||||
|
let mut randomness = [0u8; libcrux_ml_kem::SHARED_SECRET_SIZE];
|
||||||
|
rng.fill_bytes(&mut randomness);
|
||||||
|
let mut state = vec![0u8; incremental::encaps_state_len()];
|
||||||
|
let mut ss = vec![0u8; libcrux_ml_kem::SHARED_SECRET_SIZE];
|
||||||
|
let ct1 = incremental::encapsulate1(hdr.as_slice(), randomness, &mut state, &mut ss);
|
||||||
|
hax_lib::assume!(ct1.is_ok());
|
||||||
|
hax_lib::assume!(state.len() == 2080 && ss.len() == 32);
|
||||||
|
(
|
||||||
|
ct1.expect("should only fail based on sizes, all sizes should be correct")
|
||||||
|
.value
|
||||||
|
.to_vec(),
|
||||||
|
state,
|
||||||
|
ss,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encapsulate with header and EK.
|
||||||
|
#[hax_lib::requires(es.len() == 2080 && ek.len() == 1152)]
|
||||||
|
#[hax_lib::ensures(|result| result.len() == 128)]
|
||||||
|
pub fn encaps2(ek: &EncapsulationKey, es: &EncapsulationState) -> Ciphertext2 {
|
||||||
|
let ct2 = incremental::encapsulate2(
|
||||||
|
es.as_slice().try_into().expect("size should be correct"),
|
||||||
|
ek.as_slice().try_into().expect("size should be correct"),
|
||||||
|
);
|
||||||
|
ct2.value.to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decapsulate ciphertext to get shared secret.
|
||||||
|
#[hax_lib::requires(ct1.len() == 960 && ct2.len() == 128 && dk.len() == 2400)]
|
||||||
|
#[hax_lib::ensures(|result| result.len() == 32)]
|
||||||
|
pub fn decaps(dk: &DecapsulationKey, ct1: &Ciphertext1, ct2: &Ciphertext2) -> Secret {
|
||||||
|
let ct1 = incremental::Ciphertext1 {
|
||||||
|
value: ct1.as_slice().try_into().expect("size should be correct"),
|
||||||
|
};
|
||||||
|
let ct2 = incremental::Ciphertext2 {
|
||||||
|
value: ct2.as_slice().try_into().expect("size should be correct"),
|
||||||
|
};
|
||||||
|
incremental::decapsulate_compressed_key(
|
||||||
|
dk.as_slice().try_into().expect("size should be correct"),
|
||||||
|
&ct1,
|
||||||
|
&ct2,
|
||||||
|
)
|
||||||
|
.to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use rand::TryRngCore;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn incremental_mlkem768_round_trip() {
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
let keys = generate(&mut rng);
|
||||||
|
let (ct1, es, ss1) = encaps1(&keys.hdr, &mut rng);
|
||||||
|
let ct2 = encaps2(&keys.ek, &es);
|
||||||
|
let ss2 = decaps(&keys.dk, &ct1, &ct2);
|
||||||
|
assert_eq!(ss1, ss2);
|
||||||
|
}
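    // Hypothetical extra check, not in the original source: the header (pk1)
    // and encapsulation key (pk2) produced by `generate` come from the same
    // key pair, so the consistency check used elsewhere in the protocol should
    // accept them.
    #[test]
    fn generated_ek_matches_header() {
        let mut rng = OsRng.unwrap_err();
        let keys = generate(&mut rng);
        assert!(ek_matches_header(&keys.ek, &keys.hdr));
    }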
|
||||||
|
}
|
||||||
26
src/kdf.rs
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
#[hax_lib::opaque]
|
||||||
|
#[hax_lib::ensures(|res| res.len() >= okm_len)]
|
||||||
|
pub fn hkdf_to_vec(salt: &[u8], ikm: &[u8], info: &[u8], okm_len: usize) -> Vec<u8> {
|
||||||
|
if cfg!(feature = "proof") {
|
||||||
|
libcrux_hkdf::hkdf(libcrux_hkdf::Algorithm::Sha256, salt, ikm, info, okm_len)
|
||||||
|
.expect("all lengths should work for SHA256")
|
||||||
|
} else {
|
||||||
|
let mut out = vec![0u8; okm_len];
|
||||||
|
hkdf_to_slice(salt, ikm, info, &mut out);
|
||||||
|
out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque]
|
||||||
|
pub fn hkdf_to_slice(salt: &[u8], ikm: &[u8], info: &[u8], okm: &mut [u8]) {
|
||||||
|
if cfg!(feature = "proof") {
|
||||||
|
okm.copy_from_slice(&hkdf_to_vec(salt, ikm, info, okm.len()));
|
||||||
|
} else {
|
||||||
|
hkdf::Hkdf::<sha2::Sha256>::new(Some(salt), ikm)
|
||||||
|
.expand(info, okm)
|
||||||
|
.expect("all lengths should work for SHA256");
|
||||||
|
}
|
||||||
|
}
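// Hypothetical smoke test, not in the original source: whichever backend is
// compiled in (libcrux under the "proof" feature, the hkdf crate otherwise),
// HKDF-SHA256 is deterministic and must honour the requested output length,
// and the slice variant must agree with the vec variant.
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn hkdf_is_deterministic_and_sized() {
        let salt = b"salt";
        let ikm = b"input keying material";
        let info = b"info";
        let a = hkdf_to_vec(salt, ikm, info, 64);
        let b = hkdf_to_vec(salt, ikm, info, 64);
        assert_eq!(a.len(), 64);
        assert_eq!(a, b);

        let mut c = vec![0u8; 64];
        hkdf_to_slice(salt, ikm, info, &mut c);
        assert_eq!(a, c);
    }
}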
|
||||||
562
src/lib.rs
Normal file
|
|
@ -0,0 +1,562 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
pub mod authenticator;
|
||||||
|
pub mod chain;
|
||||||
|
pub mod encoding;
|
||||||
|
pub(crate) mod incremental_mlkem768;
|
||||||
|
pub(crate) mod kdf;
|
||||||
|
pub mod proto;
|
||||||
|
pub mod serialize;
|
||||||
|
pub(crate) mod test;
|
||||||
|
pub(crate) mod util;
|
||||||
|
mod v1;
|
||||||
|
|
||||||
|
use crate::chain::Chain;
|
||||||
|
use crate::proto::pq_ratchet as pqrpb;
|
||||||
|
use num_enum::IntoPrimitive;
|
||||||
|
use prost::Message;
|
||||||
|
use rand::{CryptoRng, Rng};
|
||||||
|
use std::cmp::Ordering;
|
||||||
|
use v1::chunked::states as v1states;
|
||||||
|
|
||||||
|
pub type Epoch = u64;
|
||||||
|
pub type Secret = Vec<u8>;
|
||||||
|
pub type MessageKey = Option<Vec<u8>>;
|
||||||
|
pub type SerializedState = Vec<u8>;
|
||||||
|
pub type SerializedMessage = Vec<u8>;
|
||||||
|
|
||||||
|
pub struct EpochSecret {
|
||||||
|
pub epoch: Epoch,
|
||||||
|
pub secret: Secret,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub enum Direction {
|
||||||
|
A2B,
|
||||||
|
B2A,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Direction {
|
||||||
|
fn switch(&self) -> Self {
|
||||||
|
match self {
|
||||||
|
Direction::A2B => Direction::B2A,
|
||||||
|
Direction::B2A => Direction::A2B,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(PartialEq, Debug)]
|
||||||
|
pub enum SecretOutput {
|
||||||
|
/// Receipt of the message has resulted in no additional shared secrets
|
||||||
|
/// to mix in.
|
||||||
|
None,
|
||||||
|
/// Receipt of the message has resulted in a shared secret which should
|
||||||
|
/// be mixed into the sending chain before using it to encrypt/send the
|
||||||
|
/// next message sent by this client.
|
||||||
|
Send(Secret),
|
||||||
|
/// Receipt of the message has resulted in a shared secret which will be
|
||||||
|
/// used to encrypt the next message we receive, and thus should be mixed
|
||||||
|
/// into our new receiving chain.
|
||||||
|
Recv(Secret),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, thiserror::Error)]
|
||||||
|
pub enum Error {
|
||||||
|
#[error("state decode failed")]
|
||||||
|
StateDecode,
|
||||||
|
#[error("not yet implemented")]
|
||||||
|
NotImplemented,
|
||||||
|
#[error("message decode failed")]
|
||||||
|
MsgDecode,
|
||||||
|
#[error("MAC verification failed")]
|
||||||
|
MacVerifyFailed,
|
||||||
|
#[error("epoch not in valid range: {0}")]
|
||||||
|
EpochOutOfRange(Epoch),
|
||||||
|
#[error("epoch failure")]
|
||||||
|
EpochFailure,
|
||||||
|
#[error("MAC should have key but doesn't")]
|
||||||
|
MacStateInvalid,
|
||||||
|
#[error("Underlying state machine in the wrong state")]
|
||||||
|
BaseStateInvalid,
|
||||||
|
#[error("Encoding error: {0}")]
|
||||||
|
EncodingDecoding(encoding::EncodingError),
|
||||||
|
#[error("Serialization: {0}")]
|
||||||
|
Serialization(serialize::Error),
|
||||||
|
#[error("Version mismatch after negotiation")]
|
||||||
|
VersionMismatch,
|
||||||
|
#[error("Minimum version")]
|
||||||
|
MinimumVersion,
|
||||||
|
#[error("Key jump: {0} - {1}")]
|
||||||
|
KeyJump(u32, u32),
|
||||||
|
#[error("Key trimmed: {0}")]
|
||||||
|
KeyTrimmed(u32),
|
||||||
|
#[error("Key already requested: {0}")]
|
||||||
|
KeyAlreadyRequested(u32),
|
||||||
|
#[error("Erroneous data received from remote party")]
|
||||||
|
ErroneousDataReceived,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<encoding::EncodingError> for Error {
|
||||||
|
fn from(e: encoding::EncodingError) -> Error {
|
||||||
|
Error::EncodingDecoding(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<serialize::Error> for Error {
|
||||||
|
fn from(v: serialize::Error) -> Self {
|
||||||
|
Error::Serialization(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<authenticator::Error> for Error {
|
||||||
|
fn from(_v: authenticator::Error) -> Self {
|
||||||
|
Error::MacVerifyFailed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SecretOutput {
|
||||||
|
pub fn send_secret(&self) -> Option<&Secret> {
|
||||||
|
match self {
|
||||||
|
SecretOutput::Send(s) => Some(s),
|
||||||
|
SecretOutput::Recv(_) => None,
|
||||||
|
SecretOutput::None => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn recv_secret(&self) -> Option<&Secret> {
|
||||||
|
match self {
|
||||||
|
SecretOutput::Send(_) => None,
|
||||||
|
SecretOutput::Recv(s) => Some(s),
|
||||||
|
SecretOutput::None => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn secret(&self) -> Option<&Secret> {
|
||||||
|
match self {
|
||||||
|
SecretOutput::Send(s) | SecretOutput::Recv(s) => Some(s),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn has_secret(&self) -> bool {
|
||||||
|
!matches!(self, Self::None)
|
||||||
|
}
|
||||||
|
}
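
For illustration, a minimal standalone sketch (not part of this crate) of how a caller might route a secret-output value into its own chains, following the doc comments above. The `Chains` type, its `mix_*` methods, and the `Demo*` names are hypothetical stand-ins, not the crate's API.

// Illustrative sketch only; all names here are hypothetical stand-ins.
type DemoSecret = Vec<u8>;

enum DemoSecretOutput {
    None,
    Send(DemoSecret),
    Recv(DemoSecret),
}

struct Chains {
    sending: Vec<u8>,
    receiving: Vec<u8>,
}

impl Chains {
    // Stand-ins for the caller's real KDF-based chain updates.
    fn mix_into_sending(&mut self, s: &[u8]) {
        self.sending.extend_from_slice(s);
    }
    fn mix_into_receiving(&mut self, s: &[u8]) {
        self.receiving.extend_from_slice(s);
    }
}

fn apply(chains: &mut Chains, out: DemoSecretOutput) {
    match out {
        // Mix into the sending chain before encrypting the next outgoing message.
        DemoSecretOutput::Send(s) => chains.mix_into_sending(&s),
        // Mix into the new receiving chain before decrypting the next incoming message.
        DemoSecretOutput::Recv(s) => chains.mix_into_receiving(&s),
        // Nothing new to mix in.
        DemoSecretOutput::None => {}
    }
}

fn main() {
    let mut chains = Chains { sending: vec![], receiving: vec![] };
    apply(&mut chains, DemoSecretOutput::Send(vec![0xAA]));
    apply(&mut chains, DemoSecretOutput::Recv(vec![0xBB]));
    assert_eq!((chains.sending, chains.receiving), (vec![0xAA], vec![0xBB]));
}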
|
||||||
|
|
||||||
|
/// Protocol version.
///
/// Note that these versions are strictly ordered: if vX > vY, it is
/// assumed that vX is preferred to vY and should be used if both
/// parties support it.
#[derive(Copy, Clone, IntoPrimitive)]
#[repr(u8)]
pub enum Version {
    /// V0 is not using PQ ratcheting at all. All sends are empty, and no
    /// secrets are ever returned.
    V0 = 0,
    /// V1 uses an incremental ML-KEM 768 negotiation with polynomial encoders
    /// based on GF16.
    V1 = 1,
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque]
|
||||||
|
impl TryFrom<u8> for Version {
|
||||||
|
type Error = String;
|
||||||
|
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||||
|
match value {
|
||||||
|
0 => Ok(Version::V0),
|
||||||
|
1 => Ok(Version::V1),
|
||||||
|
_ => Err("Expected 0 or 1".to_owned()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
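
A minimal standalone sketch (not part of this crate) of the ordering rule described in the `Version` doc comment: both parties should land on the highest version each supports, and negotiation fails below a configured floor. The function name, error string, and the use of raw `u8` version numbers are assumptions made for this sketch only.

// Illustrative sketch only; names and the error message are hypothetical.
fn negotiate(local_max: u8, remote_max: u8, min_version: u8) -> Result<u8, &'static str> {
    // The usable version is the smaller of the two maxima ...
    let agreed = local_max.min(remote_max);
    // ... and negotiation fails if that falls below the allowed floor.
    if agreed < min_version {
        Err("peer only supports versions below our minimum")
    } else {
        Ok(agreed)
    }
}

fn main() {
    assert_eq!(negotiate(1, 1, 0), Ok(1));
    assert_eq!(negotiate(1, 0, 0), Ok(0)); // downgrade to V0 is allowed here
    assert_eq!(negotiate(1, 0, 1), Err("peer only supports versions below our minimum"));
}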
|
||||||
|
|
||||||
|
impl Version {
|
||||||
|
pub const MAX: Version = Self::V1;
|
||||||
|
|
||||||
|
pub fn initial_alice_state(&self, auth_key: &[u8], min_version: Version) -> SerializedState {
|
||||||
|
hax_lib::fstar!("admit()");
|
||||||
|
pqrpb::PqRatchetState {
|
||||||
|
inner: self.init_alice_inner(auth_key),
|
||||||
|
version_negotiation: Some(pqrpb::pq_ratchet_state::VersionNegotiation {
|
||||||
|
auth_key: auth_key.to_vec(),
|
||||||
|
alice: true,
|
||||||
|
min_version: min_version as u32,
|
||||||
|
}),
|
||||||
|
chain: Some(Chain::new(auth_key, Direction::A2B).into_pb()),
|
||||||
|
}
|
||||||
|
.encode_to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn initial_bob_state(&self, auth_key: &[u8], min_version: Version) -> SerializedState {
|
||||||
|
hax_lib::fstar!("admit()");
|
||||||
|
pqrpb::PqRatchetState {
|
||||||
|
inner: self.init_bob_inner(auth_key),
|
||||||
|
version_negotiation: Some(pqrpb::pq_ratchet_state::VersionNegotiation {
|
||||||
|
auth_key: auth_key.to_vec(),
|
||||||
|
alice: false,
|
||||||
|
min_version: min_version as u32,
|
||||||
|
}),
|
||||||
|
chain: Some(Chain::new(auth_key, Direction::B2A).into_pb()),
|
||||||
|
}
|
||||||
|
.encode_to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_alice_inner(&self, auth_key: &[u8]) -> Option<pqrpb::pq_ratchet_state::Inner> {
|
||||||
|
match self {
|
||||||
|
Version::V0 => None,
|
||||||
|
Version::V1 => Some(pqrpb::pq_ratchet_state::Inner::V1(
|
||||||
|
v1states::States::init_a(auth_key).into_pb(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn init_bob_inner(&self, auth_key: &[u8]) -> Option<pqrpb::pq_ratchet_state::Inner> {
|
||||||
|
match self {
|
||||||
|
Version::V0 => None,
|
||||||
|
Version::V1 => Some(pqrpb::pq_ratchet_state::Inner::V1(
|
||||||
|
v1states::States::init_b(auth_key).into_pb(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Send {
|
||||||
|
pub state: SerializedState,
|
||||||
|
pub msg: SerializedMessage,
|
||||||
|
pub key: MessageKey,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::fstar::verification_status(lax)]
|
||||||
|
pub fn send<R: Rng + CryptoRng>(state: &SerializedState, rng: &mut R) -> Result<Send, Error> {
|
||||||
|
let state_pb = decode_state(state)?;
|
||||||
|
match state_pb.inner {
|
||||||
|
None => Ok(Send {
|
||||||
|
state: vec![],
|
||||||
|
msg: vec![],
|
||||||
|
key: None,
|
||||||
|
}),
|
||||||
|
Some(pqrpb::pq_ratchet_state::Inner::V1(pb)) => {
|
||||||
|
let mut chain = Chain::from_pb(state_pb.chain.ok_or(Error::StateDecode)?)?;
|
||||||
|
|
||||||
|
let v1states::Send { msg, key, state } = v1states::States::from_pb(pb)?.send(rng)?;
|
||||||
|
|
||||||
|
if let Some(epoch_secret) = key {
|
||||||
|
chain.add_epoch(epoch_secret);
|
||||||
|
}
|
||||||
|
let (index, msg_key) = chain.send_key(msg.epoch - 1)?;
|
||||||
|
|
||||||
|
let msg = msg.serialize(index);
|
||||||
|
assert!(!msg.is_empty());
|
||||||
|
assert_eq!(msg[0], Version::V1.into());
|
||||||
|
Ok(Send {
|
||||||
|
state: pqrpb::PqRatchetState {
|
||||||
|
inner: Some(pqrpb::pq_ratchet_state::Inner::V1(state.into_pb())),
|
||||||
|
// Sending never changes our version negotiation.
|
||||||
|
version_negotiation: state_pb.version_negotiation,
|
||||||
|
chain: Some(chain.into_pb()),
|
||||||
|
}
|
||||||
|
.encode_to_vec(),
|
||||||
|
msg,
|
||||||
|
key: Some(msg_key),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Recv {
|
||||||
|
pub state: SerializedState,
|
||||||
|
pub key: MessageKey,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::fstar::verification_status(lax)]
|
||||||
|
pub fn recv(state: &SerializedState, msg: &SerializedMessage) -> Result<Recv, Error> {
|
||||||
|
    // Perform version negotiation. At the beginning of our interaction
    // with a remote party, we are set to allow negotiation. This
    // allows either side to downgrade the connection to a protocol version
    // that it supports, while still using the highest protocol
    // version supported by both sides.
|
||||||
|
let prenegotiated_state_pb = decode_state(state)?;
|
||||||
|
let state_pb = match msg_version(msg) {
|
||||||
|
None => {
|
||||||
|
// They have presented a version we don't support; it's too high for us,
|
||||||
|
// so ignore it and keep sending our current version's format.
|
||||||
|
return Ok(Recv {
|
||||||
|
state: state.to_vec(),
|
||||||
|
key: None,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Some(v) => match (v as u8).cmp(&(state_version(&prenegotiated_state_pb) as u8)) {
|
||||||
|
Ordering::Equal => {
|
||||||
|
// Our versions are equal; proceed with existing state
|
||||||
|
prenegotiated_state_pb
|
||||||
|
}
|
||||||
|
Ordering::Greater => {
|
||||||
|
// Their version is greater than ours, but still one we support.
|
||||||
|
// This should not happen, since we should use our highest supported
|
||||||
|
// version.
|
||||||
|
return Err(Error::VersionMismatch);
|
||||||
|
}
|
||||||
|
Ordering::Less => {
|
||||||
|
// Their version is less than ours. If we are allowed to negotiate, we
|
||||||
|
// should. Otherwise, we should error out.
|
||||||
|
//
|
||||||
|
// When negotiating down a level, we disallow future negotiation.
|
||||||
|
match prenegotiated_state_pb.version_negotiation {
|
||||||
|
None => {
|
||||||
|
return Err(Error::VersionMismatch);
|
||||||
|
}
|
||||||
|
Some(ref vn) => {
|
||||||
|
if (v as u32) < vn.min_version {
|
||||||
|
return Err(Error::MinimumVersion);
|
||||||
|
}
|
||||||
|
pqrpb::PqRatchetState {
|
||||||
|
inner: if vn.alice {
|
||||||
|
v.init_alice_inner(&vn.auth_key)
|
||||||
|
} else {
|
||||||
|
v.init_bob_inner(&vn.auth_key)
|
||||||
|
},
|
||||||
|
// This is our negotiation; we disallow any further.
|
||||||
|
version_negotiation: None,
|
||||||
|
chain: prenegotiated_state_pb.chain,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// At this point, we have finished version negotiation and have made sure
|
||||||
|
// that our state version matches. Proceed with receiving and processing
|
||||||
|
// the associated message.
|
||||||
|
match state_pb.inner {
|
||||||
|
None => Ok(Recv {
|
||||||
|
state: vec![],
|
||||||
|
key: None,
|
||||||
|
}),
|
||||||
|
Some(pqrpb::pq_ratchet_state::Inner::V1(pb)) => {
|
||||||
|
let mut chain = Chain::from_pb(state_pb.chain.ok_or(Error::StateDecode)?)?;
|
||||||
|
let (scka_msg, index, _) = v1states::Message::deserialize(msg)?;
|
||||||
|
|
||||||
|
let v1states::Recv { key, state } = v1states::States::from_pb(pb)?.recv(&scka_msg)?;
|
||||||
|
|
||||||
|
if let Some(epoch_secret) = key {
|
||||||
|
chain.add_epoch(epoch_secret);
|
||||||
|
}
|
||||||
|
|
||||||
|
let msg_key = chain.recv_key(scka_msg.epoch - 1, index)?;
|
||||||
|
Ok(Recv {
|
||||||
|
state: pqrpb::PqRatchetState {
|
||||||
|
inner: Some(pqrpb::pq_ratchet_state::Inner::V1(state.into_pb())),
|
||||||
|
// Receiving clears our version negotiation.
|
||||||
|
version_negotiation: None,
|
||||||
|
chain: Some(chain.into_pb()),
|
||||||
|
}
|
||||||
|
.encode_to_vec(),
|
||||||
|
key: Some(msg_key),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn state_version(state: &pqrpb::PqRatchetState) -> Version {
|
||||||
|
match state.inner {
|
||||||
|
None => Version::V0,
|
||||||
|
Some(proto::pq_ratchet::pq_ratchet_state::Inner::V1(_)) => Version::V1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::fstar::verification_status(lax)]
|
||||||
|
fn msg_version(msg: &SerializedMessage) -> Option<Version> {
|
||||||
|
if msg.is_empty() {
|
||||||
|
Some(Version::V0)
|
||||||
|
} else {
|
||||||
|
msg[0].try_into().ok()
|
||||||
|
}
|
||||||
|
}
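
For clarity, a tiny standalone sketch (not part of this crate) of the framing rule `msg_version` relies on: an empty serialized message is treated as V0, otherwise the first byte names the version and unknown values are rejected. Treating 1 as the highest known version byte is an assumption of this sketch.

// Illustrative sketch only; assumes 1 is the highest known version byte.
fn wire_version(msg: &[u8]) -> Option<u8> {
    match msg.first() {
        None => Some(0),               // empty message: treat as V0
        Some(&b) if b <= 1 => Some(b), // known versions 0 and 1
        Some(_) => None,               // unknown / future version: caller ignores it
    }
}

fn main() {
    assert_eq!(wire_version(&[]), Some(0));
    assert_eq!(wire_version(&[1, 0xAA, 0xBB]), Some(1));
    assert_eq!(wire_version(&[9]), None);
}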
|
||||||
|
|
||||||
|
#[hax_lib::fstar::verification_status(lax)]
|
||||||
|
fn decode_state(s: &SerializedState) -> Result<pqrpb::PqRatchetState, Error> {
|
||||||
|
if s.is_empty() {
|
||||||
|
Ok(proto::pq_ratchet::PqRatchetState {
|
||||||
|
inner: None,
|
||||||
|
version_negotiation: None,
|
||||||
|
chain: None,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
proto::pq_ratchet::PqRatchetState::decode(s.as_slice()).map_err(|_| Error::StateDecode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod lib_test {
|
||||||
|
use rand::Rng;
|
||||||
|
use rand::TryRngCore;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use crate::{recv, send, Error, Recv, Send, SerializedState, Version};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn ratchet() -> Result<(), Error> {
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
|
||||||
|
let version = Version::V1;
|
||||||
|
|
||||||
|
let alex_pq_state = version.initial_alice_state(&[41u8; 32], Version::V1);
|
||||||
|
let blake_pq_state = version.initial_bob_state(&[41u8; 32], Version::V1);
|
||||||
|
|
||||||
|
// Now let's send some messages
|
||||||
|
let Send {
|
||||||
|
state: alex_pq_state,
|
||||||
|
msg,
|
||||||
|
key: alex_key,
|
||||||
|
} = send(&alex_pq_state, &mut rng)?;
|
||||||
|
|
||||||
|
let Recv {
|
||||||
|
state: blake_pq_state,
|
||||||
|
key: blake_key,
|
||||||
|
} = recv(&blake_pq_state, &msg)?;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
|
||||||
|
let Send {
|
||||||
|
state: mut blake_pq_state,
|
||||||
|
msg,
|
||||||
|
key: blake_key,
|
||||||
|
} = send(&blake_pq_state, &mut rng)?;
|
||||||
|
|
||||||
|
let Recv {
|
||||||
|
state: mut alex_pq_state,
|
||||||
|
key: alex_key,
|
||||||
|
} = recv(&alex_pq_state, &msg)?;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
|
||||||
|
// now let's mix it up a little
|
||||||
|
for _ in 0..1000 {
|
||||||
|
let a_send = rng.random_bool(0.5);
|
||||||
|
let b_send = rng.random_bool(0.5);
|
||||||
|
let a_recv = rng.random_bool(0.7);
|
||||||
|
let b_recv = rng.random_bool(0.7);
|
||||||
|
|
||||||
|
if a_send {
|
||||||
|
let Send {
|
||||||
|
state,
|
||||||
|
msg,
|
||||||
|
key: alex_key,
|
||||||
|
} = send(&alex_pq_state, &mut rng)?;
|
||||||
|
alex_pq_state = state;
|
||||||
|
if b_recv {
|
||||||
|
let Recv {
|
||||||
|
state,
|
||||||
|
key: blake_key,
|
||||||
|
} = recv(&blake_pq_state, &msg)?;
|
||||||
|
blake_pq_state = state;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if b_send {
|
||||||
|
let Send {
|
||||||
|
state,
|
||||||
|
msg,
|
||||||
|
key: blake_key,
|
||||||
|
} = send(&blake_pq_state, &mut rng)?;
|
||||||
|
blake_pq_state = state;
|
||||||
|
if a_recv {
|
||||||
|
let Recv {
|
||||||
|
state,
|
||||||
|
key: alex_key,
|
||||||
|
} = recv(&alex_pq_state, &msg)?;
|
||||||
|
alex_pq_state = state;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn ratchet_v0_empty_states() -> Result<(), Error> {
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
|
||||||
|
// SPQR should treat empty states as V0.
|
||||||
|
|
||||||
|
let alex_pq_state = SerializedState::new();
|
||||||
|
let blake_pq_state = SerializedState::new();
|
||||||
|
|
||||||
|
// Now let's send some messages
|
||||||
|
let Send {
|
||||||
|
state: alex_pq_state,
|
||||||
|
msg,
|
||||||
|
key: alex_key,
|
||||||
|
} = send(&alex_pq_state, &mut rng)?;
|
||||||
|
|
||||||
|
let Recv {
|
||||||
|
state: blake_pq_state,
|
||||||
|
key: blake_key,
|
||||||
|
} = recv(&blake_pq_state, &msg)?;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
|
||||||
|
let Send {
|
||||||
|
state: mut blake_pq_state,
|
||||||
|
msg,
|
||||||
|
key: blake_key,
|
||||||
|
} = send(&blake_pq_state, &mut rng)?;
|
||||||
|
|
||||||
|
let Recv {
|
||||||
|
state: mut alex_pq_state,
|
||||||
|
key: alex_key,
|
||||||
|
} = recv(&alex_pq_state, &msg)?;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
|
||||||
|
// now let's mix it up a little
|
||||||
|
for _ in 0..1000 {
|
||||||
|
let a_send = rng.random_bool(0.5);
|
||||||
|
let b_send = rng.random_bool(0.5);
|
||||||
|
let a_recv = rng.random_bool(0.7);
|
||||||
|
let b_recv = rng.random_bool(0.7);
|
||||||
|
|
||||||
|
if a_send {
|
||||||
|
let Send {
|
||||||
|
state,
|
||||||
|
msg,
|
||||||
|
key: alex_key,
|
||||||
|
} = send(&alex_pq_state, &mut rng)?;
|
||||||
|
alex_pq_state = state;
|
||||||
|
if b_recv {
|
||||||
|
let Recv {
|
||||||
|
state,
|
||||||
|
key: blake_key,
|
||||||
|
} = recv(&blake_pq_state, &msg)?;
|
||||||
|
blake_pq_state = state;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if b_send {
|
||||||
|
let Send {
|
||||||
|
state,
|
||||||
|
msg,
|
||||||
|
key: blake_key,
|
||||||
|
} = send(&blake_pq_state, &mut rng)?;
|
||||||
|
blake_pq_state = state;
|
||||||
|
if a_recv {
|
||||||
|
let Recv {
|
||||||
|
state,
|
||||||
|
key: alex_key,
|
||||||
|
} = recv(&alex_pq_state, &msg)?;
|
||||||
|
alex_pq_state = state;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
4
src/proto.rs
Normal file
@ -0,0 +1,4 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
pub mod pq_ratchet;
|
||||||
208
src/proto/pq_ratchet.proto
Normal file
@ -0,0 +1,208 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC.
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package signal.proto.pq_ratchet;
|
||||||
|
|
||||||
|
message PolynomialEncoder {
|
||||||
|
uint32 idx = 1;
|
||||||
|
|
||||||
|
// We'd like to use a oneof here, but proto3 doesn't allow
|
||||||
|
// a combination of `oneof` and `repeated`. So, we just
|
||||||
|
// only set one of these values to non-empty:
|
||||||
|
repeated bytes pts = 2;
|
||||||
|
repeated bytes polys = 3;
|
||||||
|
}
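
To make the comment above concrete, here is a hedged Rust sketch (not part of this crate; the struct below is a hypothetical stand-in for the prost-generated type, not its real name) of how a reader can enforce that exactly one of `pts` / `polys` is populated, which is what stands in for the unavailable `oneof` + `repeated` combination.

// Illustrative sketch only; `PolynomialEncoderPb` is a hypothetical stand-in.
struct PolynomialEncoderPb {
    pts: Vec<Vec<u8>>,
    polys: Vec<Vec<u8>>,
}

enum EncoderContents {
    Points(Vec<Vec<u8>>),
    Polynomials(Vec<Vec<u8>>),
}

fn contents(pb: PolynomialEncoderPb) -> Result<EncoderContents, &'static str> {
    match (pb.pts.is_empty(), pb.polys.is_empty()) {
        (false, true) => Ok(EncoderContents::Points(pb.pts)),
        (true, false) => Ok(EncoderContents::Polynomials(pb.polys)),
        _ => Err("exactly one of pts / polys must be non-empty"),
    }
}

fn main() {
    let pb = PolynomialEncoderPb {
        pts: vec![vec![1, 2, 3]],
        polys: vec![],
    };
    assert!(matches!(contents(pb), Ok(EncoderContents::Points(_))));
}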
|
||||||
|
|
||||||
|
message PolynomialDecoder {
|
||||||
|
uint32 pts_needed = 1;
|
||||||
|
uint32 polys = 2;
|
||||||
|
repeated bytes pts = 3;
|
||||||
|
bool is_complete = 4;
|
||||||
|
}
|
||||||
|
|
||||||
|
message PqRatchetState {
|
||||||
|
message VersionNegotiation {
|
||||||
|
bytes auth_key = 1;
|
||||||
|
bool alice = 2;
|
||||||
|
uint32 min_version = 3;
|
||||||
|
}
|
||||||
|
VersionNegotiation version_negotiation = 1;
|
||||||
|
Chain chain = 2;
|
||||||
|
|
||||||
|
oneof inner {
|
||||||
|
V1State v1 = 3;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
message Chunk {
|
||||||
|
uint32 index = 1;
|
||||||
|
bytes data = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
message V1Msg {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
uint32 index = 2;
|
||||||
|
oneof inner_msg {
|
||||||
|
// send_ek
|
||||||
|
Chunk hdr = 3;
|
||||||
|
Chunk ek = 4;
|
||||||
|
Chunk ek_ct1_ack = 5;
|
||||||
|
bool ct1_ack = 6;
|
||||||
|
|
||||||
|
// send_ct
|
||||||
|
Chunk ct1 = 7;
|
||||||
|
Chunk ct2 = 8;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
message Authenticator {
|
||||||
|
bytes root_key = 1;
|
||||||
|
bytes mac_key = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
message V1State {
|
||||||
|
message Unchunked {
|
||||||
|
//// send_ek ////
|
||||||
|
message KeysUnsampled {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
}
|
||||||
|
message HeaderSent {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
bytes ek = 3;
|
||||||
|
bytes dk = 4;
|
||||||
|
}
|
||||||
|
message EkSent {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
bytes dk = 3;
|
||||||
|
}
|
||||||
|
message EkSentCt1Received {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
bytes dk = 3;
|
||||||
|
bytes ct1 = 4;
|
||||||
|
}
|
||||||
|
|
||||||
|
//// send_ct ////
|
||||||
|
message NoHeaderReceived {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
}
|
||||||
|
message HeaderReceived {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
bytes hdr = 3;
|
||||||
|
}
|
||||||
|
message EkReceived {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
bytes hdr = 3;
|
||||||
|
bytes ek = 4;
|
||||||
|
}
|
||||||
|
message Ct1Sent {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
bytes hdr = 3;
|
||||||
|
bytes es = 4;
|
||||||
|
bytes ct1 = 5;
|
||||||
|
}
|
||||||
|
message Ct1SentEkReceived {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
bytes es = 3;
|
||||||
|
bytes ek = 4;
|
||||||
|
bytes ct1 = 5;
|
||||||
|
}
|
||||||
|
message Ct2Sent {
|
||||||
|
uint64 epoch = 1;
|
||||||
|
Authenticator auth = 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
message Chunked {
|
||||||
|
//// send_ek ////
|
||||||
|
message KeysUnsampled {
|
||||||
|
Unchunked.KeysUnsampled uc = 1;
|
||||||
|
}
|
||||||
|
message KeysSampled {
|
||||||
|
Unchunked.HeaderSent uc = 1;
|
||||||
|
PolynomialEncoder sending_hdr = 2;
|
||||||
|
}
|
||||||
|
message HeaderSent {
|
||||||
|
Unchunked.EkSent uc = 1;
|
||||||
|
PolynomialEncoder sending_ek = 2;
|
||||||
|
PolynomialDecoder receiving_ct1 = 3;
|
||||||
|
}
|
||||||
|
message Ct1Received {
|
||||||
|
Unchunked.EkSentCt1Received uc = 1;
|
||||||
|
PolynomialEncoder sending_ek = 2;
|
||||||
|
}
|
||||||
|
message EkSentCt1Received {
|
||||||
|
Unchunked.EkSentCt1Received uc = 1;
|
||||||
|
PolynomialDecoder receiving_ct2 = 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
//// send_ct ////
|
||||||
|
message NoHeaderReceived {
|
||||||
|
Unchunked.NoHeaderReceived uc = 1;
|
||||||
|
PolynomialDecoder receiving_hdr = 2;
|
||||||
|
}
|
||||||
|
message HeaderReceived {
|
||||||
|
Unchunked.HeaderReceived uc = 1;
|
||||||
|
PolynomialDecoder receiving_ek = 2;
|
||||||
|
}
|
||||||
|
message Ct1Sampled {
|
||||||
|
Unchunked.Ct1Sent uc = 1;
|
||||||
|
PolynomialEncoder sending_ct1 = 2;
|
||||||
|
PolynomialDecoder receiving_ek = 3;
|
||||||
|
}
|
||||||
|
message EkReceivedCt1Sampled {
|
||||||
|
Unchunked.Ct1SentEkReceived uc = 1;
|
||||||
|
PolynomialEncoder sending_ct1 = 2;
|
||||||
|
}
|
||||||
|
message Ct1Acknowledged {
|
||||||
|
Unchunked.Ct1Sent uc = 1;
|
||||||
|
PolynomialDecoder receiving_ek = 2;
|
||||||
|
}
|
||||||
|
message Ct2Sampled {
|
||||||
|
Unchunked.Ct2Sent uc = 1;
|
||||||
|
PolynomialEncoder sending_ct2 = 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
oneof inner_state {
|
||||||
|
//// send_ek ////
|
||||||
|
Chunked.KeysUnsampled keys_unsampled = 1;
|
||||||
|
Chunked.KeysSampled keys_sampled = 2;
|
||||||
|
Chunked.HeaderSent header_sent = 3;
|
||||||
|
Chunked.Ct1Received ct1_received = 4;
|
||||||
|
Chunked.EkSentCt1Received ek_sent_ct1_received = 5;
|
||||||
|
|
||||||
|
//// send_ct ////
|
||||||
|
Chunked.NoHeaderReceived no_header_received = 6;
|
||||||
|
Chunked.HeaderReceived header_received = 7;
|
||||||
|
Chunked.Ct1Sampled ct1_sampled = 8;
|
||||||
|
Chunked.EkReceivedCt1Sampled ek_received_ct1_sampled = 9;
|
||||||
|
Chunked.Ct1Acknowledged ct1_acknowledged = 10;
|
||||||
|
Chunked.Ct2Sampled ct2_sampled = 11;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
message Chain {
|
||||||
|
message Epoch {
|
||||||
|
message Direction {
|
||||||
|
uint32 ctr = 1;
|
||||||
|
bytes next = 2;
|
||||||
|
bytes prev = 3;
|
||||||
|
}
|
||||||
|
Direction send = 1;
|
||||||
|
Direction recv = 2;
|
||||||
|
}
|
||||||
|
bool a2b = 1;
|
||||||
|
uint64 current_epoch = 2;
|
||||||
|
repeated Epoch links = 3;
|
||||||
|
bytes next_root = 4;
|
||||||
|
}
|
||||||
6
src/proto/pq_ratchet.rs
Normal file
@ -0,0 +1,6 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
#![allow(clippy::derive_partial_eq_without_eq)]
|
||||||
|
|
||||||
|
include!(concat!(env!("OUT_DIR"), "/signal.proto.pq_ratchet.rs"));
|
||||||
18
src/serialize.rs
Normal file
@ -0,0 +1,18 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use crate::encoding;
|
||||||
|
|
||||||
|
#[derive(Debug, thiserror::Error, Copy, Clone, PartialEq)]
|
||||||
|
pub enum Error {
|
||||||
|
#[error("General deserialization error")]
|
||||||
|
Deserialization,
|
||||||
|
#[error("Error with encoder/decoder serialization")]
|
||||||
|
EncodingDecoding,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<encoding::polynomial::PolynomialError> for Error {
|
||||||
|
fn from(_e: encoding::polynomial::PolynomialError) -> Error {
|
||||||
|
Error::EncodingDecoding
|
||||||
|
}
|
||||||
|
}
|
||||||
14
src/test.rs
Normal file
@ -0,0 +1,14 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
#![cfg(test)]
|
||||||
|
pub(crate) mod basic_messaging_behavior;
|
||||||
|
pub(crate) mod generic_dr;
|
||||||
|
pub(crate) mod messaging_behavior;
|
||||||
|
pub(crate) mod messaging_scka;
|
||||||
|
pub(crate) mod onlineoffline;
|
||||||
|
pub(crate) mod orchestrator;
|
||||||
|
pub(crate) mod pingpong_messaging_behavior;
|
||||||
|
pub(crate) mod scka;
|
||||||
|
pub(crate) mod v1_impls;
|
||||||
|
pub(crate) mod x25519_scka;
|
||||||
46
src/test/basic_messaging_behavior.rs
Normal file
@ -0,0 +1,46 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use rand::Rng;
|
||||||
|
use rand_core::CryptoRng;
|
||||||
|
|
||||||
|
use super::messaging_behavior::{self, Agent, Command, MessagingBehavior};
|
||||||
|
|
||||||
|
pub struct BasicMessagingBehavior {
|
||||||
|
p_a: f64,
|
||||||
|
p_b: f64,
|
||||||
|
receive_all_probability: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BasicMessagingBehavior {
|
||||||
|
pub fn new(p_a: f64, p_b: f64, receive_all_probability: f64) -> Self {
|
||||||
|
Self {
|
||||||
|
p_a,
|
||||||
|
p_b,
|
||||||
|
receive_all_probability,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MessagingBehavior for BasicMessagingBehavior {
|
||||||
|
fn next_commands<R: CryptoRng>(&mut self, rng: &mut R) -> Vec<messaging_behavior::Command> {
|
||||||
|
let mut cmds = Vec::new();
|
||||||
|
|
||||||
|
let a_sends = rng.random_bool(self.p_a);
|
||||||
|
let b_sends = rng.random_bool(self.p_b);
|
||||||
|
let do_receive = rng.random_bool(self.receive_all_probability);
|
||||||
|
|
||||||
|
if do_receive {
|
||||||
|
cmds.push(Command::ReceiveAll(Agent::Alex));
|
||||||
|
cmds.push(Command::ReceiveAll(Agent::Blake));
|
||||||
|
}
|
||||||
|
if a_sends {
|
||||||
|
cmds.push(Command::Send(Agent::Alex));
|
||||||
|
}
|
||||||
|
if b_sends {
|
||||||
|
cmds.push(Command::Send(Agent::Blake));
|
||||||
|
}
|
||||||
|
|
||||||
|
cmds
|
||||||
|
}
|
||||||
|
}
|
||||||
246
src/test/generic_dr.rs
Normal file
@ -0,0 +1,246 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use rand_core::CryptoRng;
|
||||||
|
|
||||||
|
use crate::{chain, EpochSecret, Error};
|
||||||
|
|
||||||
|
use super::scka::{Scka, SckaMessage};
|
||||||
|
|
||||||
|
pub struct DoubleRatchet<SCKA: Scka> {
|
||||||
|
symratchet: chain::Chain,
|
||||||
|
asymratchet: SCKA,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Send<SCKA: Scka> {
|
||||||
|
pub dr: DoubleRatchet<SCKA>,
|
||||||
|
pub msg: SCKA::Message,
|
||||||
|
index: u32,
|
||||||
|
key: Option<[u8; 32]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dr_send<SCKA: Scka, R: CryptoRng>(
|
||||||
|
dr: DoubleRatchet<SCKA>,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> Result<Send<SCKA>, Error> {
|
||||||
|
let DoubleRatchet {
|
||||||
|
asymratchet,
|
||||||
|
mut symratchet,
|
||||||
|
} = dr;
|
||||||
|
|
||||||
|
let (so, msg, asymratchet) = asymratchet.scka_send(rng)?;
|
||||||
|
|
||||||
|
if let Some((epoch, key)) = so.output_key {
|
||||||
|
symratchet.add_epoch(EpochSecret {
|
||||||
|
epoch,
|
||||||
|
secret: key.to_vec(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let (index, msg_key) = symratchet.send_key(so.sending_epoch)?;
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
dr: DoubleRatchet {
|
||||||
|
asymratchet,
|
||||||
|
symratchet,
|
||||||
|
},
|
||||||
|
msg,
|
||||||
|
index,
|
||||||
|
key: Some(msg_key.try_into().expect("msg_key is 32B")),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Recv<SCKA: Scka> {
|
||||||
|
pub dr: DoubleRatchet<SCKA>,
|
||||||
|
key: Option<[u8; 32]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dr_recv<SCKA: Scka>(
|
||||||
|
dr: DoubleRatchet<SCKA>,
|
||||||
|
msg: &SCKA::Message,
|
||||||
|
index: u32,
|
||||||
|
) -> Result<Recv<SCKA>, Error> {
|
||||||
|
let DoubleRatchet {
|
||||||
|
asymratchet,
|
||||||
|
mut symratchet,
|
||||||
|
} = dr;
|
||||||
|
let (ro, asymratchet) = asymratchet.scka_recv(msg)?;
|
||||||
|
|
||||||
|
if let Some((epoch, key)) = ro.output_key {
|
||||||
|
symratchet.add_epoch(EpochSecret {
|
||||||
|
epoch,
|
||||||
|
secret: key.to_vec(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let msg_key = symratchet.recv_key(msg.epoch(), index)?;
|
||||||
|
|
||||||
|
Ok(Recv {
|
||||||
|
dr: DoubleRatchet {
|
||||||
|
symratchet,
|
||||||
|
asymratchet,
|
||||||
|
},
|
||||||
|
key: Some(msg_key.try_into().expect("msg_key is 32B")),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
mod test {
|
||||||
|
use rand::Rng;
|
||||||
|
use rand::TryRngCore;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
chain, kdf, recv, send,
|
||||||
|
test::{scka::Scka, x25519_scka},
|
||||||
|
Error, Secret, SerializedMessage, SerializedState, Version,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::{dr_recv, dr_send, DoubleRatchet};
|
||||||
|
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
fn send_hybrid_message<SCKA: Scka>(
|
||||||
|
pq_state: &SerializedState,
|
||||||
|
ec_state: DoubleRatchet<SCKA>,
|
||||||
|
) -> Result<
|
||||||
|
(
|
||||||
|
SerializedState,
|
||||||
|
SerializedMessage,
|
||||||
|
DoubleRatchet<SCKA>,
|
||||||
|
SCKA::Message,
|
||||||
|
u32,
|
||||||
|
Secret,
|
||||||
|
),
|
||||||
|
Error,
|
||||||
|
> {
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
let (pq_send, ec_send) = (send(pq_state, &mut rng)?, dr_send(ec_state, &mut rng)?);
|
||||||
|
|
||||||
|
let key = kdf::hkdf_to_vec(
|
||||||
|
&[0u8; 32],
|
||||||
|
&[pq_send.key.unwrap(), ec_send.key.unwrap().to_vec()].concat(),
|
||||||
|
b"hybrid ratchet merge",
|
||||||
|
32,
|
||||||
|
);
|
||||||
|
Ok((
|
||||||
|
pq_send.state,
|
||||||
|
pq_send.msg,
|
||||||
|
ec_send.dr,
|
||||||
|
ec_send.msg,
|
||||||
|
ec_send.index,
|
||||||
|
key,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn receive_hybrid_message<SCKA: Scka>(
|
||||||
|
pq_state: &SerializedState,
|
||||||
|
pq_msg: &SerializedMessage,
|
||||||
|
ec_state: DoubleRatchet<SCKA>,
|
||||||
|
ec_msg: &SCKA::Message,
|
||||||
|
ec_idx: u32,
|
||||||
|
) -> Result<(SerializedState, DoubleRatchet<SCKA>, Secret), Error> {
|
||||||
|
let (pq_recv, ec_recv) = (recv(pq_state, pq_msg)?, dr_recv(ec_state, ec_msg, ec_idx)?);
|
||||||
|
|
||||||
|
let key = kdf::hkdf_to_vec(
|
||||||
|
&[0u8; 32],
|
||||||
|
&[pq_recv.key.unwrap(), ec_recv.key.unwrap().to_vec()].concat(),
|
||||||
|
b"hybrid ratchet merge",
|
||||||
|
32,
|
||||||
|
);
|
||||||
|
Ok((pq_recv.state, ec_recv.dr, key))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hybrid_ratchet() -> Result<(), Error> {
|
||||||
|
let alex_ec_ratchet = x25519_scka::states::States::init_a();
|
||||||
|
let alex_ec_chain = chain::Chain::new(&[43u8; 32], crate::Direction::A2B);
|
||||||
|
|
||||||
|
let alex_ec_state = DoubleRatchet {
|
||||||
|
asymratchet: alex_ec_ratchet,
|
||||||
|
symratchet: alex_ec_chain,
|
||||||
|
};
|
||||||
|
|
||||||
|
let blake_ec_ratchet = x25519_scka::states::States::init_b();
|
||||||
|
let blake_ec_chain = chain::Chain::new(&[43u8; 32], crate::Direction::B2A);
|
||||||
|
|
||||||
|
let blake_ec_state = DoubleRatchet {
|
||||||
|
asymratchet: blake_ec_ratchet,
|
||||||
|
symratchet: blake_ec_chain,
|
||||||
|
};
|
||||||
|
|
||||||
|
let version = Version::V1;
|
||||||
|
|
||||||
|
let alex_pq_state = version.initial_alice_state(&[41u8; 32], Version::V1);
|
||||||
|
let blake_pq_state = version.initial_bob_state(&[41u8; 32], Version::V1);
|
||||||
|
|
||||||
|
// Now let's send some messages
|
||||||
|
println!("alex send");
|
||||||
|
let (alex_pq_state, pq_msg, alex_ec_state, ec_msg, ec_idx, alex_key) =
|
||||||
|
send_hybrid_message(&alex_pq_state, alex_ec_state)?;
|
||||||
|
println!("blake recv");
|
||||||
|
let (blake_pq_state, blake_ec_state, blake_key) =
|
||||||
|
receive_hybrid_message(&blake_pq_state, &pq_msg, blake_ec_state, &ec_msg, ec_idx)?;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
|
||||||
|
println!("blake send");
|
||||||
|
let (mut blake_pq_state, pq_msg, mut blake_ec_state, ec_msg, ec_idx, blake_key) =
|
||||||
|
send_hybrid_message(&blake_pq_state, blake_ec_state)?;
|
||||||
|
println!("alex recv");
|
||||||
|
let (mut alex_pq_state, mut alex_ec_state, alex_key) =
|
||||||
|
receive_hybrid_message(&alex_pq_state, &pq_msg, alex_ec_state, &ec_msg, ec_idx)?;
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
|
||||||
|
// now let's mix it up a little
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
for _ in 0..1000 {
|
||||||
|
let a_send = rng.random_bool(0.5);
|
||||||
|
let b_send = rng.random_bool(0.5);
|
||||||
|
let a_recv = rng.random_bool(0.7);
|
||||||
|
let b_recv = rng.random_bool(0.7);
|
||||||
|
|
||||||
|
if a_send {
|
||||||
|
println!("alex send");
|
||||||
|
let (pq_state, pq_msg, ec_state, ec_msg, ec_idx, alex_key) =
|
||||||
|
send_hybrid_message(&alex_pq_state, alex_ec_state)?;
|
||||||
|
(alex_pq_state, alex_ec_state) = (pq_state, ec_state);
|
||||||
|
if b_recv {
|
||||||
|
println!("blake recv");
|
||||||
|
let (pq_state, ec_state, blake_key) = receive_hybrid_message(
|
||||||
|
&blake_pq_state,
|
||||||
|
&pq_msg,
|
||||||
|
blake_ec_state,
|
||||||
|
&ec_msg,
|
||||||
|
ec_idx,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
(blake_pq_state, blake_ec_state) = (pq_state, ec_state);
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if b_send {
|
||||||
|
println!("blake send");
|
||||||
|
let (pq_state, pq_msg, ec_state, ec_msg, ec_idx, blake_key) =
|
||||||
|
send_hybrid_message(&blake_pq_state, blake_ec_state)?;
|
||||||
|
(blake_pq_state, blake_ec_state) = (pq_state, ec_state);
|
||||||
|
if a_recv {
|
||||||
|
println!("alex recv");
|
||||||
|
let (pq_state, ec_state, alex_key) = receive_hybrid_message(
|
||||||
|
&alex_pq_state,
|
||||||
|
&pq_msg,
|
||||||
|
alex_ec_state,
|
||||||
|
&ec_msg,
|
||||||
|
ec_idx,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
(alex_pq_state, alex_ec_state) = (pq_state, ec_state);
|
||||||
|
|
||||||
|
assert_eq!(alex_key, blake_key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
21
src/test/messaging_behavior.rs
Normal file
@ -0,0 +1,21 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use rand_core::CryptoRng;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
|
pub enum Agent {
|
||||||
|
Alex,
|
||||||
|
Blake,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum Command {
|
||||||
|
Send(Agent),
|
||||||
|
#[allow(dead_code)]
|
||||||
|
Receive(Agent),
|
||||||
|
ReceiveAll(Agent),
|
||||||
|
}
|
||||||
|
pub trait MessagingBehavior {
|
||||||
|
fn next_commands<R: CryptoRng>(&mut self, rng: &mut R) -> Vec<Command>;
|
||||||
|
}
|
||||||
255
src/test/messaging_scka.rs
Normal file
@ -0,0 +1,255 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
|
use rand_core::{CryptoRng, OsRng};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
test::scka::{Scka, SckaInitializer, SckaVulnerability},
|
||||||
|
Epoch, Error, Secret,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub trait MessagingScka {
|
||||||
|
type CkaOutput;
|
||||||
|
type Message;
|
||||||
|
|
||||||
|
fn init_a<R: CryptoRng>(rng: &mut R) -> Result<Self, Error>
|
||||||
|
where
|
||||||
|
Self: Sized;
|
||||||
|
fn init_b<R: CryptoRng>(rng: &mut R) -> Result<Self, Error>
|
||||||
|
where
|
||||||
|
Self: Sized;
|
||||||
|
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
fn messaging_scka_send<R: CryptoRng>(
|
||||||
|
&mut self,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> Result<(Option<(Epoch, Self::CkaOutput)>, Self::Message), Error>
|
||||||
|
where
|
||||||
|
Self::CkaOutput: Sized;
|
||||||
|
fn messaging_scka_recv(
|
||||||
|
&mut self,
|
||||||
|
msg: &Self::Message,
|
||||||
|
rng: &mut OsRng,
|
||||||
|
) -> Result<Option<(Epoch, Self::CkaOutput)>, Error>
|
||||||
|
where
|
||||||
|
Self::CkaOutput: Sized;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait MessagingCkaVulnerability {
|
||||||
|
fn vulnerable_epochs(&self) -> Vec<Epoch>;
|
||||||
|
fn last_emitted_epoch(&self) -> Epoch;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct GenericMessagingScka<SCKA: Scka> {
|
||||||
|
scka: SCKA,
|
||||||
|
send_outputs: BTreeMap<Epoch, Secret>,
|
||||||
|
recv_outputs: BTreeMap<Epoch, Secret>,
|
||||||
|
last_emitted_epoch: Epoch,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<SCKA: Scka + SckaInitializer + Clone> MessagingScka for GenericMessagingScka<SCKA> {
|
||||||
|
type CkaOutput = Secret;
|
||||||
|
|
||||||
|
type Message = SCKA::Message;
|
||||||
|
|
||||||
|
fn init_a<R: CryptoRng>(rng: &mut R) -> Result<Self, Error>
|
||||||
|
where
|
||||||
|
Self: Sized,
|
||||||
|
{
|
||||||
|
Ok(Self {
|
||||||
|
scka: SCKA::init_a(rng)?,
|
||||||
|
send_outputs: BTreeMap::new(),
|
||||||
|
recv_outputs: BTreeMap::new(),
|
||||||
|
last_emitted_epoch: 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_b<R: CryptoRng>(rng: &mut R) -> Result<Self, Error>
|
||||||
|
where
|
||||||
|
Self: Sized,
|
||||||
|
{
|
||||||
|
Ok(Self {
|
||||||
|
scka: SCKA::init_b(rng)?,
|
||||||
|
send_outputs: BTreeMap::new(),
|
||||||
|
recv_outputs: BTreeMap::new(),
|
||||||
|
last_emitted_epoch: 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn messaging_scka_send<R: CryptoRng>(
|
||||||
|
&mut self,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> Result<(Option<(Epoch, Self::CkaOutput)>, Self::Message), crate::Error>
|
||||||
|
where
|
||||||
|
Self::CkaOutput: Sized,
|
||||||
|
{
|
||||||
|
let (so, msg, state) = self.scka.clone().scka_send(rng)?;
|
||||||
|
self.scka = state;
|
||||||
|
|
||||||
|
// self.last_emitted_epoch = so.sending_epoch;
|
||||||
|
let earliest_send_output = if let Some((ep, _)) = self.send_outputs.first_key_value() {
|
||||||
|
*ep
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some((ep, k)) = so.output_key {
|
||||||
|
self.send_outputs.insert(ep, k);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut take_key = false;
|
||||||
|
if let Some(entry) = self.recv_outputs.first_entry() {
|
||||||
|
let ep = *entry.key();
|
||||||
|
if ep <= so.sending_epoch
|
||||||
|
&& (earliest_send_output == 0 || ep < earliest_send_output)
|
||||||
|
&& (self.last_emitted_epoch == 0 || ep == self.last_emitted_epoch + 1)
|
||||||
|
{
|
||||||
|
take_key = true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let output_key = if take_key {
|
||||||
|
let entry = self.recv_outputs.first_entry().unwrap();
|
||||||
|
let ep = *entry.key();
|
||||||
|
self.last_emitted_epoch = ep;
|
||||||
|
// info!("messaging scka send outputs: {:?}", entry);
|
||||||
|
Some((ep, entry.remove()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((output_key, msg))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn messaging_scka_recv(
|
||||||
|
&mut self,
|
||||||
|
msg: &Self::Message,
|
||||||
|
_rng: &mut OsRng,
|
||||||
|
) -> Result<Option<(Epoch, Self::CkaOutput)>, Error>
|
||||||
|
where
|
||||||
|
Self::CkaOutput: Sized,
|
||||||
|
{
|
||||||
|
let (ro, state) = self.scka.clone().scka_recv(msg)?;
|
||||||
|
self.scka = state;
|
||||||
|
|
||||||
|
// self.last_emitted_epoch = ro.receiving_epoch;
|
||||||
|
let earliest_recv_output = if let Some((ep, _)) = self.recv_outputs.first_key_value() {
|
||||||
|
*ep
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some((ep, k)) = ro.output_key {
|
||||||
|
self.recv_outputs.insert(ep, k);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut take_key = false;
|
||||||
|
if let Some(entry) = self.send_outputs.first_entry() {
|
||||||
|
let ep = *entry.key();
|
||||||
|
if ep <= ro.receiving_epoch
|
||||||
|
&& (earliest_recv_output == 0 || ep < earliest_recv_output)
|
||||||
|
&& (self.last_emitted_epoch == 0 || ep == self.last_emitted_epoch + 1)
|
||||||
|
{
|
||||||
|
take_key = true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let output_key = if take_key {
|
||||||
|
let entry = self.send_outputs.first_entry().unwrap();
|
||||||
|
let ep = *entry.key();
|
||||||
|
self.last_emitted_epoch = ep;
|
||||||
|
// info!("messaging scka recv outputs: {:?}", entry);
|
||||||
|
Some((ep, entry.remove()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(output_key)
|
||||||
|
}
|
||||||
|
}
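
A simplified, standalone sketch (not part of this crate) of the in-order emission rule used in the two methods above: buffered epoch secrets are released one at a time, and only when the next consecutive epoch is available (epoch 0 meaning nothing has been emitted yet). The real code additionally gates on the current sending/receiving epoch and on pending outputs buffered for the other direction; this sketch deliberately omits those checks.

// Illustrative sketch only; simplified relative to the logic above.
use std::collections::BTreeMap;

fn take_next_in_order(
    buffered: &mut BTreeMap<u64, Vec<u8>>,
    last_emitted: &mut u64,
) -> Option<(u64, Vec<u8>)> {
    let (&ep, _) = buffered.first_key_value()?;
    // Only emit the immediate successor of the last emitted epoch.
    if *last_emitted == 0 || ep == *last_emitted + 1 {
        let secret = buffered.remove(&ep)?;
        *last_emitted = ep;
        Some((ep, secret))
    } else {
        None
    }
}

fn main() {
    let mut buffered = BTreeMap::new();
    buffered.insert(2u64, vec![0xB]);
    buffered.insert(1u64, vec![0xA]);
    let mut last = 0u64;
    assert_eq!(take_next_in_order(&mut buffered, &mut last), Some((1, vec![0xA])));
    assert_eq!(take_next_in_order(&mut buffered, &mut last), Some((2, vec![0xB])));
    assert_eq!(take_next_in_order(&mut buffered, &mut last), None);
}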
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
impl<SCKA: Scka + SckaVulnerability> MessagingCkaVulnerability for GenericMessagingScka<SCKA> {
|
||||||
|
fn vulnerable_epochs(&self) -> Vec<Epoch> {
|
||||||
|
let mut result = self.scka.vulnerable_epochs();
|
||||||
|
for (ep, _) in self.send_outputs.iter() {
|
||||||
|
result.push(*ep);
|
||||||
|
}
|
||||||
|
for (ep, _) in self.recv_outputs.iter() {
|
||||||
|
result.push(*ep);
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn last_emitted_epoch(&self) -> Epoch {
|
||||||
|
self.last_emitted_epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use rand::TryRngCore;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
test::{
|
||||||
|
messaging_scka::GenericMessagingScka, onlineoffline::OnlineOfflineMessagingBehavior,
|
||||||
|
orchestrator, pingpong_messaging_behavior::PingPongMessagingBehavior,
|
||||||
|
},
|
||||||
|
v1states::States,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn random_balanced() {
|
||||||
|
type Scka = States;
|
||||||
|
type Cka = GenericMessagingScka<Scka>;
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
orchestrator::random_balanced::<Cka, _>(&mut rng).expect("should run");
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
fn random_balanced_out_of_order() {
|
||||||
|
type Scka = States;
|
||||||
|
type Cka = GenericMessagingScka<Scka>;
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
orchestrator::random_balanced_out_of_order::<Cka, _>(&mut rng).expect("should run");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn random_balanced_healing() {
|
||||||
|
type Scka = States;
|
||||||
|
type Cka = GenericMessagingScka<Scka>;
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
orchestrator::random_healing_test::<Cka, _>(0.5, &mut rng).expect("should run");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn pingpong_healing() {
|
||||||
|
type Scka = States;
|
||||||
|
type Cka = GenericMessagingScka<Scka>;
|
||||||
|
let mut mp = PingPongMessagingBehavior::new(50, 0);
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
let hist =
|
||||||
|
orchestrator::controlled_messaging_healing_test::<Cka, PingPongMessagingBehavior, _>(
|
||||||
|
&mut mp, 10000, &mut rng,
|
||||||
|
)
|
||||||
|
.expect("should run");
|
||||||
|
orchestrator::print_histogram(&hist);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn onlineoffline_healing() {
|
||||||
|
type Scka = States;
|
||||||
|
type Cka = GenericMessagingScka<Scka>;
|
||||||
|
let mut mp = OnlineOfflineMessagingBehavior::new([0.04, 0.04], [0.05, 0.05]);
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
let hist = orchestrator::controlled_messaging_healing_test::<
|
||||||
|
Cka,
|
||||||
|
OnlineOfflineMessagingBehavior,
|
||||||
|
_,
|
||||||
|
>(&mut mp, 100000, &mut rng)
|
||||||
|
.expect("should run");
|
||||||
|
orchestrator::print_histogram(&hist);
|
||||||
|
orchestrator::print_healing_stats(&orchestrator::stats_from_histogram(&hist)[0]);
|
||||||
|
}
|
||||||
|
}
|
||||||
87
src/test/onlineoffline.rs
Normal file
@ -0,0 +1,87 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use rand::Rng;
|
||||||
|
use rand::TryRngCore;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use super::messaging_behavior::{Agent, Command, MessagingBehavior};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
enum State {
|
||||||
|
Online,
|
||||||
|
Offline,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl State {
|
||||||
|
fn transition(self, prob_come_online: f64, prob_go_offline: f64) -> Self {
|
||||||
|
let mut rng = OsRng.unwrap_err();
|
||||||
|
match self {
|
||||||
|
State::Online => {
|
||||||
|
if rng.random_bool(prob_go_offline) {
|
||||||
|
State::Offline
|
||||||
|
} else {
|
||||||
|
State::Online
|
||||||
|
}
|
||||||
|
}
|
||||||
|
State::Offline => {
|
||||||
|
if rng.random_bool(prob_come_online) {
|
||||||
|
State::Online
|
||||||
|
} else {
|
||||||
|
State::Offline
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_online(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
State::Online => true,
|
||||||
|
State::Offline => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const ALEX: usize = 0;
|
||||||
|
const BLAKE: usize = 1;
|
||||||
|
|
||||||
|
pub struct OnlineOfflineMessagingBehavior {
|
||||||
|
prob_go_offline: [f64; 2],
|
||||||
|
prob_come_online: [f64; 2],
|
||||||
|
agents: [Agent; 2],
|
||||||
|
states: [State; 2],
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OnlineOfflineMessagingBehavior {
|
||||||
|
pub fn new(prob_go_offline: [f64; 2], prob_come_online: [f64; 2]) -> Self {
|
||||||
|
Self {
|
||||||
|
prob_go_offline,
|
||||||
|
prob_come_online,
|
||||||
|
agents: [Agent::Alex, Agent::Blake],
|
||||||
|
states: [State::Online, State::Online],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MessagingBehavior for OnlineOfflineMessagingBehavior {
|
||||||
|
fn next_commands<R: rand_core::CryptoRng>(
|
||||||
|
&mut self,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> Vec<super::messaging_behavior::Command> {
|
||||||
|
let agent = if rng.random_bool(0.5) { ALEX } else { BLAKE };
|
||||||
|
|
||||||
|
self.states[agent] = self.states[agent]
|
||||||
|
.clone()
|
||||||
|
.transition(self.prob_come_online[agent], self.prob_go_offline[agent]);
|
||||||
|
let mut cmds = Vec::new();
|
||||||
|
|
||||||
|
if self.states[agent].is_online() {
|
||||||
|
cmds.push(Command::ReceiveAll(self.agents[agent]));
|
||||||
|
if rng.random_bool(0.5) {
|
||||||
|
cmds.push(Command::Send(self.agents[agent]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
cmds
|
||||||
|
}
|
||||||
|
}
|
||||||
995
src/test/orchestrator.rs
Normal file
@ -0,0 +1,995 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use std::collections::{BTreeMap, HashMap, VecDeque};
|
||||||
|
use std::fmt::Debug;
|
||||||
|
|
||||||
|
use rand::prelude::*;
|
||||||
|
use rand::rngs::OsRng;
|
||||||
|
use rand::rngs::StdRng;
|
||||||
|
use rand_core::CryptoRng;
|
||||||
|
|
||||||
|
use crate::test::messaging_behavior::{Agent, Command};
|
||||||
|
use crate::test::messaging_scka::{MessagingCkaVulnerability, MessagingScka};
|
||||||
|
use crate::Epoch;
|
||||||
|
use crate::Error;
|
||||||
|
|
||||||
|
use super::basic_messaging_behavior::BasicMessagingBehavior;
|
||||||
|
use super::messaging_behavior::MessagingBehavior;
|
||||||
|
|
||||||
|
pub struct OrchestratorBase<K>
|
||||||
|
where
|
||||||
|
K: MessagingScka,
|
||||||
|
<K as MessagingScka>::CkaOutput: PartialEq + Debug,
|
||||||
|
{
|
||||||
|
a2b_msg_queue: VecDeque<K::Message>,
|
||||||
|
b2a_msg_queue: VecDeque<K::Message>,
|
||||||
|
key_history_a: Vec<K::CkaOutput>,
|
||||||
|
key_history_b: Vec<K::CkaOutput>,
|
||||||
|
pub alex: K,
|
||||||
|
pub blake: K,
|
||||||
|
pub last_emitted_epoch_a: Epoch,
|
||||||
|
pub last_emitted_epoch_b: Epoch,
|
||||||
|
pub a_sent: usize,
|
||||||
|
pub b_sent: usize,
|
||||||
|
pub a_rcvd: usize,
|
||||||
|
pub b_rcvd: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K> OrchestratorBase<K>
|
||||||
|
where
|
||||||
|
K: MessagingScka + MessagingCkaVulnerability,
|
||||||
|
<K as MessagingScka>::CkaOutput: PartialEq + Debug,
|
||||||
|
{
|
||||||
|
pub fn new<R: CryptoRng>(rng: &mut R) -> Result<Self, Error> {
|
||||||
|
let alex = K::init_a(rng)?;
|
||||||
|
let blake = K::init_b(rng)?;
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
a2b_msg_queue: VecDeque::new(),
|
||||||
|
b2a_msg_queue: VecDeque::new(),
|
||||||
|
key_history_a: Vec::new(),
|
||||||
|
key_history_b: Vec::new(),
|
||||||
|
alex,
|
||||||
|
blake,
|
||||||
|
last_emitted_epoch_a: 0,
|
||||||
|
last_emitted_epoch_b: 0,
|
||||||
|
a_sent: 0,
|
||||||
|
b_sent: 0,
|
||||||
|
a_rcvd: 0,
|
||||||
|
b_rcvd: 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn key_history_is_consistent(&self) {
|
||||||
|
for (ka, kb) in self.key_history_a.iter().zip(self.key_history_b.iter()) {
|
||||||
|
assert_eq!(ka, kb);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::type_complexity)]
|
||||||
|
fn data_for_agent(
|
||||||
|
&mut self,
|
||||||
|
is_alex: bool,
|
||||||
|
) -> (
|
||||||
|
&mut K,
|
||||||
|
&mut VecDeque<K::Message>,
|
||||||
|
&mut VecDeque<K::Message>,
|
||||||
|
&mut Vec<K::CkaOutput>,
|
||||||
|
) {
|
||||||
|
if is_alex {
|
||||||
|
(
|
||||||
|
&mut self.alex,
|
||||||
|
&mut self.b2a_msg_queue,
|
||||||
|
&mut self.a2b_msg_queue,
|
||||||
|
&mut self.key_history_a,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
&mut self.blake,
|
||||||
|
&mut self.a2b_msg_queue,
|
||||||
|
&mut self.b2a_msg_queue,
|
||||||
|
&mut self.key_history_b,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn send<R: CryptoRng>(&mut self, is_alex: bool, rng: &mut R) -> Result<bool, Error> {
|
||||||
|
let mut emitted_key = false;
|
||||||
|
let mut emitted_ep: Option<Epoch> = None;
|
||||||
|
{
|
||||||
|
{
|
||||||
|
let (agent, _incoming_msg_queue, outgoing_msg_queue, key_history) =
|
||||||
|
self.data_for_agent(is_alex);
|
||||||
|
let (out, msg) = agent.messaging_scka_send(rng)?;
|
||||||
|
if let Some((_ep, key)) = out {
|
||||||
|
key_history.push(key);
|
||||||
|
emitted_ep = Some(agent.last_emitted_epoch());
|
||||||
|
emitted_key = true;
|
||||||
|
}
|
||||||
|
outgoing_msg_queue.push_back(msg);
|
||||||
|
}
|
||||||
|
if is_alex {
|
||||||
|
self.a_sent += 1;
|
||||||
|
} else {
|
||||||
|
self.b_sent += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ep) = emitted_ep {
|
||||||
|
if is_alex {
|
||||||
|
self.last_emitted_epoch_a = ep;
|
||||||
|
} else {
|
||||||
|
self.last_emitted_epoch_b = ep;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(emitted_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn receive_in_order(&mut self, is_alex: bool) -> Result<bool, Error> {
|
||||||
|
let mut emitted_key = false;
|
||||||
|
let mut emitted_ep: Option<Epoch> = None;
|
||||||
|
{
|
||||||
|
let (agent, incoming_message_queue, _omq, key_history) = self.data_for_agent(is_alex);
|
||||||
|
|
||||||
|
let maybe_msg = incoming_message_queue.pop_front();
|
||||||
|
if let Some(msg) = maybe_msg {
|
||||||
|
let mut rng = OsRng;
|
||||||
|
let out = agent.messaging_scka_recv(&msg, &mut rng)?;
|
||||||
|
if let Some((_ep, key)) = out {
|
||||||
|
key_history.push(key);
|
||||||
|
emitted_ep = Some(agent.last_emitted_epoch());
|
||||||
|
emitted_key = true
|
||||||
|
}
|
||||||
|
if is_alex {
|
||||||
|
self.a_rcvd += 1;
|
||||||
|
} else {
|
||||||
|
self.b_rcvd += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(ep) = emitted_ep {
|
||||||
|
if is_alex {
|
||||||
|
self.last_emitted_epoch_a = ep;
|
||||||
|
} else {
|
||||||
|
self.last_emitted_epoch_b = ep;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(emitted_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn receive_at(&mut self, is_alex: bool, i: usize) -> Result<bool, Error> {
|
||||||
|
let mut emitted_key = false;
|
||||||
|
let mut emitted_ep: Option<Epoch> = None;
|
||||||
|
{
|
||||||
|
let (agent, incoming_message_queue, _omq, key_history) = self.data_for_agent(is_alex);
|
||||||
|
|
||||||
|
let maybe_msg = incoming_message_queue.remove(i);
|
||||||
|
if let Some(msg) = maybe_msg {
|
||||||
|
let mut rng = OsRng;
|
||||||
|
let out = agent.messaging_scka_recv(&msg, &mut rng)?;
|
||||||
|
if let Some((_ep, key)) = out {
|
||||||
|
key_history.push(key);
|
||||||
|
emitted_ep = Some(agent.last_emitted_epoch());
|
||||||
|
emitted_key = true
|
||||||
|
}
|
||||||
|
if is_alex {
|
||||||
|
self.a_rcvd += 1;
|
||||||
|
} else {
|
||||||
|
self.b_rcvd += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(ep) = emitted_ep {
|
||||||
|
if is_alex {
|
||||||
|
self.last_emitted_epoch_a = ep;
|
||||||
|
} else {
|
||||||
|
self.last_emitted_epoch_b = ep;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(emitted_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn drop_message_at(&mut self, is_alex: bool, i: usize) {
|
||||||
|
let (_agent, incoming_message_queue, _omq, _key_history) = self.data_for_agent(is_alex);
|
||||||
|
|
||||||
|
incoming_message_queue.remove(i);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn incoming_queue_size(&self, is_alex: bool) -> usize {
|
||||||
|
if is_alex {
|
||||||
|
self.b2a_msg_queue.len()
|
||||||
|
} else {
|
||||||
|
self.a2b_msg_queue.len()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn receive_all(&mut self, is_alex: bool) -> Result<bool, Error> {
|
||||||
|
let mut emitted_ep: Option<Epoch> = None;
|
||||||
|
let mut emitted_key = false;
|
||||||
|
let mut num_received = 0usize;
|
||||||
|
let (agent, incoming_message_queue, _omq, key_history) = self.data_for_agent(is_alex);
|
||||||
|
while !incoming_message_queue.is_empty() {
|
||||||
|
let maybe_msg = incoming_message_queue.pop_front();
|
||||||
|
if let Some(msg) = maybe_msg {
|
||||||
|
let mut rng = OsRng;
|
||||||
|
let out = agent.messaging_scka_recv(&msg, &mut rng)?;
|
||||||
|
if let Some((_ep, key)) = out {
|
||||||
|
key_history.push(key);
|
||||||
|
emitted_key = true;
|
||||||
|
emitted_ep = Some(agent.last_emitted_epoch());
|
||||||
|
}
|
||||||
|
num_received += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if is_alex {
|
||||||
|
self.a_rcvd += num_received;
|
||||||
|
} else {
|
||||||
|
self.b_rcvd += num_received;
|
||||||
|
}
|
||||||
|
if let Some(ep) = emitted_ep {
|
||||||
|
if is_alex {
|
||||||
|
self.last_emitted_epoch_a = ep;
|
||||||
|
} else {
|
||||||
|
self.last_emitted_epoch_b = ep;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(emitted_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn last_vulnerable_epoch_a(&self) -> Epoch {
|
||||||
|
*self.alex.vulnerable_epochs().iter().max().unwrap_or(&0u64)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn last_vulnerable_epoch_b(&self) -> Epoch {
|
||||||
|
*self.blake.vulnerable_epochs().iter().max().unwrap_or(&0u64)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn qlen(&self, for_alex: bool) -> usize {
|
||||||
|
if for_alex {
|
||||||
|
self.b2a_msg_queue.len()
|
||||||
|
} else {
|
||||||
|
self.a2b_msg_queue.len()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_msg_queue_lengths(&self) {
|
||||||
|
println!(
|
||||||
|
"Alex has {} incoming, Blake has {} incoming",
|
||||||
|
self.b2a_msg_queue.len(),
|
||||||
|
self.a2b_msg_queue.len()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_key_history_lengths(&self) {
|
||||||
|
println!(
|
||||||
|
"Alex emitted {} keys, Blake emitted {} keys",
|
||||||
|
self.key_history_a.len(),
|
||||||
|
self.key_history_b.len()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Compromise {
|
||||||
|
#[allow(dead_code)]
|
||||||
|
tick: usize,
|
||||||
|
a_sent: usize,
|
||||||
|
b_sent: usize,
|
||||||
|
a_rcvd: usize,
|
||||||
|
b_rcvd: usize,
|
||||||
|
#[allow(dead_code)]
|
||||||
|
heals_at: Epoch,
|
||||||
|
exposed_epochs: Vec<Epoch>,
|
||||||
|
active_epoch: Epoch,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
struct EpochVulnsetInfo {
|
||||||
|
a_start: usize,
|
||||||
|
a_end: usize,
|
||||||
|
b_start: usize,
|
||||||
|
b_end: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HealingHistogramEntry {
|
||||||
|
pub num_msgs: usize,
|
||||||
|
pub tot_by_a: usize,
|
||||||
|
pub tot_by_b: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HealingStats {
|
||||||
|
pub mean: f64,
|
||||||
|
pub stddev: f64,
|
||||||
|
pub deciles: [usize; 11],
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn stats_from_histogram(hist: &Vec<HealingHistogramEntry>) -> [HealingStats; 2] {
|
||||||
|
// first pass: compute aggregates: count, sum(num_msgs), sum(num_msgs^2)
|
||||||
|
let mut count = [0usize; 2];
|
||||||
|
let mut sum = [0usize; 2];
|
||||||
|
let mut sum_squares = [0usize; 2];
|
||||||
|
let mut min = [usize::MAX; 2];
|
||||||
|
let mut max = [0usize; 2];
|
||||||
|
let mut deciles = [[0usize; 11]; 2];
|
||||||
|
|
||||||
|
let mut mean = [0f64; 2];
|
||||||
|
let mut mean_square = [0f64; 2];
|
||||||
|
let mut var = [0f64; 2];
|
||||||
|
let mut stddev = [0f64; 2];
|
||||||
|
|
||||||
|
for entry in hist {
|
||||||
|
count[0] += entry.tot_by_a;
|
||||||
|
count[1] += entry.tot_by_b;
|
||||||
|
sum[0] += entry.num_msgs * entry.tot_by_a;
|
||||||
|
sum[1] += entry.num_msgs * entry.tot_by_b;
|
||||||
|
sum_squares[0] += entry.num_msgs * entry.num_msgs * entry.tot_by_a;
|
||||||
|
sum_squares[1] += entry.num_msgs * entry.num_msgs * entry.tot_by_b;
|
||||||
|
if entry.tot_by_a > 0 {
|
||||||
|
min[0] = std::cmp::min(min[0], entry.num_msgs);
|
||||||
|
max[0] = std::cmp::max(max[0], entry.num_msgs);
|
||||||
|
}
|
||||||
|
if entry.tot_by_b > 0 {
|
||||||
|
min[1] = std::cmp::min(min[1], entry.num_msgs);
|
||||||
|
max[1] = std::cmp::max(max[1], entry.num_msgs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for i in 0..2 {
|
||||||
|
mean[i] = (sum[i] as f64) / (count[i] as f64);
|
||||||
|
mean_square[i] = (sum_squares[i] as f64) / (count[i] as f64);
|
||||||
|
var[i] = mean_square[i] - mean[i] * mean[i];
|
||||||
|
stddev[i] = var[i].sqrt();
|
||||||
|
}
|
||||||
|
|
||||||
|
for i in 0..2 {
|
||||||
|
deciles[i][0] = min[i];
|
||||||
|
deciles[i][10] = max[i];
|
||||||
|
let mut cumulative_count = 0usize;
|
||||||
|
let mut ctr = 1usize;
|
||||||
|
for entry in hist {
|
||||||
|
cummulative_count += if i == 0 {
|
||||||
|
entry.tot_by_a
|
||||||
|
} else {
|
||||||
|
entry.tot_by_b
|
||||||
|
};
|
||||||
|
let decile_target = (count[i] * ctr) / 10;
|
||||||
|
if cummulative_count > decile_target {
|
||||||
|
deciles[i][ctr] = entry.num_msgs;
|
||||||
|
ctr += 1;
|
||||||
|
}
|
||||||
|
if ctr == 10 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
[
|
||||||
|
HealingStats {
|
||||||
|
mean: mean[0],
|
||||||
|
stddev: stddev[0],
|
||||||
|
deciles: deciles[0],
|
||||||
|
},
|
||||||
|
HealingStats {
|
||||||
|
mean: mean[1],
|
||||||
|
stddev: stddev[1],
|
||||||
|
deciles: deciles[1],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
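
// (Illustrative note, not part of the original commit: the aggregation above uses the
// shortcut Var[X] = E[X^2] - E[X]^2 over the histogram weights. For example, entries
// (num_msgs, count) = (1, 2) and (3, 2) give mean 2.0, mean_square 5.0, var 1.0, stddev 1.0.)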

pub fn print_histogram(hist: &Vec<HealingHistogramEntry>) {
    println!("num exposed,msgs exposed by a comp,msgs exposed by b comp,freq a exposed by full,freq b exposed by full,tot exposed by full");
    for entry in hist {
        println!("{},{},{}", entry.num_msgs, entry.tot_by_a, entry.tot_by_b,);
    }
}

pub fn print_healing_stats(stats: &HealingStats) {
    println!("mean, stddev, min,p10,p20,p30,p40,p50,p60,p70,p80,p90,max");
    println!(
        "{},{},{},{},{},{},{},{},{},{},{},{},{}",
        stats.mean,
        stats.stddev,
        stats.deciles[0],
        stats.deciles[1],
        stats.deciles[2],
        stats.deciles[3],
        stats.deciles[4],
        stats.deciles[5],
        stats.deciles[6],
        stats.deciles[7],
        stats.deciles[8],
        stats.deciles[9],
        stats.deciles[10],
    )
}

pub fn random_healing_test<CKA, R: CryptoRng>(ratio: f64, rng: &mut R) -> Result<(), Error>
where
    CKA: MessagingScka + MessagingCkaVulnerability,
    <CKA as MessagingScka>::CkaOutput: PartialEq + Debug,
{
    let base_send_prob = 0.5;
    let p_a = base_send_prob * ratio;
    let p_b = base_send_prob * (1.0 - ratio);
    let mut mp = BasicMessagingBehavior::new(p_a, p_b, 0.9);
    let hist =
        controlled_messaging_healing_test::<CKA, BasicMessagingBehavior, R>(&mut mp, 10000, rng)?;
    print_histogram(&hist);
    print_healing_stats(&stats_from_histogram(&hist)[0]);
    Ok(())
}

pub fn controlled_messaging_healing_test<CKA, MP, R>(
|
||||||
|
mp: &mut MP,
|
||||||
|
num_ticks: usize,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> Result<Vec<HealingHistogramEntry>, Error>
|
||||||
|
where
|
||||||
|
CKA: MessagingScka + MessagingCkaVulnerability,
|
||||||
|
<CKA as MessagingScka>::CkaOutput: PartialEq + Debug,
|
||||||
|
MP: MessagingBehavior,
|
||||||
|
R: CryptoRng,
|
||||||
|
{
|
||||||
|
let mut message_pattern_rng = StdRng::seed_from_u64(43);
|
||||||
|
|
||||||
|
let mut orchestrator = OrchestratorBase::<CKA>::new(rng)?;
|
||||||
|
let mut alex_compromises_that_heal_at = HashMap::<Epoch, Vec<Compromise>>::new();
|
||||||
|
let mut blake_compromises_that_heal_at = HashMap::<Epoch, Vec<Compromise>>::new();
|
||||||
|
|
||||||
|
let mut epoch_info = BTreeMap::<Epoch, EpochVulnsetInfo>::new();
|
||||||
|
|
||||||
|
for tick in 0..num_ticks {
|
||||||
|
// println!("tick {}", tick);
|
||||||
|
let mut emitted_key = false;
|
||||||
|
let mut alex_emitted_key = false;
|
||||||
|
let mut blake_emitted_key = false;
|
||||||
|
let cmds = mp.next_commands(&mut message_pattern_rng);
|
||||||
|
for cmd in &cmds {
|
||||||
|
match cmd {
|
||||||
|
Command::Send(agent) => {
|
||||||
|
let use_alex = agent == &Agent::Alex;
|
||||||
|
let emitted_send = orchestrator.send(use_alex, rng)?;
|
||||||
|
|
||||||
|
let OrchestratorBase {
|
||||||
|
a_sent,
|
||||||
|
b_sent,
|
||||||
|
a_rcvd,
|
||||||
|
b_rcvd,
|
||||||
|
..
|
||||||
|
} = orchestrator;
|
||||||
|
|
||||||
|
emitted_key = emitted_key || emitted_send;
|
||||||
|
alex_emitted_key = alex_emitted_key || (emitted_key && use_alex);
|
||||||
|
blake_emitted_key = blake_emitted_key || (emitted_key && !use_alex);
|
||||||
|
|
||||||
|
if alex_emitted_key {
|
||||||
|
// alex emitted a key and may have healed
|
||||||
|
let emitted_ep = orchestrator.alex.last_emitted_epoch();
|
||||||
|
epoch_info
|
||||||
|
.entry(emitted_ep)
|
||||||
|
.and_modify(|inf| {
|
||||||
|
inf.a_start = a_sent;
|
||||||
|
})
|
||||||
|
.or_insert(EpochVulnsetInfo {
|
||||||
|
a_start: a_sent,
|
||||||
|
a_end: 0,
|
||||||
|
b_start: usize::MAX,
|
||||||
|
b_end: 0,
|
||||||
|
});
|
||||||
|
if emitted_ep > 0 {
|
||||||
|
epoch_info
|
||||||
|
.entry(emitted_ep - 1)
|
||||||
|
.and_modify(|inf| {
|
||||||
|
inf.a_end = a_sent;
|
||||||
|
})
|
||||||
|
.or_insert(EpochVulnsetInfo {
|
||||||
|
a_start: 0,
|
||||||
|
a_end: a_sent,
|
||||||
|
b_start: usize::MAX,
|
||||||
|
b_end: 0,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if blake_emitted_key {
|
||||||
|
// blake emitted a key and may have healed
|
||||||
|
// alex emitted a key and may have healed
|
||||||
|
let emitted_ep = orchestrator.blake.last_emitted_epoch();
|
||||||
|
epoch_info
|
||||||
|
.entry(emitted_ep)
|
||||||
|
.and_modify(|inf| {
|
||||||
|
inf.b_start = b_sent;
|
||||||
|
})
|
||||||
|
.or_insert(EpochVulnsetInfo {
|
||||||
|
a_start: usize::MAX,
|
||||||
|
a_end: 0,
|
||||||
|
b_start: b_sent,
|
||||||
|
b_end: 0,
|
||||||
|
});
|
||||||
|
if emitted_ep > 0 {
|
||||||
|
epoch_info
|
||||||
|
.entry(emitted_ep - 1)
|
||||||
|
.and_modify(|inf| {
|
||||||
|
inf.b_end = b_sent;
|
||||||
|
})
|
||||||
|
.or_insert(EpochVulnsetInfo {
|
||||||
|
a_start: usize::MAX,
|
||||||
|
a_end: 0,
|
||||||
|
b_start: 0,
|
||||||
|
b_end: b_sent,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if use_alex {
|
||||||
|
let heals_at = orchestrator.last_vulnerable_epoch_a() + 1;
|
||||||
|
let comp = Compromise {
|
||||||
|
tick,
|
||||||
|
a_sent,
|
||||||
|
b_sent,
|
||||||
|
a_rcvd,
|
||||||
|
b_rcvd,
|
||||||
|
heals_at,
|
||||||
|
exposed_epochs: orchestrator.alex.vulnerable_epochs(),
|
||||||
|
active_epoch: orchestrator.alex.last_emitted_epoch(),
|
||||||
|
};
|
||||||
|
alex_compromises_that_heal_at
|
||||||
|
.entry(heals_at)
|
||||||
|
.or_default()
|
||||||
|
.push(comp);
|
||||||
|
} else {
|
||||||
|
let heals_at = orchestrator.last_vulnerable_epoch_b() + 1;
|
||||||
|
let comp = Compromise {
|
||||||
|
tick,
|
||||||
|
a_sent,
|
||||||
|
b_sent,
|
||||||
|
a_rcvd,
|
||||||
|
b_rcvd,
|
||||||
|
heals_at,
|
||||||
|
exposed_epochs: orchestrator.blake.vulnerable_epochs(),
|
||||||
|
active_epoch: orchestrator.blake.last_emitted_epoch(),
|
||||||
|
};
|
||||||
|
blake_compromises_that_heal_at
|
||||||
|
.entry(heals_at)
|
||||||
|
.or_default()
|
||||||
|
.push(comp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
                Command::ReceiveAll(agent) => {
                    let use_alex = agent == &Agent::Alex;
                    let do_compromise = orchestrator.incoming_queue_size(use_alex) > 0;
                    let emitted_recv = orchestrator.receive_all(use_alex)?;

                    let OrchestratorBase {
                        a_sent,
                        b_sent,
                        a_rcvd,
                        b_rcvd,
                        ..
                    } = orchestrator;

                    emitted_key = emitted_key || emitted_recv;
                    alex_emitted_key = alex_emitted_key || (emitted_key && use_alex);
                    blake_emitted_key = blake_emitted_key || (emitted_key && !use_alex);

                    if alex_emitted_key {
                        // alex emitted a key and may have healed
                        let emitted_ep = orchestrator.alex.last_emitted_epoch();
                        epoch_info
                            .entry(emitted_ep)
                            .and_modify(|inf| {
                                inf.a_start = a_sent;
                            })
                            .or_insert(EpochVulnsetInfo {
                                a_start: a_sent,
                                a_end: 0,
                                b_start: usize::MAX,
                                b_end: 0,
                            });
                        if emitted_ep > 0 {
                            epoch_info
                                .entry(emitted_ep - 1)
                                .and_modify(|inf| {
                                    inf.a_end = a_sent;
                                })
                                .or_insert(EpochVulnsetInfo {
                                    a_start: 0,
                                    a_end: a_sent,
                                    b_start: usize::MAX,
                                    b_end: 0,
                                });
                        }
                    }
                    if blake_emitted_key {
                        // blake emitted a key and may have healed
                        let emitted_ep = orchestrator.blake.last_emitted_epoch();
                        epoch_info
                            .entry(emitted_ep)
                            .and_modify(|inf| {
                                inf.b_start = b_sent;
                            })
                            .or_insert(EpochVulnsetInfo {
                                a_start: usize::MAX,
                                a_end: 0,
                                b_start: b_sent,
                                b_end: 0,
                            });
                        if emitted_ep > 0 {
                            epoch_info
                                .entry(emitted_ep - 1)
                                .and_modify(|inf| {
                                    inf.b_end = b_sent;
                                })
                                .or_insert(EpochVulnsetInfo {
                                    a_start: usize::MAX,
                                    a_end: 0,
                                    b_start: 0,
                                    b_end: b_sent,
                                });
                        }
                    }
                    if do_compromise {
                        if use_alex {
                            let heals_at = orchestrator.last_vulnerable_epoch_a() + 1;
                            let comp = Compromise {
                                tick,
                                a_sent,
                                b_sent,
                                a_rcvd,
                                b_rcvd,
                                heals_at,
                                exposed_epochs: orchestrator.alex.vulnerable_epochs(),
                                active_epoch: orchestrator.alex.last_emitted_epoch(),
                            };
                            alex_compromises_that_heal_at
                                .entry(heals_at)
                                .or_default()
                                .push(comp);
                        } else {
                            let heals_at = orchestrator.last_vulnerable_epoch_b() + 1;
                            let comp = Compromise {
                                tick,
                                a_sent,
                                b_sent,
                                a_rcvd,
                                b_rcvd,
                                heals_at,
                                exposed_epochs: orchestrator.blake.vulnerable_epochs(),
                                active_epoch: orchestrator.blake.last_emitted_epoch(),
                            };
                            blake_compromises_that_heal_at
                                .entry(heals_at)
                                .or_default()
                                .push(comp);
                        }
                    }
                }
                Command::Receive(agent) => {
                    let use_alex = agent == &Agent::Alex;
                    let do_compromise = orchestrator.incoming_queue_size(use_alex) > 0;
                    let emitted_recv = orchestrator.receive_in_order(use_alex)?;

                    let OrchestratorBase {
                        a_sent,
                        b_sent,
                        a_rcvd,
                        b_rcvd,
                        ..
                    } = orchestrator;

                    emitted_key = emitted_key || emitted_recv;
                    alex_emitted_key = alex_emitted_key || (emitted_key && use_alex);
                    blake_emitted_key = blake_emitted_key || (emitted_key && !use_alex);

                    if alex_emitted_key {
                        // alex emitted a key and may have healed
                        let emitted_ep = orchestrator.alex.last_emitted_epoch();
                        epoch_info
                            .entry(emitted_ep)
                            .and_modify(|inf| {
                                inf.a_start = a_sent;
                            })
                            .or_insert(EpochVulnsetInfo {
                                a_start: a_sent,
                                a_end: 0,
                                b_start: usize::MAX,
                                b_end: 0,
                            });
                        if emitted_ep > 0 {
                            epoch_info
                                .entry(emitted_ep - 1)
                                .and_modify(|inf| {
                                    inf.a_end = a_sent;
                                })
                                .or_insert(EpochVulnsetInfo {
                                    a_start: 0,
                                    a_end: a_sent,
                                    b_start: usize::MAX,
                                    b_end: 0,
                                });
                        }
                    }
                    if blake_emitted_key {
                        // blake emitted a key and may have healed
                        let emitted_ep = orchestrator.blake.last_emitted_epoch();
                        epoch_info
                            .entry(emitted_ep)
                            .and_modify(|inf| {
                                inf.b_start = b_sent;
                            })
                            .or_insert(EpochVulnsetInfo {
                                a_start: usize::MAX,
                                a_end: 0,
                                b_start: b_sent,
                                b_end: 0,
                            });
                        if emitted_ep > 0 {
                            epoch_info
                                .entry(emitted_ep - 1)
                                .and_modify(|inf| {
                                    inf.b_end = b_sent;
                                })
                                .or_insert(EpochVulnsetInfo {
                                    a_start: usize::MAX,
                                    a_end: 0,
                                    b_start: 0,
                                    b_end: b_sent,
                                });
                        }
                    }
                    if do_compromise {
                        if use_alex {
                            let heals_at = orchestrator.last_vulnerable_epoch_a() + 1;
                            let comp = Compromise {
                                tick,
                                a_sent,
                                b_sent,
                                a_rcvd,
                                b_rcvd,
                                heals_at,
                                exposed_epochs: orchestrator.alex.vulnerable_epochs(),
                                active_epoch: orchestrator.alex.last_emitted_epoch(),
                            };
                            alex_compromises_that_heal_at
                                .entry(heals_at)
                                .or_default()
                                .push(comp);
                        } else {
                            let heals_at = orchestrator.last_vulnerable_epoch_b() + 1;
                            let comp = Compromise {
                                tick,
                                a_sent,
                                b_sent,
                                a_rcvd,
                                b_rcvd,
                                heals_at,
                                exposed_epochs: orchestrator.blake.vulnerable_epochs(),
                                active_epoch: orchestrator.blake.last_emitted_epoch(),
                            };
                            blake_compromises_that_heal_at
                                .entry(heals_at)
                                .or_default()
                                .push(comp);
                        }
                    }
                }
            }
        }
        orchestrator.key_history_is_consistent();
    }

    let mut a_comp_hist = BTreeMap::<usize, usize>::new();
    let mut b_comp_hist = BTreeMap::<usize, usize>::new();

    for (_ep, cs) in alex_compromises_that_heal_at {
        for c in cs {
            if let Some(active_ep) = epoch_info.get(&c.active_epoch) {
                if active_ep.a_end < c.a_sent {
                    // println!("skip epoch info A1 ep {}", c.active_epoch);
                    continue;
                }
                let symratchet_exposed_a_msgs = active_ep.a_end.saturating_sub(c.a_sent);

                if active_ep.b_end < c.a_rcvd {
                    // println!("skip epoch info A2 ep {}", c.active_epoch);
                    continue;
                }
                let symratchet_exposed_b_msgs = active_ep.b_end.saturating_sub(c.a_rcvd);
                let mut exposed_a = symratchet_exposed_a_msgs;
                let mut exposed_b = symratchet_exposed_b_msgs;
                for ep in c.exposed_epochs {
                    //(c.active_epoch+1)..c.heals_at { // in c.exposed_epochs
                    if let Some(epinf) = epoch_info.get(&ep) {
                        if epinf.a_end >= epinf.a_start && epinf.b_end >= epinf.b_start {
                            exposed_a += epinf.a_end - epinf.a_start;
                            exposed_b += epinf.b_end - epinf.b_start;
                        } else {
                            // println!("sk epoch info: {:?}", epinf);
                        }
                    } else {
                        // println!("No epoch info ({})", ep)
                    }
                }
                a_comp_hist
                    .entry(exposed_a + exposed_b)
                    .and_modify(|count| *count += 1)
                    .or_insert(1);
            }
        }
    }

    for (_ep, cs) in blake_compromises_that_heal_at {
        for c in cs {
            if let Some(active_ep) = epoch_info.get(&c.active_epoch) {
                if active_ep.b_end < c.b_sent {
                    // println!("skip epoch info B1 ep {}", c.active_epoch);
                    continue;
                }
                let symratchet_exposed_b_msgs = active_ep.b_end.saturating_sub(c.b_sent);

                if active_ep.a_end < c.b_rcvd {
                    // println!("skip epoch info B2 ep {} (a_end: {} b_rcvd: {})", c.active_epoch, active_ep.a_end, c.b_rcvd);
                    continue;
                }
                let symratchet_exposed_a_msgs = active_ep.a_end.saturating_sub(c.b_rcvd);
                let mut exposed_b = symratchet_exposed_b_msgs;
                let mut exposed_a = symratchet_exposed_a_msgs;
                for ep in c.exposed_epochs {
                    //(c.active_epoch+1)..c.heals_at { // in c.exposed_epochs
                    if let Some(epinf) = epoch_info.get(&ep) {
                        if epinf.a_end >= epinf.a_start && epinf.b_end >= epinf.b_start {
                            exposed_a += epinf.a_end - epinf.a_start;
                            exposed_b += epinf.b_end - epinf.b_start;
                        }
                    } else {
                        // println!("No epoch info ({})", ep)
                    }
                }
                b_comp_hist
                    .entry(exposed_a + exposed_b)
                    .and_modify(|count| *count += 1)
                    .or_insert(1);
            }
        }
    }

    let max_exposed = *std::cmp::max(
        a_comp_hist.last_key_value().unwrap().0,
        b_comp_hist.last_key_value().unwrap().0,
    );

    let mut hist = Vec::<HealingHistogramEntry>::new();

    for i in 0..=max_exposed {
        hist.push(HealingHistogramEntry {
            num_msgs: i,
            tot_by_a: *a_comp_hist.get(&i).unwrap_or(&0),
            tot_by_b: *b_comp_hist.get(&i).unwrap_or(&0),
        });
    }
    // orchestrator.print_key_history_lengths();
    // orchestrator.print_msg_queue_lengths();

    Ok(hist)
}

pub fn random_balanced<CKA, R>(rng: &mut R) -> Result<(), Error>
where
    CKA: MessagingScka + MessagingCkaVulnerability,
    <CKA as MessagingScka>::CkaOutput: PartialEq + Debug,
    R: CryptoRng,
{
    let mut a_tot = 0;
    let mut b_tot = 0;
    let mut orchestrator = OrchestratorBase::<CKA>::new(rng)?;
    for _i in 0..10000 {
        let rnd: u32 = rng.next_u32();
        let use_alex = rnd & 0x1 != 0;
        let do_receive = rnd & 0x6 != 0;
        let do_send = rnd & 0x8 != 0;
        if do_receive {
            // orchestrator.receive_in_order(use_alex)?;
            orchestrator.receive_all(use_alex)?;
        }
        if do_send {
            orchestrator.send(use_alex, rng)?;
            if use_alex {
                a_tot += 1;
            } else {
                b_tot += 1;
            }
        }
        orchestrator.key_history_is_consistent();
    }
    orchestrator.print_key_history_lengths();
    orchestrator.print_msg_queue_lengths();
    println!("Alex sent {a_tot} Blake sent {b_tot}");
    Ok(())
}

pub fn random_balanced_out_of_order<CKA, R>(rng: &mut R) -> Result<(), Error>
where
    CKA: MessagingScka + MessagingCkaVulnerability,
    <CKA as MessagingScka>::CkaOutput: PartialEq + Debug,
    R: CryptoRng,
{
    let mut orchestrator = OrchestratorBase::<CKA>::new(rng)?;
    for _i in 0..10000 {
        let rnd = rng.next_u32();
        let use_alex = rnd & 0x1 == 0;
        let do_send = rnd & 0x2 != 0;
        let do_ooo = rnd & 0x4 == 0;
        let rcv_all = rnd & 0xF8 == 0;
        if do_ooo {
            if orchestrator.qlen(use_alex) > 0 {
                orchestrator.receive_at(
                    use_alex,
                    ((rnd >> 8) as usize) % orchestrator.qlen(use_alex),
                )?;
            }
        } else if rcv_all {
            orchestrator.receive_all(use_alex)?;
        } else {
            orchestrator.receive_in_order(use_alex)?;
        }
        if do_send {
            orchestrator.send(use_alex, rng)?;
        }
        orchestrator.key_history_is_consistent();
    }
    orchestrator.print_key_history_lengths();
    orchestrator.print_msg_queue_lengths();
    Ok(())
}

pub fn chaos<CKA, R>(num_ticks: usize, rng: &mut R) -> Result<(), Error>
where
    CKA: MessagingScka + MessagingCkaVulnerability,
    <CKA as MessagingScka>::CkaOutput: PartialEq + Debug,
    R: CryptoRng,
{
    let ooo_prob = 0.7;
    let send_limit = 10;
    let drop_message_prob = 0.1;
    let mut orchestrator = OrchestratorBase::<CKA>::new(rng)?;
    for i in 0..num_ticks {
        let use_alex = if i % 100 < 50 {
            rng.random_bool(0.25)
        } else {
            rng.random_bool(0.75)
        };

        // receive out of order
        if rng.random_bool(ooo_prob) {
            let qlen = orchestrator.qlen(use_alex);
            if qlen > 0 {
                let _received =
                    orchestrator.receive_at(use_alex, rng.next_u32() as usize % qlen)?;
            }
        }
        let num_to_send = rng.next_u32() % send_limit;
        for _ in 0..num_to_send {
            orchestrator.send(use_alex, rng)?;
            if rng.random_bool(drop_message_prob) {
                orchestrator.drop_message_at(use_alex, 0);
            }
        }

        // don't let queue get too big
        loop {
            let qlen = orchestrator.qlen(use_alex);
            if qlen > 20 {
                let _received =
                    orchestrator.receive_at(use_alex, rng.next_u32() as usize % qlen)?;
            } else {
                break;
            }
        }

        orchestrator.key_history_is_consistent();
    }
    orchestrator.print_key_history_lengths();
    orchestrator.print_msg_queue_lengths();
    Ok(())
}
54
src/test/pingpong_messaging_behavior.rs
Normal file
@ -0,0 +1,54 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

use super::messaging_behavior;
use rand_distr::{Binomial, Distribution};

use super::messaging_behavior::{Agent, MessagingBehavior};

pub struct PingPongMessagingBehavior {
    window_size: u64,
    window_variance: u64,
    agent: Agent,
    sends_remaining: u64,
}

impl PingPongMessagingBehavior {
    pub fn new(window_size: u64, window_variance: u64) -> Self {
        assert!(window_variance <= window_size);
        Self {
            window_size,
            window_variance,
            agent: Agent::Blake,
            sends_remaining: 0,
        }
    }

    fn switch_agent(&mut self) {
        self.agent = match self.agent {
            Agent::Alex => Agent::Blake,
            Agent::Blake => Agent::Alex,
        }
    }
}

impl MessagingBehavior for PingPongMessagingBehavior {
    fn next_commands<R: rand_core::CryptoRng>(
        &mut self,
        rng: &mut R,
    ) -> Vec<super::messaging_behavior::Command> {
        let mut cmds = Vec::<messaging_behavior::Command>::new();
        if self.sends_remaining == 0 {
            self.switch_agent();
            cmds.push(messaging_behavior::Command::ReceiveAll(self.agent));

            let bin = Binomial::new(self.window_variance, 0.5).unwrap();
            let delta = bin.sample(rng);
            self.sends_remaining = self.window_size + delta;
        } else {
            cmds.push(messaging_behavior::Command::Send(self.agent));
            self.sends_remaining -= 1;
        }
        cmds
    }
}
53
src/test/scka.rs
Normal file
@ -0,0 +1,53 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

use rand_core::CryptoRng;

use crate::{Epoch, Error, Secret};

pub struct SendOutput {
    pub output_key: Option<(Epoch, Secret)>,
    pub sending_epoch: Epoch,
}

pub struct ReceiveOutput {
    pub output_key: Option<(Epoch, Secret)>,
    pub receiving_epoch: Epoch,
}

// Sparse continuous key agreement
pub trait Scka {
    type Message: SckaMessage;

    fn scka_send<R: CryptoRng>(
        self,
        rng: &mut R,
    ) -> Result<(SendOutput, Self::Message, Self), Error>
    where
        Self: Sized;
    fn scka_recv(self, msg: &Self::Message) -> Result<(ReceiveOutput, Self), Error>
    where
        Self: Sized;
}

pub trait SckaMessage {
    fn epoch(&self) -> Epoch;
}

pub trait SckaInitializer {
    // Note: in the paper we pass in an encapsulation key here to support more
    // general protocols. We will add this if it is needed.
    fn init_a<R: CryptoRng>(rng: &mut R) -> Result<Self, Error>
    where
        Self: Sized;

    // Note: in the paper we pass in a decapsulation key here to support more
    // general protocols. We will add this if it is needed.
    fn init_b<R: CryptoRng>(rng: &mut R) -> Result<Self, Error>
    where
        Self: Sized;
}

pub trait SckaVulnerability {
    fn vulnerable_epochs(&self) -> Vec<Epoch>;
}
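
// (Illustrative sketch, not part of the original commit: a minimal round trip over the
// traits above, relying only on the methods defined in this file. The helper name
// `one_send_recv_round` and its `#[cfg(test)]` placement are assumptions.)
#[cfg(test)]
#[allow(dead_code)]
fn one_send_recv_round<S, R>(sender: S, receiver: S, rng: &mut R) -> Result<(S, S), Error>
where
    S: Scka,
    R: CryptoRng,
{
    // The sender emits a message (and possibly a key for an epoch it just closed).
    let (sent, msg, sender) = sender.scka_send(rng)?;
    // The receiver processes the message (and possibly outputs a key as well).
    let (received, receiver) = receiver.scka_recv(&msg)?;
    // When both sides output a key in the same round, the epochs they report must line up.
    if let (Some((sent_epoch, _)), Some((recv_epoch, _))) = (sent.output_key, received.output_key)
    {
        assert_eq!(sent_epoch, recv_epoch);
    }
    Ok((sender, receiver))
}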
75
src/test/v1_impls.rs
Normal file
@ -0,0 +1,75 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

use crate::v1::chunked::states;
use crate::{
    test::scka::{Scka, SckaInitializer, SckaVulnerability},
    Epoch, Error,
};
use rand_core::CryptoRng;

use super::scka::{ReceiveOutput, SckaMessage, SendOutput};

impl Scka for states::States {
    type Message = states::Message;

    fn scka_send<R: CryptoRng>(
        self,
        rng: &mut R,
    ) -> Result<(SendOutput, states::Message, Self), Error> {
        let states::Send { msg, key, state } = self.send(rng)?;

        Ok((
            SendOutput {
                output_key: key.map(|es| (es.epoch, es.secret)),
                sending_epoch: msg.epoch - 1,
            },
            msg,
            state,
        ))
    }

    fn scka_recv(self, msg: &states::Message) -> Result<(ReceiveOutput, Self), Error>
    where
        Self: Sized,
    {
        let states::Recv { key, state } = self.recv(msg)?;
        Ok((
            ReceiveOutput {
                output_key: key.map(|es| (es.epoch, es.secret)),
                receiving_epoch: msg.epoch - 1,
            },
            state,
        ))
    }
}

impl SckaVulnerability for states::States {
    fn vulnerable_epochs(&self) -> Vec<Epoch> {
        states::States::vulnerable_epochs(self)
    }
}

impl SckaInitializer for states::States {
    fn init_a<R: CryptoRng>(_rng: &mut R) -> Result<Self, Error>
    where
        Self: Sized,
    {
        // TODO: pass in real auth key
        Ok(states::States::init_a(b"1"))
    }

    fn init_b<R: CryptoRng>(_rng: &mut R) -> Result<Self, Error>
    where
        Self: Sized,
    {
        // TODO: pass in real auth key
        Ok(states::States::init_b(b"1"))
    }
}

impl SckaMessage for states::Message {
    fn epoch(&self) -> Epoch {
        self.epoch
    }
}
4
src/test/x25519_scka.rs
Normal file
@ -0,0 +1,4 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

pub(crate) mod states;
416
src/test/x25519_scka/states.rs
Normal file
@ -0,0 +1,416 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

#![allow(clippy::comparison_chain)]
#![cfg(test)]
use crate::test::scka::{
    ReceiveOutput, Scka, SckaInitializer, SckaMessage, SckaVulnerability, SendOutput,
};
use crate::Epoch;
use curve25519_dalek::{
    ristretto::CompressedRistretto, scalar::Scalar, traits::Identity, RistrettoPoint,
};
use rand_08::rngs::OsRng as OsRngFromRand08;
use rand_core::CryptoRng;
use sha2::Digest;

const X25519_KEYTYPE: u8 = 1u8;

pub type Secret = Vec<u8>;

pub struct Message {
    pub epoch: Epoch,
    pub pubkey: [u8; 33],
}

impl SckaMessage for Message {
    fn epoch(&self) -> Epoch {
        self.epoch
    }
}

#[derive(Clone)]
pub enum States {
    Send(Send),
    Recv(Recv),
    UninitSend(UninitSend),
    UninitRecv(UninitRecv),
}

impl From<Send> for States {
    fn from(value: Send) -> Self {
        States::Send(value)
    }
}

impl From<Recv> for States {
    fn from(value: Recv) -> Self {
        States::Recv(value)
    }
}

impl From<UninitSend> for States {
    fn from(value: UninitSend) -> Self {
        States::UninitSend(value)
    }
}

impl From<UninitRecv> for States {
    fn from(value: UninitRecv) -> Self {
        States::UninitRecv(value)
    }
}

impl States {
    pub fn init_a() -> Self {
        States::UninitSend(UninitSend { epoch: 0 })
    }
    pub fn init_b() -> Self {
        States::UninitRecv(UninitRecv { epoch: 0 })
    }

    pub fn sending_epoch(&self) -> Epoch {
        match self {
            States::Send(state) => state.epoch,
            States::Recv(state) => state.epoch - 1,
            States::UninitSend(_state) => 0,
            States::UninitRecv(_state) => 0,
        }
    }

    pub fn receiving_epoch(&self) -> Epoch {
        match self {
            States::Send(state) => state.epoch - 1,
            States::Recv(state) => state.epoch,
            States::UninitSend(_state) => 0,
            States::UninitRecv(_state) => 0,
        }
    }

    pub fn epoch(&self) -> Epoch {
        match self {
            States::Send(state) => state.epoch,
            States::Recv(state) => state.epoch,
            States::UninitSend(state) => state.epoch,
            States::UninitRecv(state) => state.epoch,
        }
    }
}

#[derive(Clone)]
pub struct Send {
    pub(super) epoch: Epoch,
    pub(super) remote_public: RistrettoPoint,
}

impl Send {
    fn send<R: CryptoRng>(self, _rng: &mut R) -> (Option<(Epoch, Secret)>, Message, States) {
        let mut secret = Scalar::random(&mut OsRngFromRand08);
        let public = RistrettoPoint::mul_base(&secret);

        let local_public = serialize_public_key(public);

        let msg = Message {
            epoch: self.epoch,
            pubkey: local_public,
        };

        // compute the shared secret to output
        let shared_secret = (secret * self.remote_public).compress();
        let shared_secret = shared_secret.as_bytes();

        // println!(
        //     "Send secret output ({}, {:?})",
        //     self.epoch,
        //     shared_secret.split_at(5).0
        // );

        // update the private key for forward secrecy
        let secret_hash: [u8; 64] = sha2::Sha512::digest(shared_secret).into();
        let adjustment_scalar = Scalar::from_bytes_mod_order_wide(&secret_hash);
        secret *= adjustment_scalar;

        let next = Recv {
            epoch: self.epoch + 1,
            local_public,
            secret,
        };

        (Some((self.epoch, shared_secret.to_vec())), msg, next.into())
    }

    fn recv(self, _msg: &Message) -> (Option<(Epoch, Secret)>, States) {
        (None, self.into())
    }
}

#[derive(Clone)]
pub struct Recv {
    pub(super) epoch: Epoch,
    pub(super) local_public: [u8; 33],
    pub(super) secret: Scalar,
}

impl Recv {
    fn send<R: CryptoRng>(self, _rng: &mut R) -> (Option<(Epoch, Secret)>, Message, States) {
        let msg = Message {
            epoch: self.epoch - 1,
            pubkey: self.local_public,
        };
        (None, msg, self.into())
    }

    fn recv(self, msg: &Message) -> (Option<(Epoch, Secret)>, States) {
        if msg.pubkey[0] != X25519_KEYTYPE {
            todo!("add error for unrecognized key")
        }

        if msg.epoch < self.epoch {
            println!(
                "recv earlier epoch {} < {}, ignoring",
                msg.epoch, self.epoch
            );
            return (None, self.into());
        } else if msg.epoch > self.epoch {
            todo!("create invalid epoch error");
        }
        let remote_public = CompressedRistretto::from_slice(&msg.pubkey[1..33])
            .expect("ristretto properly serialized")
            .decompress()
            .unwrap();
        let shared_secret = (self.secret * remote_public).compress();
        let shared_secret = shared_secret.as_bytes();

        // println!(
        //     "Recv secret output ({},{:?})",
        //     self.epoch,
        //     shared_secret.split_at(5).0
        // );

        // update the remote public key for forward secrecy
        let secret_hash: [u8; 64] = sha2::Sha512::digest(shared_secret).into();
        let adjustment_scalar = Scalar::from_bytes_mod_order_wide(&secret_hash);
        let remote_public = adjustment_scalar * remote_public;

        let next = Send {
            epoch: self.epoch + 1,
            remote_public,
        };

        (Some((self.epoch, shared_secret.to_vec())), next.into())
    }
}
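
// (Illustrative note, not part of the original commit: the ratchet steps in `Send::send` and
// `Recv::recv` above stay in sync because both sides derive the same adjustment scalar
// h = H(shared_secret). The sending side replaces its fresh secret s by s*h, while the
// receiving side replaces the sender's public point s*G by h*(s*G) = (s*h)*G, so the
// adjusted key pair still agrees when the roles flip in the next epoch.)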

#[derive(Clone)]
pub struct UninitSend {
    pub(super) epoch: Epoch,
}

impl UninitSend {
    fn send<R: CryptoRng>(self, _rng: &mut R) -> (Option<(Epoch, Secret)>, Message, States) {
        let secret = Scalar::random(&mut OsRngFromRand08);
        let public = RistrettoPoint::mul_base(&secret);

        let local_public = serialize_public_key(public);

        let msg = Message {
            epoch: self.epoch,
            pubkey: local_public,
        };

        let next = Recv {
            epoch: self.epoch + 1,
            local_public,
            secret,
        };

        (None, msg, next.into())
    }

    fn recv(self, _msg: &Message) -> (Option<(Epoch, Secret)>, States) {
        (None, self.into())
    }
}

#[derive(Clone)]
pub struct UninitRecv {
    pub(super) epoch: Epoch,
}

impl UninitRecv {
    fn send<R: CryptoRng>(self, _rng: &mut R) -> (Option<(Epoch, Secret)>, Message, States) {
        println!("UninitRecv::send() epoch {}", self.epoch);
        let msg = Message {
            epoch: self.epoch,
            pubkey: serialize_public_key(RistrettoPoint::identity()),
        };
        (None, msg, self.into())
    }

    fn recv(self, msg: &Message) -> (Option<(Epoch, Secret)>, States) {
        if msg.pubkey[0] != X25519_KEYTYPE {
            todo!("add error for unrecognized key")
        }

        if msg.epoch < self.epoch {
            return (None, self.into());
        } else if msg.epoch > self.epoch {
            todo!("create invalid epoch error");
        }

        let remote_public = CompressedRistretto::from_slice(&msg.pubkey[1..33])
            .expect("ristretto properly serialized")
            .decompress()
            .unwrap();

        let next = Send {
            epoch: self.epoch + 1,
            remote_public,
        };

        (None, next.into())
    }
}

fn serialize_public_key(pubkey: RistrettoPoint) -> [u8; 33] {
    let mut result = [0u8; 33];
    let compressed = pubkey.compress();
    result[0] = X25519_KEYTYPE;
    result[1..].copy_from_slice(compressed.as_bytes());
    result
}
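
// (Illustrative sketch, not part of the original commit: the inverse of
// `serialize_public_key`, matching the parsing done in `recv` above. The name
// `deserialize_public_key` is hypothetical.)
#[allow(dead_code)]
fn deserialize_public_key(bytes: &[u8; 33]) -> Option<RistrettoPoint> {
    if bytes[0] != X25519_KEYTYPE {
        return None;
    }
    // The remaining 32 bytes are a compressed Ristretto point.
    CompressedRistretto::from_slice(&bytes[1..33]).ok()?.decompress()
}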

impl Scka for States {
    type Message = Message;

    fn scka_send<R: CryptoRng>(
        self,
        rng: &mut R,
    ) -> Result<(SendOutput, Self::Message, Self), crate::Error>
    where
        Self: Sized,
    {
        let (output_key, msg, state) = match self {
            States::Send(state) => state.send(rng),
            States::Recv(state) => state.send(rng),
            States::UninitSend(state) => state.send(rng),
            States::UninitRecv(state) => state.send(rng),
        };
        let so = SendOutput {
            output_key,
            sending_epoch: state.sending_epoch(),
        };
        Ok((so, msg, state))
    }

    fn scka_recv(self, msg: &Self::Message) -> Result<(ReceiveOutput, Self), crate::Error>
    where
        Self: Sized,
    {
        let (output_key, state) = match self {
            States::Send(state) => state.recv(msg),
            States::Recv(state) => state.recv(msg),
            States::UninitSend(state) => state.recv(msg),
            States::UninitRecv(state) => state.recv(msg),
        };

        let ro = ReceiveOutput {
            output_key,
            receiving_epoch: state.receiving_epoch(),
        };
        Ok((ro, state))
    }
}

#[cfg(test)]
impl SckaInitializer for States {
    fn init_a<R: CryptoRng>(_: &mut R) -> Result<Self, crate::Error>
    where
        Self: Sized,
    {
        Ok(States::init_a())
    }

    fn init_b<R: CryptoRng>(_: &mut R) -> Result<Self, crate::Error>
    where
        Self: Sized,
    {
        Ok(States::init_b())
    }
}

#[cfg(test)]
impl SckaVulnerability for States {
    fn vulnerable_epochs(&self) -> Vec<Epoch> {
        vec![self.epoch()]
    }
}

#[cfg(test)]
mod test {

    use crate::test::messaging_scka::GenericMessagingScka;
    use crate::test::x25519_scka::states;
    use crate::test::{onlineoffline::OnlineOfflineMessagingBehavior, orchestrator};
    use crate::Error;
    use rand::TryRngCore;

    use rand_core::OsRng;

    #[test]
    fn balanced_healing() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_healing_test::<Cka, _>(0.5, &mut rng)
    }

    #[test]
    fn random_balanced() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_balanced::<Cka, _>(&mut rng)
    }

    #[test]
    fn chaos() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::chaos::<Cka, _>(10000, &mut rng)
    }

    #[test]
    fn onlineoffline_healing() {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut mp = OnlineOfflineMessagingBehavior::new([0.04, 0.04], [0.05, 0.05]);
        let mut rng = OsRng.unwrap_err();
        let hist = orchestrator::controlled_messaging_healing_test::<
            Cka,
            OnlineOfflineMessagingBehavior,
            _,
        >(&mut mp, 10000, &mut rng)
        .expect("should run");

        orchestrator::print_histogram(&hist);
        orchestrator::print_healing_stats(&orchestrator::stats_from_histogram(&hist)[0]);
    }

    #[test]
    fn random_balanced_out_of_order() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_balanced_out_of_order::<Cka, _>(&mut rng)
    }

    #[test]
    fn random_slow_alex_healing() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_healing_test::<Cka, _>(0.33, &mut rng)
    }
}
33
src/util.rs
Normal file
@ -0,0 +1,33 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

// From libcrux-ml-kem/src/constant_time_ops.rs

/// Return 1 if `value` is not zero and 0 otherwise.
fn inz(value: u8) -> u8 {
    let value = value as u16;

    let result = ((value | (!value).wrapping_add(1)) >> 8) & 1;

    result as u8
}

#[inline(never)] // Don't inline this to avoid that the compiler optimizes this out.
fn is_non_zero(value: u8) -> u8 {
    core::hint::black_box(inz(value))
}

/// Return 1 if the bytes of `lhs` and `rhs` do not exactly
/// match and 0 otherwise.
#[cfg_attr(hax, hax_lib::requires(
    lhs.len() == rhs.len()
))]
pub(crate) fn compare(lhs: &[u8], rhs: &[u8]) -> u8 {
    let mut r: u8 = 0;

    for i in 0..lhs.len() {
        r |= lhs[i] ^ rhs[i];
    }

    is_non_zero(r)
}
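
// (Illustrative notes, not part of the original commit.)
//
// Why `inz` is branch-free: widened to u16, `value | value.wrapping_neg()` has bit 8 set
// exactly when `value != 0` (e.g. value = 1: 0x0001 | 0xFFFF = 0xFFFF, bit 8 set;
// value = 0: 0x0000 | 0x0000 = 0x0000, bit 8 clear), so shifting right by 8 and masking
// with 1 yields the 0/1 flag without a data-dependent branch.
//
// A minimal usage sketch for `compare`; the test module and name are hypothetical.
#[cfg(test)]
mod compare_sketch {
    use super::compare;

    #[test]
    fn compare_is_zero_only_for_equal_inputs() {
        assert_eq!(compare(b"abc", b"abc"), 0);
        assert_eq!(compare(b"abc", b"abd"), 1);
    }
}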
68
src/v1.rs
Normal file
@ -0,0 +1,68 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

pub(crate) mod chunked;
pub(crate) mod unchunked;

#[cfg(test)]
mod test {
    use crate::test::messaging_scka::GenericMessagingScka;
    use crate::test::{onlineoffline::OnlineOfflineMessagingBehavior, orchestrator};
    use crate::v1::chunked::states;
    use crate::Error;

    use rand::TryRngCore;
    use rand_core::OsRng;

    #[test]
    fn balanced_healing() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_healing_test::<Cka, _>(0.5, &mut rng)
    }

    #[test]
    fn random_balanced() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_balanced::<Cka, _>(&mut rng)
    }

    #[test]
    fn chaos() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::chaos::<Cka, _>(10000, &mut rng)
    }

    #[test]
    fn onlineoffline_healing_unidir() {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut mp = OnlineOfflineMessagingBehavior::new([0.04, 0.04], [0.05, 0.05]);
        let mut rng = OsRng.unwrap_err();
        orchestrator::controlled_messaging_healing_test::<Cka, OnlineOfflineMessagingBehavior, _>(
            &mut mp, 100000, &mut rng,
        )
        .expect("should run");
    }

    #[test]
    fn random_balanced_out_of_order() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_balanced_out_of_order::<Cka, _>(&mut rng)
    }

    #[test]
    fn random_slow_alex_healing_auth_bidir() -> Result<(), Error> {
        type Scka = states::States;
        type Cka = GenericMessagingScka<Scka>;
        let mut rng = OsRng.unwrap_err();
        orchestrator::random_healing_test::<Cka, _>(0.33, &mut rng)
    }
}
6
src/v1/chunked.rs
Normal file
@ -0,0 +1,6 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

pub(crate) mod send_ct;
pub(crate) mod send_ek;
pub(crate) mod states;
308
src/v1/chunked/send_ct.rs
Normal file
@ -0,0 +1,308 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
mod serialize;
|
||||||
|
|
||||||
|
use super::send_ek;
|
||||||
|
use crate::encoding::polynomial;
|
||||||
|
use crate::encoding::{Chunk, Decoder, Encoder};
|
||||||
|
use crate::v1::unchunked::send_ct as unchunked;
|
||||||
|
use crate::{authenticator, incremental_mlkem768};
|
||||||
|
use crate::{Epoch, EpochSecret, Error};
|
||||||
|
use rand::{CryptoRng, Rng};
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct NoHeaderReceived {
|
||||||
|
pub(super) uc: unchunked::NoHeaderReceived,
|
||||||
|
// `receiving_hdr` only decodes messages of length `incremental_mlkem768::HEADER_SIZE + authenticator::Authenticator::MACSIZE`
|
||||||
|
pub(super) receiving_hdr: polynomial::PolyDecoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct HeaderReceived {
|
||||||
|
uc: unchunked::HeaderReceived,
|
||||||
|
// `receiving_ek` only decodes messages of length `incremental_mlkem768::ENCAPSULATION_KEY_SIZE`
|
||||||
|
receiving_ek: polynomial::PolyDecoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct Ct1Sampled {
|
||||||
|
uc: unchunked::Ct1Sent,
|
||||||
|
sending_ct1: polynomial::PolyEncoder,
|
||||||
|
// `receiving_ek` only decodes messages of length `incremental_mlkem768::ENCAPSULATION_KEY_SIZE`
|
||||||
|
receiving_ek: polynomial::PolyDecoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct EkReceivedCt1Sampled {
|
||||||
|
uc: unchunked::Ct1SentEkReceived,
|
||||||
|
sending_ct1: polynomial::PolyEncoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct Ct1Acknowledged {
|
||||||
|
uc: unchunked::Ct1Sent,
|
||||||
|
// `receiving_ek` only decodes messages of length `incremental_mlkem768::ENCAPSULATION_KEY_SIZE`
|
||||||
|
receiving_ek: polynomial::PolyDecoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct Ct2Sampled {
|
||||||
|
uc: unchunked::Ct2Sent,
|
||||||
|
sending_ct2: polynomial::PolyEncoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub enum NoHeaderReceivedRecvChunk {
|
||||||
|
StillReceiving(NoHeaderReceived),
|
||||||
|
Done(HeaderReceived),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl NoHeaderReceived {
|
||||||
|
pub fn new(auth_key: &[u8]) -> Self {
|
||||||
|
let decoder = polynomial::PolyDecoder::new(
|
||||||
|
incremental_mlkem768::HEADER_SIZE + authenticator::Authenticator::MACSIZE,
|
||||||
|
);
|
||||||
|
hax_lib::assume!(decoder.is_ok());
|
||||||
|
NoHeaderReceived {
|
||||||
|
uc: unchunked::NoHeaderReceived::new(auth_key),
|
||||||
|
receiving_hdr: decoder.expect("should be able to decode header size"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(epoch == self.uc.epoch)]
|
||||||
|
pub fn recv_hdr_chunk(
|
||||||
|
self,
|
||||||
|
epoch: Epoch,
|
||||||
|
chunk: &Chunk,
|
||||||
|
) -> Result<NoHeaderReceivedRecvChunk, Error> {
|
||||||
|
assert_eq!(epoch, self.uc.epoch);
|
||||||
|
let Self {
|
||||||
|
uc,
|
||||||
|
mut receiving_hdr,
|
||||||
|
} = self;
|
||||||
|
receiving_hdr.add_chunk(chunk);
|
||||||
|
hax_lib::assume!(
|
||||||
|
receiving_hdr.get_pts_needed() <= polynomial::MAX_STORED_POLYNOMIAL_DEGREE_V1
|
||||||
|
);
|
||||||
|
if let Some(mut hdr) = receiving_hdr.decoded_message() {
|
||||||
|
let mac: authenticator::Mac = hdr.drain(incremental_mlkem768::HEADER_SIZE..).collect();
|
||||||
|
hax_lib::assume!(hdr.len() == 64 && mac.len() == authenticator::Authenticator::MACSIZE);
|
||||||
|
let receiving_ek =
|
||||||
|
polynomial::PolyDecoder::new(incremental_mlkem768::ENCAPSULATION_KEY_SIZE);
|
||||||
|
hax_lib::assume!(receiving_ek.is_ok());
|
||||||
|
Ok(NoHeaderReceivedRecvChunk::Done(HeaderReceived {
|
||||||
|
uc: uc.recv_header(epoch, hdr, &mac)?,
|
||||||
|
receiving_ek: receiving_ek.expect("should be able to decode EncapsulationKey size"),
|
||||||
|
}))
|
||||||
|
} else {
|
||||||
|
Ok(NoHeaderReceivedRecvChunk::StillReceiving(Self {
|
||||||
|
uc,
|
||||||
|
receiving_hdr,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn epoch(&self) -> Epoch {
|
||||||
|
self.uc.epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Once the header has been received, it seems like we could start receiving
|
||||||
|
// EK chunks and should handle that possibility. However, this is not actually
|
||||||
|
// correct, as the send_ek side won't start sending EK chunks until it receives
|
||||||
|
// the first CT0 chunk. Thus, send_ct1_chunk is the only state transition
|
||||||
|
// we need to implement here.
|
||||||
|
impl HeaderReceived {
|
||||||
|
pub fn send_ct1_chunk<R: Rng + CryptoRng>(
|
||||||
|
self,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> (Ct1Sampled, Chunk, EpochSecret) {
|
||||||
|
let Self { uc, receiving_ek } = self;
|
||||||
|
|
||||||
|
let (uc, ct1, epoch_secret) = uc.send_ct1(rng);
|
||||||
|
let encoder = polynomial::PolyEncoder::encode_bytes(&ct1);
|
||||||
|
hax_lib::assume!(encoder.is_ok());
|
||||||
|
let mut sending_ct1 = encoder.expect("should be able to send CTSIZE");
|
||||||
|
let chunk = sending_ct1.next_chunk();
|
||||||
|
(
|
||||||
|
Ct1Sampled {
|
||||||
|
uc,
|
||||||
|
sending_ct1,
|
||||||
|
receiving_ek,
|
||||||
|
},
|
||||||
|
chunk,
|
||||||
|
epoch_secret,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn epoch(&self) -> Epoch {
|
||||||
|
self.uc.epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Consider fixing this, but since this is only used as a return value it doesn't take too much memory.
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
|
pub enum Ct1SampledRecvChunk {
|
||||||
|
StillReceivingStillSending(Ct1Sampled),
|
||||||
|
StillReceiving(Ct1Acknowledged),
|
||||||
|
StillSending(EkReceivedCt1Sampled),
|
||||||
|
Done(Ct2Sampled),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::fstar::verification_status(lax)]
|
||||||
|
fn send_ct2_encoder(ct2: &[u8], mac: &[u8]) -> polynomial::PolyEncoder {
|
||||||
|
polynomial::PolyEncoder::encode_bytes(&[ct2, mac].concat()).expect("should be able to send ct2")
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl Ct1Sampled {
|
||||||
|
#[hax_lib::requires(epoch == self.uc.epoch)]
|
||||||
|
pub fn recv_ek_chunk(
|
||||||
|
self,
|
||||||
|
epoch: Epoch,
|
||||||
|
chunk: &Chunk,
|
||||||
|
ct1_ack: bool,
|
||||||
|
) -> Result<Ct1SampledRecvChunk, Error> {
|
||||||
|
let Self {
|
||||||
|
uc,
|
||||||
|
mut receiving_ek,
|
||||||
|
sending_ct1,
|
||||||
|
} = self;
|
||||||
|
receiving_ek.add_chunk(chunk);
|
||||||
|
hax_lib::assume!(
|
||||||
|
receiving_ek.get_pts_needed() <= polynomial::MAX_STORED_POLYNOMIAL_DEGREE_V1
|
||||||
|
);
|
||||||
|
Ok(if let Some(decoded) = receiving_ek.decoded_message() {
|
||||||
|
hax_lib::assume!(decoded.len() == 1152);
|
||||||
|
let uc = uc.recv_ek(epoch, decoded)?;
|
||||||
|
if ct1_ack {
|
||||||
|
let (uc, ct2, mac) = uc.send_ct2();
|
||||||
|
Ct1SampledRecvChunk::Done(Ct2Sampled {
|
||||||
|
uc,
|
||||||
|
                    sending_ct2: send_ct2_encoder(&ct2, &mac),
                })
            } else {
                Ct1SampledRecvChunk::StillSending(EkReceivedCt1Sampled { uc, sending_ct1 })
            }
        } else if ct1_ack {
            Ct1SampledRecvChunk::StillReceiving(Ct1Acknowledged { uc, receiving_ek })
        } else {
            Ct1SampledRecvChunk::StillReceivingStillSending(Self {
                uc,
                receiving_ek,
                sending_ct1,
            })
        })
    }

    pub fn send_ct1_chunk(self) -> (Ct1Sampled, Chunk) {
        let Self {
            uc,
            mut sending_ct1,
            receiving_ek,
        } = self;
        let chunk = sending_ct1.next_chunk();
        (
            Ct1Sampled {
                uc,
                sending_ct1,
                receiving_ek,
            },
            chunk,
        )
    }

    pub fn epoch(&self) -> Epoch {
        self.uc.epoch
    }
}

#[hax_lib::attributes]
impl EkReceivedCt1Sampled {
    pub fn send_ct1_chunk(self) -> (EkReceivedCt1Sampled, Chunk) {
        let Self {
            uc,
            mut sending_ct1,
        } = self;
        let chunk = sending_ct1.next_chunk();
        (EkReceivedCt1Sampled { uc, sending_ct1 }, chunk)
    }

    #[hax_lib::requires(epoch == self.uc.epoch)]
    pub fn recv_ct1_ack(self, epoch: Epoch) -> Ct2Sampled {
        assert_eq!(epoch, self.uc.epoch);
        let (uc, ct2, mac) = self.uc.send_ct2();
        Ct2Sampled {
            uc,
            sending_ct2: send_ct2_encoder(&ct2, &mac),
        }
    }

    pub fn epoch(&self) -> Epoch {
        self.uc.epoch
    }
}

#[allow(clippy::large_enum_variant)]
pub enum Ct1AcknowledgedRecvChunk {
    StillReceiving(Ct1Acknowledged),
    Done(Ct2Sampled),
}

#[hax_lib::attributes]
impl Ct1Acknowledged {
    #[hax_lib::requires(epoch == self.uc.epoch)]
    pub fn recv_ek_chunk(
        self,
        epoch: Epoch,
        chunk: &Chunk,
    ) -> Result<Ct1AcknowledgedRecvChunk, Error> {
        let Self {
            uc,
            mut receiving_ek,
        } = self;
        receiving_ek.add_chunk(chunk);
        hax_lib::assume!(
            receiving_ek.get_pts_needed() <= polynomial::MAX_STORED_POLYNOMIAL_DEGREE_V1
        );
        Ok(if let Some(decoded) = receiving_ek.decoded_message() {
            hax_lib::assume!(decoded.len() == 1152);
            let uc = uc.recv_ek(epoch, decoded)?;
            let (uc, ct2, mac) = uc.send_ct2();
            Ct1AcknowledgedRecvChunk::Done(Ct2Sampled {
                uc,
                sending_ct2: send_ct2_encoder(&ct2, &mac),
            })
        } else {
            Ct1AcknowledgedRecvChunk::StillReceiving(Self { uc, receiving_ek })
        })
    }

    pub fn epoch(&self) -> Epoch {
        self.uc.epoch
    }
}

#[hax_lib::attributes]
impl Ct2Sampled {
    pub fn send_ct2_chunk(self) -> (Ct2Sampled, Chunk) {
        let Self {
            uc,
            mut sending_ct2,
        } = self;
        let chunk = sending_ct2.next_chunk();
        (Self { uc, sending_ct2 }, chunk)
    }

    #[hax_lib::requires(self.uc.epoch < u64::MAX && epoch == self.uc.epoch + 1)]
    pub fn recv_next_epoch(self, epoch: Epoch) -> send_ek::KeysUnsampled {
        let uc = self.uc.recv_next_epoch(epoch);
        send_ek::KeysUnsampled { uc }
    }

    pub fn epoch(&self) -> Epoch {
        self.uc.epoch
    }
}
165
src/v1/chunked/send_ct/serialize.rs
Normal file

@@ -0,0 +1,165 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
use crate::encoding::polynomial;
|
||||||
|
use crate::proto::pq_ratchet as pqrpb;
|
||||||
|
use crate::v1::unchunked;
|
||||||
|
|
||||||
|
impl NoHeaderReceived {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::NoHeaderReceived {
|
||||||
|
pqrpb::v1_state::chunked::NoHeaderReceived {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
receiving_hdr: Some(self.receiving_hdr.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::NoHeaderReceived) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ct::NoHeaderReceived::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
receiving_hdr: polynomial::PolyDecoder::from_pb(
|
||||||
|
pb.receiving_hdr.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HeaderReceived {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::HeaderReceived {
|
||||||
|
pqrpb::v1_state::chunked::HeaderReceived {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
receiving_ek: Some(self.receiving_ek.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::HeaderReceived) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ct::HeaderReceived::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
receiving_ek: polynomial::PolyDecoder::from_pb(
|
||||||
|
pb.receiving_ek.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ct1Sampled {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::Ct1Sampled {
|
||||||
|
hax_lib::assume!(match self.sending_ct1.get_encoder_state() {
|
||||||
|
polynomial::EncoderState::Points(points) => hax_lib::prop::forall(
|
||||||
|
|pts: &Vec<crate::encoding::gf::GF16>| hax_lib::prop::implies(
|
||||||
|
points.contains(pts),
|
||||||
|
pts.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)
|
||||||
|
),
|
||||||
|
polynomial::EncoderState::Polys(polys) =>
|
||||||
|
hax_lib::prop::forall(|poly: &polynomial::Poly| hax_lib::prop::implies(
|
||||||
|
polys.contains(poly),
|
||||||
|
poly.coefficients.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)),
|
||||||
|
});
|
||||||
|
pqrpb::v1_state::chunked::Ct1Sampled {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
sending_ct1: Some(self.sending_ct1.into_pb()),
|
||||||
|
receiving_ek: Some(self.receiving_ek.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::Ct1Sampled) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ct::Ct1Sent::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
sending_ct1: polynomial::PolyEncoder::from_pb(
|
||||||
|
pb.sending_ct1.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
receiving_ek: polynomial::PolyDecoder::from_pb(
|
||||||
|
pb.receiving_ek.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EkReceivedCt1Sampled {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::EkReceivedCt1Sampled {
|
||||||
|
hax_lib::assume!(match self.sending_ct1.get_encoder_state() {
|
||||||
|
polynomial::EncoderState::Points(points) => hax_lib::prop::forall(
|
||||||
|
|pts: &Vec<crate::encoding::gf::GF16>| hax_lib::prop::implies(
|
||||||
|
points.contains(pts),
|
||||||
|
pts.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)
|
||||||
|
),
|
||||||
|
polynomial::EncoderState::Polys(polys) =>
|
||||||
|
hax_lib::prop::forall(|poly: &polynomial::Poly| hax_lib::prop::implies(
|
||||||
|
polys.contains(poly),
|
||||||
|
poly.coefficients.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)),
|
||||||
|
});
|
||||||
|
pqrpb::v1_state::chunked::EkReceivedCt1Sampled {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
sending_ct1: Some(self.sending_ct1.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::EkReceivedCt1Sampled) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ct::Ct1SentEkReceived::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
sending_ct1: polynomial::PolyEncoder::from_pb(
|
||||||
|
pb.sending_ct1.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ct1Acknowledged {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::Ct1Acknowledged {
|
||||||
|
pqrpb::v1_state::chunked::Ct1Acknowledged {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
receiving_ek: Some(self.receiving_ek.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::Ct1Acknowledged) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ct::Ct1Sent::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
receiving_ek: polynomial::PolyDecoder::from_pb(
|
||||||
|
pb.receiving_ek.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ct2Sampled {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::Ct2Sampled {
|
||||||
|
hax_lib::assume!(match self.sending_ct2.get_encoder_state() {
|
||||||
|
polynomial::EncoderState::Points(points) => hax_lib::prop::forall(
|
||||||
|
|pts: &Vec<crate::encoding::gf::GF16>| hax_lib::prop::implies(
|
||||||
|
points.contains(pts),
|
||||||
|
pts.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)
|
||||||
|
),
|
||||||
|
polynomial::EncoderState::Polys(polys) =>
|
||||||
|
hax_lib::prop::forall(|poly: &polynomial::Poly| hax_lib::prop::implies(
|
||||||
|
polys.contains(poly),
|
||||||
|
poly.coefficients.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)),
|
||||||
|
});
|
||||||
|
pqrpb::v1_state::chunked::Ct2Sampled {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
sending_ct2: Some(self.sending_ct2.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::Ct2Sampled) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ct::Ct2Sent::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
sending_ct2: polynomial::PolyEncoder::from_pb(
|
||||||
|
pb.sending_ct2.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
239
src/v1/chunked/send_ek.rs
Normal file

@@ -0,0 +1,239 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
mod serialize;
|
||||||
|
|
||||||
|
use super::send_ct;
|
||||||
|
use crate::authenticator;
|
||||||
|
use crate::encoding::polynomial;
|
||||||
|
use crate::encoding::{Chunk, Decoder, Encoder};
|
||||||
|
use crate::incremental_mlkem768;
|
||||||
|
use crate::v1::unchunked::send_ek as unchunked;
|
||||||
|
use crate::{Epoch, EpochSecret, Error};
|
||||||
|
use rand::{CryptoRng, Rng};
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct KeysUnsampled {
|
||||||
|
pub(super) uc: unchunked::KeysUnsampled,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct KeysSampled {
|
||||||
|
uc: unchunked::HeaderSent,
|
||||||
|
sending_hdr: polynomial::PolyEncoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct HeaderSent {
|
||||||
|
uc: unchunked::EkSent,
|
||||||
|
sending_ek: polynomial::PolyEncoder,
|
||||||
|
// `receiving_ct1` only decodes messages of length `incremental_mlkem768::CIPHERTEXT1_SIZE`
|
||||||
|
receiving_ct1: polynomial::PolyDecoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct Ct1Received {
|
||||||
|
uc: unchunked::EkSentCt1Received,
|
||||||
|
sending_ek: polynomial::PolyEncoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct EkSentCt1Received {
|
||||||
|
uc: unchunked::EkSentCt1Received,
|
||||||
|
// `receiving_ct2` only decodes messages of length `incremental_mlkem768::CIPHERTEXT2_SIZE + authenticator::Authenticator::MACSIZE`
|
||||||
|
receiving_ct2: polynomial::PolyDecoder,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl KeysUnsampled {
|
||||||
|
pub fn new(auth_key: &[u8]) -> Self {
|
||||||
|
Self {
|
||||||
|
uc: unchunked::KeysUnsampled::new(auth_key),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn send_hdr_chunk<R: Rng + CryptoRng>(self, rng: &mut R) -> (KeysSampled, Chunk) {
|
||||||
|
let (uc, hdr, mac) = self.uc.send_header(rng);
|
||||||
|
let to_send = [hdr, mac].concat();
|
||||||
|
let encoder = polynomial::PolyEncoder::encode_bytes(&to_send);
|
||||||
|
hax_lib::assume!(encoder.is_ok());
|
||||||
|
let mut sending_hdr = encoder.expect("should be able to encode header size");
|
||||||
|
let chunk = sending_hdr.next_chunk();
|
||||||
|
(KeysSampled { uc, sending_hdr }, chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn epoch(&self) -> Epoch {
|
||||||
|
self.uc.epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl KeysSampled {
|
||||||
|
pub fn send_hdr_chunk(self) -> (KeysSampled, Chunk) {
|
||||||
|
let Self {
|
||||||
|
uc,
|
||||||
|
mut sending_hdr,
|
||||||
|
} = self;
|
||||||
|
let chunk = sending_hdr.next_chunk();
|
||||||
|
(KeysSampled { uc, sending_hdr }, chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(epoch == self.uc.epoch)]
|
||||||
|
pub fn recv_ct1_chunk(self, epoch: Epoch, chunk: &Chunk) -> HeaderSent {
|
||||||
|
assert_eq!(epoch, self.uc.epoch);
|
||||||
|
let decoder = polynomial::PolyDecoder::new(incremental_mlkem768::CIPHERTEXT1_SIZE);
|
||||||
|
hax_lib::assume!(decoder.is_ok());
|
||||||
|
let mut receiving_ct1 = decoder.expect("should be able to decode header size");
|
||||||
|
receiving_ct1.add_chunk(chunk);
|
||||||
|
let (uc, ek) = self.uc.send_ek();
|
||||||
|
|
||||||
|
let encoder = polynomial::PolyEncoder::encode_bytes(&ek);
|
||||||
|
hax_lib::assume!(encoder.is_ok());
|
||||||
|
let sending_ek = encoder.expect("should be able to send ek");
|
||||||
|
HeaderSent {
|
||||||
|
uc,
|
||||||
|
receiving_ct1,
|
||||||
|
sending_ek,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn epoch(&self) -> Epoch {
|
||||||
|
self.uc.epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
|
pub enum HeaderSentRecvChunk {
|
||||||
|
StillReceiving(HeaderSent),
|
||||||
|
Done(Ct1Received),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl HeaderSent {
|
||||||
|
pub fn send_ek_chunk(self) -> (HeaderSent, Chunk) {
|
||||||
|
let Self {
|
||||||
|
uc,
|
||||||
|
mut sending_ek,
|
||||||
|
receiving_ct1,
|
||||||
|
} = self;
|
||||||
|
let chunk = sending_ek.next_chunk();
|
||||||
|
(
|
||||||
|
HeaderSent {
|
||||||
|
uc,
|
||||||
|
sending_ek,
|
||||||
|
receiving_ct1,
|
||||||
|
},
|
||||||
|
chunk,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(epoch == self.uc.epoch)]
|
||||||
|
pub fn recv_ct1_chunk(self, epoch: Epoch, chunk: &Chunk) -> HeaderSentRecvChunk {
|
||||||
|
assert_eq!(epoch, self.uc.epoch);
|
||||||
|
let Self {
|
||||||
|
uc,
|
||||||
|
sending_ek,
|
||||||
|
mut receiving_ct1,
|
||||||
|
} = self;
|
||||||
|
receiving_ct1.add_chunk(chunk);
|
||||||
|
hax_lib::assume!(
|
||||||
|
receiving_ct1.get_pts_needed() <= polynomial::MAX_STORED_POLYNOMIAL_DEGREE_V1
|
||||||
|
);
|
||||||
|
if let Some(decoded) = receiving_ct1.decoded_message() {
|
||||||
|
hax_lib::assume!(decoded.len() == 960);
|
||||||
|
let uc = uc.recv_ct1(epoch, decoded);
|
||||||
|
HeaderSentRecvChunk::Done(Ct1Received { uc, sending_ek })
|
||||||
|
} else {
|
||||||
|
HeaderSentRecvChunk::StillReceiving(HeaderSent {
|
||||||
|
uc,
|
||||||
|
sending_ek,
|
||||||
|
receiving_ct1,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn epoch(&self) -> Epoch {
|
||||||
|
self.uc.epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl Ct1Received {
|
||||||
|
pub fn send_ek_chunk(self) -> (Ct1Received, Chunk) {
|
||||||
|
let Self { uc, mut sending_ek } = self;
|
||||||
|
let chunk = sending_ek.next_chunk();
|
||||||
|
(Ct1Received { uc, sending_ek }, chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(epoch == self.uc.epoch)]
|
||||||
|
pub fn recv_ct2_chunk(self, epoch: Epoch, chunk: &Chunk) -> EkSentCt1Received {
|
||||||
|
assert_eq!(epoch, self.uc.epoch);
|
||||||
|
let decoder = polynomial::PolyDecoder::new(
|
||||||
|
incremental_mlkem768::CIPHERTEXT2_SIZE + authenticator::Authenticator::MACSIZE,
|
||||||
|
);
|
||||||
|
hax_lib::assume!(decoder.is_ok());
|
||||||
|
let mut receiving_ct2 = decoder.expect("should be able to decode ct2+mac size");
|
||||||
|
receiving_ct2.add_chunk(chunk);
|
||||||
|
EkSentCt1Received {
|
||||||
|
uc: self.uc,
|
||||||
|
receiving_ct2,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn epoch(&self) -> Epoch {
|
||||||
|
self.uc.epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub enum EkSentCt1ReceivedRecvChunk {
|
||||||
|
StillReceiving(EkSentCt1Received),
|
||||||
|
Done((send_ct::NoHeaderReceived, EpochSecret)),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl EkSentCt1Received {
|
||||||
|
#[hax_lib::requires(epoch == self.uc.epoch)]
|
||||||
|
pub fn recv_ct2_chunk(
|
||||||
|
self,
|
||||||
|
epoch: Epoch,
|
||||||
|
chunk: &Chunk,
|
||||||
|
) -> Result<EkSentCt1ReceivedRecvChunk, Error> {
|
||||||
|
assert_eq!(epoch, self.uc.epoch);
|
||||||
|
let Self {
|
||||||
|
uc,
|
||||||
|
mut receiving_ct2,
|
||||||
|
} = self;
|
||||||
|
receiving_ct2.add_chunk(chunk);
|
||||||
|
hax_lib::assume!(
|
||||||
|
receiving_ct2.get_pts_needed() <= polynomial::MAX_STORED_POLYNOMIAL_DEGREE_V1
|
||||||
|
);
|
||||||
|
if let Some(mut ct2) = receiving_ct2.decoded_message() {
|
||||||
|
let mac: authenticator::Mac = ct2
|
||||||
|
.drain(incremental_mlkem768::CIPHERTEXT2_SIZE..)
|
||||||
|
.collect();
|
||||||
|
hax_lib::assume!(
|
||||||
|
ct2.len() == incremental_mlkem768::CIPHERTEXT2_SIZE
|
||||||
|
&& mac.len() == authenticator::Authenticator::MACSIZE
|
||||||
|
);
|
||||||
|
let (uc, sec) = uc.recv_ct2(ct2, mac)?;
|
||||||
|
let decoder = polynomial::PolyDecoder::new(
|
||||||
|
incremental_mlkem768::HEADER_SIZE + authenticator::Authenticator::MACSIZE,
|
||||||
|
);
|
||||||
|
hax_lib::assume!(decoder.is_ok());
|
||||||
|
Ok(EkSentCt1ReceivedRecvChunk::Done((
|
||||||
|
send_ct::NoHeaderReceived {
|
||||||
|
uc,
|
||||||
|
receiving_hdr: decoder.expect("should be able to decode header size"),
|
||||||
|
},
|
||||||
|
sec,
|
||||||
|
)))
|
||||||
|
} else {
|
||||||
|
Ok(EkSentCt1ReceivedRecvChunk::StillReceiving(
|
||||||
|
EkSentCt1Received { uc, receiving_ct2 },
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn epoch(&self) -> Epoch {
|
||||||
|
self.uc.epoch
|
||||||
|
}
|
||||||
|
}
|
||||||
137
src/v1/chunked/send_ek/serialize.rs
Normal file

@@ -0,0 +1,137 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
use crate::encoding::polynomial;
|
||||||
|
use crate::proto::pq_ratchet as pqrpb;
|
||||||
|
use crate::v1::unchunked;
|
||||||
|
|
||||||
|
impl KeysUnsampled {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::KeysUnsampled {
|
||||||
|
pqrpb::v1_state::chunked::KeysUnsampled {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::KeysUnsampled) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ek::KeysUnsampled::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl KeysSampled {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::KeysSampled {
|
||||||
|
hax_lib::assume!(match self.sending_hdr.get_encoder_state() {
|
||||||
|
polynomial::EncoderState::Points(points) => hax_lib::prop::forall(
|
||||||
|
|pts: &Vec<crate::encoding::gf::GF16>| hax_lib::prop::implies(
|
||||||
|
points.contains(pts),
|
||||||
|
pts.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)
|
||||||
|
),
|
||||||
|
polynomial::EncoderState::Polys(polys) =>
|
||||||
|
hax_lib::prop::forall(|poly: &polynomial::Poly| hax_lib::prop::implies(
|
||||||
|
polys.contains(poly),
|
||||||
|
poly.coefficients.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)),
|
||||||
|
});
|
||||||
|
pqrpb::v1_state::chunked::KeysSampled {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
sending_hdr: Some(self.sending_hdr.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::KeysSampled) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ek::HeaderSent::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
sending_hdr: polynomial::PolyEncoder::from_pb(
|
||||||
|
pb.sending_hdr.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HeaderSent {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::HeaderSent {
|
||||||
|
hax_lib::assume!(match self.sending_ek.get_encoder_state() {
|
||||||
|
polynomial::EncoderState::Points(points) => hax_lib::prop::forall(
|
||||||
|
|pts: &Vec<crate::encoding::gf::GF16>| hax_lib::prop::implies(
|
||||||
|
points.contains(pts),
|
||||||
|
pts.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)
|
||||||
|
),
|
||||||
|
polynomial::EncoderState::Polys(polys) =>
|
||||||
|
hax_lib::prop::forall(|poly: &polynomial::Poly| hax_lib::prop::implies(
|
||||||
|
polys.contains(poly),
|
||||||
|
poly.coefficients.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)),
|
||||||
|
});
|
||||||
|
pqrpb::v1_state::chunked::HeaderSent {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
sending_ek: Some(self.sending_ek.into_pb()),
|
||||||
|
receiving_ct1: Some(self.receiving_ct1.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::HeaderSent) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ek::EkSent::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
sending_ek: polynomial::PolyEncoder::from_pb(pb.sending_ek.ok_or(Error::StateDecode)?)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
receiving_ct1: polynomial::PolyDecoder::from_pb(
|
||||||
|
pb.receiving_ct1.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ct1Received {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::Ct1Received {
|
||||||
|
hax_lib::assume!(match self.sending_ek.get_encoder_state() {
|
||||||
|
polynomial::EncoderState::Points(points) => hax_lib::prop::forall(
|
||||||
|
|pts: &Vec<crate::encoding::gf::GF16>| hax_lib::prop::implies(
|
||||||
|
points.contains(pts),
|
||||||
|
pts.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)
|
||||||
|
),
|
||||||
|
polynomial::EncoderState::Polys(polys) =>
|
||||||
|
hax_lib::prop::forall(|poly: &polynomial::Poly| hax_lib::prop::implies(
|
||||||
|
polys.contains(poly),
|
||||||
|
poly.coefficients.len() <= polynomial::MAX_INTERMEDIATE_POLYNOMIAL_DEGREE_V1
|
||||||
|
)),
|
||||||
|
});
|
||||||
|
pqrpb::v1_state::chunked::Ct1Received {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
sending_ek: Some(self.sending_ek.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::Ct1Received) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ek::EkSentCt1Received::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
sending_ek: polynomial::PolyEncoder::from_pb(pb.sending_ek.ok_or(Error::StateDecode)?)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EkSentCt1Received {
|
||||||
|
pub fn into_pb(self) -> pqrpb::v1_state::chunked::EkSentCt1Received {
|
||||||
|
pqrpb::v1_state::chunked::EkSentCt1Received {
|
||||||
|
uc: Some(self.uc.into_pb()),
|
||||||
|
receiving_ct2: Some(self.receiving_ct2.into_pb()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::v1_state::chunked::EkSentCt1Received) -> Result<Self, Error> {
|
||||||
|
Ok(Self {
|
||||||
|
uc: unchunked::send_ek::EkSentCt1Received::from_pb(pb.uc.ok_or(Error::StateDecode)?)?,
|
||||||
|
receiving_ct2: polynomial::PolyDecoder::from_pb(
|
||||||
|
pb.receiving_ct2.ok_or(Error::StateDecode)?,
|
||||||
|
)
|
||||||
|
.map_err(|_| Error::StateDecode)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
463
src/v1/chunked/states.rs
Normal file

@@ -0,0 +1,463 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
mod serialize;
|
||||||
|
|
||||||
|
use super::send_ct;
|
||||||
|
use super::send_ek;
|
||||||
|
use crate::encoding::Chunk;
|
||||||
|
use crate::{EpochSecret, Error};
|
||||||
|
use rand::{CryptoRng, Rng};
|
||||||
|
use std::cmp::Ordering;
|
||||||
|
|
||||||
|
use crate::Epoch;
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub enum States {
|
||||||
|
KeysUnsampled(send_ek::KeysUnsampled),
|
||||||
|
KeysSampled(send_ek::KeysSampled),
|
||||||
|
HeaderSent(send_ek::HeaderSent),
|
||||||
|
Ct1Received(send_ek::Ct1Received),
|
||||||
|
EkSentCt1Received(send_ek::EkSentCt1Received),
|
||||||
|
|
||||||
|
NoHeaderReceived(send_ct::NoHeaderReceived),
|
||||||
|
HeaderReceived(send_ct::HeaderReceived),
|
||||||
|
Ct1Sampled(send_ct::Ct1Sampled),
|
||||||
|
EkReceivedCt1Sampled(send_ct::EkReceivedCt1Sampled),
|
||||||
|
Ct1Acknowledged(send_ct::Ct1Acknowledged),
|
||||||
|
Ct2Sampled(send_ct::Ct2Sampled),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub enum MessagePayload {
|
||||||
|
None,
|
||||||
|
Hdr(Chunk),
|
||||||
|
Ek(Chunk),
|
||||||
|
EkCt1Ack(Chunk),
|
||||||
|
Ct1Ack(bool),
|
||||||
|
Ct1(Chunk),
|
||||||
|
Ct2(Chunk),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Message {
|
||||||
|
pub epoch: Epoch,
|
||||||
|
pub payload: MessagePayload,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Send {
|
||||||
|
pub msg: Message,
|
||||||
|
pub key: Option<EpochSecret>,
|
||||||
|
pub state: States,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Recv {
|
||||||
|
pub key: Option<EpochSecret>,
|
||||||
|
pub state: States,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl States {
|
||||||
|
pub(crate) fn init_a(auth_key: &[u8]) -> Self {
|
||||||
|
Self::KeysUnsampled(send_ek::KeysUnsampled::new(auth_key))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn init_b(auth_key: &[u8]) -> Self {
|
||||||
|
Self::NoHeaderReceived(send_ct::NoHeaderReceived::new(auth_key))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub(crate) fn vulnerable_epochs(&self) -> Vec<Epoch> {
|
||||||
|
match self {
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ek
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
States::KeysUnsampled(_) => vec![],
|
||||||
|
States::KeysSampled(state) => vec![state.epoch()],
|
||||||
|
States::HeaderSent(state) => vec![state.epoch()],
|
||||||
|
States::Ct1Received(state) => vec![state.epoch()],
|
||||||
|
States::EkSentCt1Received(state) => vec![state.epoch()],
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ct
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
States::NoHeaderReceived(_) => vec![],
|
||||||
|
States::HeaderReceived(_) => vec![],
|
||||||
|
States::Ct1Sampled(state) => vec![state.epoch()],
|
||||||
|
States::EkReceivedCt1Sampled(state) => vec![state.epoch()],
|
||||||
|
States::Ct1Acknowledged(state) => vec![state.epoch()],
|
||||||
|
States::Ct2Sampled(state) => vec![state.epoch()],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[cfg(test)]
|
||||||
|
pub(crate) fn last_emitted_epoch(&self) -> Epoch {
|
||||||
|
match self {
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ek
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
States::KeysUnsampled(state) => state.epoch() - 1,
|
||||||
|
States::KeysSampled(state) => state.epoch() - 1,
|
||||||
|
States::HeaderSent(state) => state.epoch() - 1,
|
||||||
|
States::Ct1Received(state) => state.epoch() - 1,
|
||||||
|
States::EkSentCt1Received(state) => state.epoch() - 1,
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ct
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
States::NoHeaderReceived(state) => state.epoch() - 1,
|
||||||
|
States::HeaderReceived(state) => state.epoch() - 1,
|
||||||
|
States::Ct1Sampled(state) => state.epoch() - 1,
|
||||||
|
States::EkReceivedCt1Sampled(state) => state.epoch() - 1,
|
||||||
|
States::Ct1Acknowledged(state) => state.epoch() - 1,
|
||||||
|
States::Ct2Sampled(state) => state.epoch() - 1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn send<R: Rng + CryptoRng>(self, rng: &mut R) -> Result<Send, Error> {
|
||||||
|
match self {
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ek
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
Self::KeysUnsampled(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk) = state.send_hdr_chunk(rng);
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::KeysSampled(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Hdr(chunk),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::KeysSampled(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk) = state.send_hdr_chunk();
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::KeysSampled(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Hdr(chunk),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::HeaderSent(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk) = state.send_ek_chunk();
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::HeaderSent(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Ek(chunk),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::Ct1Received(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk) = state.send_ek_chunk();
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::Ct1Received(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::EkCt1Ack(chunk),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::EkSentCt1Received(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::EkSentCt1Received(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Ct1Ack(true),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ct
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
Self::NoHeaderReceived(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::NoHeaderReceived(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::None,
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::HeaderReceived(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk, epoch_secret) = state.send_ct1_chunk(rng);
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::Ct1Sampled(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Ct1(chunk),
|
||||||
|
},
|
||||||
|
key: Some(epoch_secret),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::Ct1Sampled(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk) = state.send_ct1_chunk();
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::Ct1Sampled(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Ct1(chunk),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::EkReceivedCt1Sampled(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk) = state.send_ct1_chunk();
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::EkReceivedCt1Sampled(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Ct1(chunk),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::Ct1Acknowledged(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::Ct1Acknowledged(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::None,
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Self::Ct2Sampled(state) => {
|
||||||
|
let epoch = state.epoch();
|
||||||
|
let (state, chunk) = state.send_ct2_chunk();
|
||||||
|
|
||||||
|
Ok(Send {
|
||||||
|
state: Self::Ct2Sampled(state),
|
||||||
|
msg: Message {
|
||||||
|
epoch,
|
||||||
|
payload: MessagePayload::Ct2(chunk),
|
||||||
|
},
|
||||||
|
key: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn recv(self, msg: &Message) -> Result<Recv, Error> {
|
||||||
|
hax_lib::fstar!("admit()");
|
||||||
|
// println!("send_ct recv msg: {:?}", msg);
|
||||||
|
let mut key = None;
|
||||||
|
let state = match self {
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ek
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
Self::KeysUnsampled(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::KeysUnsampled(state),
|
||||||
|
Ordering::Equal => Self::KeysUnsampled(state),
|
||||||
|
},
|
||||||
|
Self::KeysSampled(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::KeysSampled(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
if let MessagePayload::Ct1(ref chunk) = msg.payload {
|
||||||
|
Self::HeaderSent(state.recv_ct1_chunk(msg.epoch, chunk))
|
||||||
|
} else {
|
||||||
|
Self::KeysSampled(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Self::HeaderSent(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::HeaderSent(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
if let MessagePayload::Ct1(ref chunk) = msg.payload {
|
||||||
|
match state.recv_ct1_chunk(msg.epoch, chunk) {
|
||||||
|
send_ek::HeaderSentRecvChunk::StillReceiving(state) => {
|
||||||
|
Self::HeaderSent(state)
|
||||||
|
}
|
||||||
|
send_ek::HeaderSentRecvChunk::Done(state) => Self::Ct1Received(state),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Self::HeaderSent(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Self::Ct1Received(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::Ct1Received(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
if let MessagePayload::Ct2(ref chunk) = msg.payload {
|
||||||
|
Self::EkSentCt1Received(state.recv_ct2_chunk(msg.epoch, chunk))
|
||||||
|
} else {
|
||||||
|
Self::Ct1Received(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Self::EkSentCt1Received(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::EkSentCt1Received(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
if let MessagePayload::Ct2(ref chunk) = msg.payload {
|
||||||
|
match state.recv_ct2_chunk(msg.epoch, chunk)? {
|
||||||
|
send_ek::EkSentCt1ReceivedRecvChunk::StillReceiving(state) => {
|
||||||
|
Self::EkSentCt1Received(state)
|
||||||
|
}
|
||||||
|
send_ek::EkSentCt1ReceivedRecvChunk::Done((state, sec)) => {
|
||||||
|
key = Some(sec);
|
||||||
|
Self::NoHeaderReceived(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Self::EkSentCt1Received(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
// send_ct
|
||||||
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
Self::NoHeaderReceived(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::NoHeaderReceived(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
if let MessagePayload::Hdr(ref chunk) = msg.payload {
|
||||||
|
match state.recv_hdr_chunk(msg.epoch, chunk)? {
|
||||||
|
send_ct::NoHeaderReceivedRecvChunk::StillReceiving(state) => {
|
||||||
|
Self::NoHeaderReceived(state)
|
||||||
|
}
|
||||||
|
send_ct::NoHeaderReceivedRecvChunk::Done(state) => {
|
||||||
|
Self::HeaderReceived(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Self::NoHeaderReceived(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Self::HeaderReceived(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::HeaderReceived(state),
|
||||||
|
Ordering::Equal => Self::HeaderReceived(state),
|
||||||
|
},
|
||||||
|
Self::Ct1Sampled(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::Ct1Sampled(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
let (chunk, ack) = match msg.payload {
|
||||||
|
MessagePayload::Ek(ref chunk) => (Some(chunk), false),
|
||||||
|
MessagePayload::EkCt1Ack(ref chunk) => (Some(chunk), true),
|
||||||
|
_ => (None, false),
|
||||||
|
};
|
||||||
|
if let Some(chunk) = chunk {
|
||||||
|
match state.recv_ek_chunk(msg.epoch, chunk, ack)? {
|
||||||
|
send_ct::Ct1SampledRecvChunk::StillReceivingStillSending(state) => {
|
||||||
|
Self::Ct1Sampled(state)
|
||||||
|
}
|
||||||
|
send_ct::Ct1SampledRecvChunk::StillReceiving(state) => {
|
||||||
|
Self::Ct1Acknowledged(state)
|
||||||
|
}
|
||||||
|
send_ct::Ct1SampledRecvChunk::StillSending(state) => {
|
||||||
|
Self::EkReceivedCt1Sampled(state)
|
||||||
|
}
|
||||||
|
send_ct::Ct1SampledRecvChunk::Done(state) => Self::Ct2Sampled(state),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Self::Ct1Sampled(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Self::EkReceivedCt1Sampled(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::EkReceivedCt1Sampled(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
if matches!(
|
||||||
|
msg.payload,
|
||||||
|
MessagePayload::Ct1Ack(true) | MessagePayload::EkCt1Ack(_)
|
||||||
|
) {
|
||||||
|
Self::Ct2Sampled(state.recv_ct1_ack(msg.epoch))
|
||||||
|
} else {
|
||||||
|
Self::EkReceivedCt1Sampled(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Self::Ct1Acknowledged(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::Ct1Acknowledged(state),
|
||||||
|
Ordering::Equal => {
|
||||||
|
// If we got all messages in order, we would never receive a msg.ek at
|
||||||
|
// this point, since we already got our first msg.ek_ct1_ack. However,
|
||||||
|
// we can get messages out of order, so let's use the msg.ek chunks if we
|
||||||
|
// get them.
|
||||||
|
let chunk = match msg.payload {
|
||||||
|
MessagePayload::Ek(ref chunk) => Some(chunk),
|
||||||
|
MessagePayload::EkCt1Ack(ref chunk) => Some(chunk),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
if let Some(chunk) = chunk {
|
||||||
|
match state.recv_ek_chunk(msg.epoch, chunk)? {
|
||||||
|
send_ct::Ct1AcknowledgedRecvChunk::StillReceiving(state) => {
|
||||||
|
Self::Ct1Acknowledged(state)
|
||||||
|
}
|
||||||
|
send_ct::Ct1AcknowledgedRecvChunk::Done(state) => {
|
||||||
|
Self::Ct2Sampled(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Self::Ct1Acknowledged(state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Self::Ct2Sampled(state) => match msg.epoch.cmp(&state.epoch()) {
|
||||||
|
Ordering::Greater => {
|
||||||
|
if msg.epoch == state.epoch() + 1 {
|
||||||
|
Self::KeysUnsampled(state.recv_next_epoch(msg.epoch))
|
||||||
|
} else {
|
||||||
|
return Err(Error::EpochOutOfRange(msg.epoch));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ordering::Less => Self::Ct2Sampled(state),
|
||||||
|
Ordering::Equal => Self::Ct2Sampled(state),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
Ok(Recv { state, key })
|
||||||
|
}
|
||||||
|
}
|
||||||
285
src/v1/chunked/states/serialize.rs
Normal file

@@ -0,0 +1,285 @@
|
||||||
|
// Copyright 2025 Signal Messenger, LLC
|
||||||
|
// SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
use crate::proto::pq_ratchet as pqrpb;
|
||||||
|
use crate::{Error, SerializedMessage, Version};
|
||||||
|
use num_enum::IntoPrimitive;
|
||||||
|
|
||||||
|
impl States {
|
||||||
|
pub fn into_pb(self) -> pqrpb::V1State {
|
||||||
|
pqrpb::V1State {
|
||||||
|
inner_state: Some(match self {
|
||||||
|
// send_ek
|
||||||
|
Self::KeysUnsampled(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::KeysUnsampled(state.into_pb())
|
||||||
|
}
|
||||||
|
Self::KeysSampled(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::KeysSampled(state.into_pb())
|
||||||
|
}
|
||||||
|
Self::HeaderSent(state) => pqrpb::v1_state::InnerState::HeaderSent(state.into_pb()),
|
||||||
|
Self::Ct1Received(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::Ct1Received(state.into_pb())
|
||||||
|
}
|
||||||
|
Self::EkSentCt1Received(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::EkSentCt1Received(state.into_pb())
|
||||||
|
}
|
||||||
|
|
||||||
|
// send_ct
|
||||||
|
Self::NoHeaderReceived(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::NoHeaderReceived(state.into_pb())
|
||||||
|
}
|
||||||
|
Self::HeaderReceived(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::HeaderReceived(state.into_pb())
|
||||||
|
}
|
||||||
|
Self::Ct1Sampled(state) => pqrpb::v1_state::InnerState::Ct1Sampled(state.into_pb()),
|
||||||
|
Self::EkReceivedCt1Sampled(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::EkReceivedCt1Sampled(state.into_pb())
|
||||||
|
}
|
||||||
|
Self::Ct1Acknowledged(state) => {
|
||||||
|
pqrpb::v1_state::InnerState::Ct1Acknowledged(state.into_pb())
|
||||||
|
}
|
||||||
|
Self::Ct2Sampled(state) => pqrpb::v1_state::InnerState::Ct2Sampled(state.into_pb()),
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_pb(pb: pqrpb::V1State) -> Result<Self, Error> {
|
||||||
|
Ok(match pb.inner_state {
|
||||||
|
// send_ek
|
||||||
|
Some(pqrpb::v1_state::InnerState::KeysUnsampled(pb)) => {
|
||||||
|
Self::KeysUnsampled(send_ek::KeysUnsampled::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::KeysSampled(pb)) => {
|
||||||
|
Self::KeysSampled(send_ek::KeysSampled::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::HeaderSent(pb)) => {
|
||||||
|
Self::HeaderSent(send_ek::HeaderSent::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::Ct1Received(pb)) => {
|
||||||
|
Self::Ct1Received(send_ek::Ct1Received::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::EkSentCt1Received(pb)) => {
|
||||||
|
Self::EkSentCt1Received(send_ek::EkSentCt1Received::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
|
||||||
|
// send_ct
|
||||||
|
Some(pqrpb::v1_state::InnerState::NoHeaderReceived(pb)) => {
|
||||||
|
Self::NoHeaderReceived(send_ct::NoHeaderReceived::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::HeaderReceived(pb)) => {
|
||||||
|
Self::HeaderReceived(send_ct::HeaderReceived::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::Ct1Sampled(pb)) => {
|
||||||
|
Self::Ct1Sampled(send_ct::Ct1Sampled::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::EkReceivedCt1Sampled(pb)) => {
|
||||||
|
Self::EkReceivedCt1Sampled(send_ct::EkReceivedCt1Sampled::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::Ct1Acknowledged(pb)) => {
|
||||||
|
Self::Ct1Acknowledged(send_ct::Ct1Acknowledged::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
Some(pqrpb::v1_state::InnerState::Ct2Sampled(pb)) => {
|
||||||
|
Self::Ct2Sampled(send_ct::Ct2Sampled::from_pb(pb)?)
|
||||||
|
}
|
||||||
|
|
||||||
|
_ => {
|
||||||
|
return Err(Error::StateDecode);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(IntoPrimitive)]
|
||||||
|
#[repr(u8)]
|
||||||
|
enum MessageType {
|
||||||
|
None = 0,
|
||||||
|
Hdr = 1,
|
||||||
|
Ek = 2,
|
||||||
|
EkCt1Ack = 3,
|
||||||
|
Ct1Ack = 4,
|
||||||
|
Ct1 = 5,
|
||||||
|
Ct2 = 6,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::opaque]
|
||||||
|
impl TryFrom<u8> for MessageType {
|
||||||
|
type Error = String;
|
||||||
|
fn try_from(value: u8) -> Result<Self, Self::Error> {
|
||||||
|
match value {
|
||||||
|
0 => Ok(MessageType::None),
|
||||||
|
1 => Ok(MessageType::Hdr),
|
||||||
|
2 => Ok(MessageType::Ek),
|
||||||
|
3 => Ok(MessageType::EkCt1Ack),
|
||||||
|
4 => Ok(MessageType::Ct1Ack),
|
||||||
|
5 => Ok(MessageType::Ct1),
|
||||||
|
6 => Ok(MessageType::Ct2),
|
||||||
|
_ => Err("Expected a number between 0 and 6".to_owned()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MessageType {
|
||||||
|
fn from_payload(mp: &MessagePayload) -> Self {
|
||||||
|
match mp {
|
||||||
|
MessagePayload::None => Self::None,
|
||||||
|
MessagePayload::Hdr(_) => Self::Hdr,
|
||||||
|
MessagePayload::Ek(_) => Self::Ek,
|
||||||
|
MessagePayload::EkCt1Ack(_) => Self::EkCt1Ack,
|
||||||
|
MessagePayload::Ct1Ack(_) => Self::Ct1Ack,
|
||||||
|
MessagePayload::Ct1(_) => Self::Ct1,
|
||||||
|
MessagePayload::Ct2(_) => Self::Ct2,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn encode_varint(mut a: u64, into: &mut SerializedMessage) {
    for _i in 0..10 {
        let byte = (a & 0x7F) as u8;
        if a < 0x80 {
            into.push(byte);
            break;
        } else {
            into.push(0x80 | byte);
            a >>= 7;
        }
    }
}

#[hax_lib::opaque] // return in while
fn decode_varint(from: &SerializedMessage, at: &mut usize) -> Result<u64, Error> {
    let mut out = 0u64;
    let mut shift = 0;
    while *at < from.len() {
        let byte = from[*at];
        out |= ((byte as u64) & 0x7f) << shift;
        *at += 1;
        if byte & 0x80 == 0 {
            return Ok(out);
        }
        shift += 7;
    }
    Err(Error::MsgDecode)
}

#[hax_lib::fstar::verification_status(lax)]
fn encode_chunk(c: &Chunk, into: &mut SerializedMessage) {
    encode_varint(c.index as u64, into);
    into.extend_from_slice(&c.data[..]);
}

#[hax_lib::fstar::verification_status(lax)]
fn decode_chunk(from: &SerializedMessage, at: &mut usize) -> Result<Chunk, Error> {
    let index = decode_varint(from, at)?;
    let start = *at;
    *at += 32;
    if *at > from.len() || index > 65535 {
        return Err(Error::MsgDecode);
    }
    Ok(Chunk {
        index: index as u16,
        data: from[start..*at].try_into().expect("correct size"),
    })
}

impl Message {
    /// Serialize a message.
    ///
    /// Messages are serialized as:
    ///
    /// [version] - 1 byte
    /// [epoch] - varint, 1-10 bytes
    /// [index] - varint, 1-5 bytes
    /// [message_type] - 1 byte
    ///
    /// Many of the message types also have a data chunk concatenated to them, of
    /// the form:
    ///
    /// [index] - varint, 1-3 bytes
    /// [chunk_data] - 32 bytes
    pub fn serialize(&self, index: u32) -> SerializedMessage {
        hax_lib::fstar!("admit()");
        let mut into = Vec::with_capacity(40);
        into.push(Version::V1.into());
        encode_varint(self.epoch, &mut into);
        encode_varint(index as u64, &mut into);
        into.push(MessageType::from_payload(&self.payload).into());
        encode_chunk(
            match &self.payload {
                MessagePayload::Hdr(ref chunk) => chunk,
                MessagePayload::Ek(ref chunk) => chunk,
                MessagePayload::EkCt1Ack(ref chunk) => chunk,
                MessagePayload::Ct1(ref chunk) => chunk,
                MessagePayload::Ct2(ref chunk) => chunk,
                _ => {
                    return into;
                }
            },
            &mut into,
        );
        into
    }

    pub fn deserialize(from: &SerializedMessage) -> Result<(Self, u32, usize), Error> {
        hax_lib::fstar!("admit()");
        if from.is_empty() || from[0] != Version::V1.into() {
            return Err(Error::MsgDecode);
        }
        let mut at = 1usize;
        let epoch = decode_varint(from, &mut at)? as Epoch;
        let index: u32 = decode_varint(from, &mut at)?
            .try_into()
            .map_err(|_| Error::MsgDecode)?;
        let msg_type = MessageType::try_from(from[at]).map_err(|_| Error::MsgDecode)?;
        at += 1;
        let payload = match msg_type {
            MessageType::None => MessagePayload::None,
            MessageType::Ct1Ack => MessagePayload::Ct1Ack(true),
            MessageType::Hdr => MessagePayload::Hdr(decode_chunk(from, &mut at)?),
            MessageType::Ek => MessagePayload::Ek(decode_chunk(from, &mut at)?),
            MessageType::EkCt1Ack => MessagePayload::EkCt1Ack(decode_chunk(from, &mut at)?),
            MessageType::Ct1 => MessagePayload::Ct1(decode_chunk(from, &mut at)?),
            MessageType::Ct2 => MessagePayload::Ct2(decode_chunk(from, &mut at)?),
        };
        // We allow for there to be additional trailing data in the message, so it's
        // possible that `at < from.len()`. This allows for us to potentially
        // upgrade sessions in future versions of the protocol.
        Ok((Self { epoch, payload }, index, at))
    }
}
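
// Illustrative sketch (added for exposition, not part of the original sources): a
// round-trip through the wire format documented on `Message::serialize` above. The
// module name and the sample epoch/index/data values are made up for the example;
// the `Chunk` field layout is the one used by `decode_chunk` in this file.
#[cfg(test)]
mod serialize_example {
    use super::*;

    #[test]
    fn round_trips_a_ct1_chunk() {
        // Epoch 7, message index 42, carrying chunk 3 of a ct1 -- arbitrary sample values.
        let msg = Message {
            epoch: 7,
            payload: MessagePayload::Ct1(Chunk {
                index: 3,
                data: [0xAB; 32],
            }),
        };
        // Wire layout: [version][epoch varint][index varint][type byte][chunk index varint][32 data bytes].
        let bytes = msg.serialize(42);
        let (decoded, index, consumed) = Message::deserialize(&bytes).expect("well-formed message");
        assert_eq!(index, 42);
        assert_eq!(consumed, bytes.len());
        assert_eq!(decoded.epoch, 7);
        match decoded.payload {
            MessagePayload::Ct1(chunk) => {
                assert_eq!(chunk.index, 3);
                assert_eq!(chunk.data, [0xAB; 32]);
            }
            _ => panic!("expected a Ct1 chunk"),
        }
    }
}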

#[cfg(test)]
mod test {
    use super::{decode_varint, encode_varint};
    use rand::RngCore;
    use rand::TryRngCore;
    use rand_core::OsRng;

    #[test]
    fn encoding_varint() {
        let mut v = vec![];
        encode_varint(0x012C, &mut v);
        assert_eq!(&v, &[0xAC, 0x02][..]);
    }

    #[test]
    fn decoding_varint() {
        let v = vec![0xFF, 0xAC, 0x02, 0xFF];
        let mut at = 1usize;
        assert_eq!(0x012C, decode_varint(&v, &mut at).unwrap());
        assert_eq!(at, 3);
    }

    #[test]
    fn roundtrip_varint() {
        let mut rng = OsRng.unwrap_err();
        for _i in 0..10000 {
            let u = rng.next_u64();
            let mut v = vec![];
            encode_varint(u, &mut v);
            let mut at = 0usize;
            assert_eq!(u, decode_varint(&v, &mut at).unwrap());
            assert_eq!(at, v.len());
        }
    }
}
5
src/v1/unchunked.rs
Normal file

@@ -0,0 +1,5 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

pub(crate) mod send_ct;
pub(crate) mod send_ek;
208
src/v1/unchunked/send_ct.rs
Normal file

@@ -0,0 +1,208 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

mod serialize;

use super::send_ek;
use crate::authenticator;
use crate::incremental_mlkem768;
use crate::kdf;
use crate::{Epoch, EpochSecret, Error};
use rand::{CryptoRng, Rng};

//                        START (epoch = 1)
//                              │
//                      ┌───────▼───────────┐
//    ┌─────────────────►  NoHeaderReceived │
//    │                 └───────┬───────────┘
//    │                         │
//    │                         │ recv_header
//    │                         │
//    │                 ┌───────▼───────────┐
//    │                 │  HeaderReceived   │
//    │                 └───────┬───────────┘
//    │                         │
//    │                         │ send_ct1
//    │                         │
//    │                 ┌───────▼───────────┐
//    │ recv_next_epoch │      Ct1Sent      │
//    │ (epoch += 1)    └───────┬───────────┘
//    │                         │
//    │                         │ recv_ek
//    │                         │
//    │                 ┌───────▼───────────┐
//    │                 │ Ct1SentEkReceived │
//    │                 └───────┬───────────┘
//    │                         │
//    │                         │ send_ct2
//    │                         │
//    │                 ┌───────▼───────────┐
//    └─────────────────┤      Ct2Sent      │
//                      └───────────────────┘
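// Illustrative call sequence (added annotation, not part of the original sources): one
// pass through the happy path of the diagram above, using only the methods defined in
// this file. `auth_key`, `rng`, and the peer-produced `hdr`, `mac`, and `ek` values are
// placeholders standing in for data supplied by the send_ek side of the protocol.
//
//     let st = NoHeaderReceived::new(auth_key);            // START, epoch = 1
//     let st = st.recv_header(epoch, hdr, &mac)?;          // -> HeaderReceived
//     let (st, ct1, epoch_secret) = st.send_ct1(&mut rng); // -> Ct1Sent
//     let st = st.recv_ek(epoch, ek)?;                     // -> Ct1SentEkReceived
//     let (st, ct2, mac) = st.send_ct2();                  // -> Ct2Sent
//     let next = st.recv_next_epoch(epoch + 1);            // -> send_ek::KeysUnsampled, epoch += 1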
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct NoHeaderReceived {
|
||||||
|
pub epoch: Epoch,
|
||||||
|
pub(super) auth: authenticator::Authenticator,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
pub struct HeaderReceived {
|
||||||
|
pub epoch: Epoch,
|
||||||
|
auth: authenticator::Authenticator,
|
||||||
|
#[hax_lib::refine(hdr.len() == 64)]
|
||||||
|
hdr: incremental_mlkem768::Header,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
pub struct Ct1Sent {
|
||||||
|
pub epoch: Epoch,
|
||||||
|
auth: authenticator::Authenticator,
|
||||||
|
#[hax_lib::refine(hdr.len() == 64)]
|
||||||
|
hdr: incremental_mlkem768::Header,
|
||||||
|
#[hax_lib::refine(es.len() == 2080)]
|
||||||
|
es: incremental_mlkem768::EncapsulationState,
|
||||||
|
#[hax_lib::refine(ct1.len() == 960)]
|
||||||
|
ct1: incremental_mlkem768::Ciphertext1,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
pub struct Ct1SentEkReceived {
|
||||||
|
pub epoch: Epoch,
|
||||||
|
auth: authenticator::Authenticator,
|
||||||
|
#[hax_lib::refine(es.len() == 2080)]
|
||||||
|
es: incremental_mlkem768::EncapsulationState,
|
||||||
|
#[hax_lib::refine(ek.len() == 1152)]
|
||||||
|
ek: incremental_mlkem768::EncapsulationKey,
|
||||||
|
#[hax_lib::refine(ct1.len() == 960)]
|
||||||
|
ct1: incremental_mlkem768::Ciphertext1,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg_attr(test, derive(Clone))]
|
||||||
|
pub struct Ct2Sent {
|
||||||
|
pub epoch: Epoch,
|
||||||
|
auth: authenticator::Authenticator,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl NoHeaderReceived {
|
||||||
|
pub fn new(auth_key: &[u8]) -> Self {
|
||||||
|
Self {
|
||||||
|
epoch: 1,
|
||||||
|
auth: authenticator::Authenticator::new(auth_key.to_vec(), 1),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::requires(epoch == self.epoch && hdr.len() == 64 && mac.len() == authenticator::Authenticator::MACSIZE)]
|
||||||
|
pub fn recv_header(
|
||||||
|
self,
|
||||||
|
epoch: Epoch,
|
||||||
|
hdr: incremental_mlkem768::Header,
|
||||||
|
mac: &authenticator::Mac,
|
||||||
|
) -> Result<HeaderReceived, Error> {
|
||||||
|
assert_eq!(epoch, self.epoch);
|
||||||
|
self.auth.verify_hdr(self.epoch, &hdr, mac)?;
|
||||||
|
Ok(HeaderReceived {
|
||||||
|
epoch: self.epoch,
|
||||||
|
auth: self.auth,
|
||||||
|
hdr,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[hax_lib::attributes]
|
||||||
|
impl HeaderReceived {
|
||||||
|
#[hax_lib::requires(self.hdr.len() == 64)]
|
||||||
|
pub fn send_ct1<R: Rng + CryptoRng>(
|
||||||
|
self,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> (Ct1Sent, incremental_mlkem768::Ciphertext1, EpochSecret) {
|
||||||
|
let Self {
|
||||||
|
epoch,
|
||||||
|
mut auth,
|
||||||
|
hdr,
|
||||||
|
} = self;
|
||||||
|
let (ct1, es, secret) = incremental_mlkem768::encaps1(&hdr, rng);
|
||||||
|
let info = [
|
||||||
|
b"Signal_PQCKA_V1_MLKEM768:SCKA Key",
|
||||||
|
epoch.to_be_bytes().as_slice(),
|
||||||
|
]
|
||||||
|
.concat();
|
||||||
|
let secret = kdf::hkdf_to_vec(&[0u8; 32], &secret, &info, 32);
|
||||||
|
auth.update(epoch, &secret);
|
||||||
|
(
|
||||||
|
Ct1Sent {
|
||||||
|
epoch,
|
||||||
|
auth,
|
||||||
|
hdr,
|
||||||
|
es,
|
||||||
|
ct1: ct1.clone(),
|
||||||
|
},
|
||||||
|
ct1,
|
||||||
|
EpochSecret { secret, epoch },
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}

#[hax_lib::attributes]
impl Ct1Sent {
    #[hax_lib::requires(epoch == self.epoch && ek.len() == 1152)]
    pub fn recv_ek(
        self,
        epoch: Epoch,
        ek: incremental_mlkem768::EncapsulationKey,
    ) -> Result<Ct1SentEkReceived, Error> {
        assert_eq!(epoch, self.epoch);
        if incremental_mlkem768::ek_matches_header(&ek, &self.hdr) {
            Ok(Ct1SentEkReceived {
                epoch: self.epoch,
                auth: self.auth,
                ek,
                es: self.es,
                ct1: self.ct1,
            })
        } else {
            Err(Error::ErroneousDataReceived)
        }
    }
}

#[hax_lib::attributes]
impl Ct1SentEkReceived {
    #[hax_lib::ensures(|(_, ct2, mac)| ct2.len() == 128 && mac.len() == authenticator::Authenticator::MACSIZE)]
    pub fn send_ct2(
        self,
    ) -> (
        Ct2Sent,
        incremental_mlkem768::Ciphertext2,
        authenticator::Mac,
    ) {
        let Self {
            epoch,
            ek,
            es,
            auth,
            mut ct1,
        } = self;
        let ct2 = incremental_mlkem768::encaps2(&ek, &es);
        ct1.extend_from_slice(&ct2);
        let mac = auth.mac_ct(epoch, &ct1);
        (Ct2Sent { epoch, auth }, ct2, mac)
    }
}

#[hax_lib::attributes]
impl Ct2Sent {
    #[hax_lib::requires(self.epoch < u64::MAX && next_epoch == self.epoch + 1)]
    pub fn recv_next_epoch(self, next_epoch: Epoch) -> send_ek::KeysUnsampled {
        let Self { epoch, auth } = self;
        assert_eq!(epoch + 1, next_epoch);
        send_ek::KeysUnsampled {
            epoch: epoch + 1,
            auth,
        }
    }
}
113
src/v1/unchunked/send_ct/serialize.rs
Normal file
@ -0,0 +1,113 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

use super::*;
use crate::authenticator::Authenticator;
use crate::proto::pq_ratchet as pqrpb;
use crate::Error;

impl NoHeaderReceived {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::NoHeaderReceived {
        pqrpb::v1_state::unchunked::NoHeaderReceived {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::NoHeaderReceived) -> Result<Self, Error> {
        Ok(Self {
            epoch: pb.epoch,
            auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
        })
    }
}

impl HeaderReceived {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::HeaderReceived {
        pqrpb::v1_state::unchunked::HeaderReceived {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
            hdr: self.hdr,
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::HeaderReceived) -> Result<Self, Error> {
        if pb.hdr.len() == 64 {
            Ok(Self {
                epoch: pb.epoch,
                auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
                hdr: pb.hdr,
            })
        } else {
            Err(Error::StateDecode)
        }
    }
}

impl Ct1Sent {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::Ct1Sent {
        pqrpb::v1_state::unchunked::Ct1Sent {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
            hdr: self.hdr,
            es: self.es,
            ct1: self.ct1.to_vec(),
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::Ct1Sent) -> Result<Self, Error> {
        if pb.hdr.len() == 64 && pb.es.len() == 2080 && pb.ct1.len() == 960 {
            Ok(Self {
                epoch: pb.epoch,
                auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
                hdr: pb.hdr,
                es: pb.es,
                ct1: pb.ct1,
            })
        } else {
            Err(Error::StateDecode)
        }
    }
}

impl Ct1SentEkReceived {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::Ct1SentEkReceived {
        pqrpb::v1_state::unchunked::Ct1SentEkReceived {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
            es: self.es,
            ek: self.ek,
            ct1: self.ct1.to_vec(),
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::Ct1SentEkReceived) -> Result<Self, Error> {
        if pb.es.len() == 2080 && pb.ct1.len() == 960 && pb.ek.len() == 1152 {
            Ok(Self {
                epoch: pb.epoch,
                auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
                es: pb.es,
                ek: pb.ek,
                ct1: pb.ct1,
            })
        } else {
            Err(Error::StateDecode)
        }
    }
}

impl Ct2Sent {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::Ct2Sent {
        pqrpb::v1_state::unchunked::Ct2Sent {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::Ct2Sent) -> Result<Self, Error> {
        Ok(Self {
            epoch: pb.epoch,
            auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
        })
    }
}
169
src/v1/unchunked/send_ek.rs
Normal file
@ -0,0 +1,169 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

mod serialize;

use super::send_ct;
use crate::authenticator;
use crate::incremental_mlkem768;
use crate::kdf;
use crate::{Epoch, EpochSecret, Error};
use rand::{CryptoRng, Rng};

//                   START (epoch = 1)
//                         │
//                   ┌─────▼─────────────┐
//             ┌─────► KeysUnsampled     │
//             │     └─────┬─────────────┘
//             │           │
//             │           │send_header
//             │           │
//             │     ┌─────▼─────────────┐
//             │     │ HeaderSent        │
//             │     └─────┬─────────────┘
//             │           │
//     recv_ct2│           │send_ek
// (epoch += 1)│           │
//             │     ┌─────▼─────────────┐
//             │     │ EkSent            │
//             │     └─────┬─────────────┘
//             │           │
//             │           │recv_ct1
//             │           │
//             │     ┌─────▼─────────────┐
//             └─────┤ EkSentCt1Received │
//                   └───────────────────┘
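//
// A minimal usage sketch (illustrative only; it assumes a shared
// `auth_key: &[u8]`, an `rng` implementing `Rng + CryptoRng`, and a peer
// running the `send_ct` state machine from the sibling module):
//
//     let alice = KeysUnsampled::new(auth_key);            // ek side, epoch 1
//     let bob = send_ct::NoHeaderReceived::new(auth_key);  // ct side, epoch 1
//
//     let (alice, hdr, hdr_mac) = alice.send_header(rng);
//     let bob = bob.recv_header(1, hdr, &hdr_mac)?;
//     let (bob, ct1, bob_secret) = bob.send_ct1(rng);
//
//     let (alice, ek) = alice.send_ek();
//     let alice = alice.recv_ct1(1, ct1);
//     let bob = bob.recv_ek(1, ek)?;
//
//     let (bob, ct2, ct_mac) = bob.send_ct2();
//     let (alice, alice_secret) = alice.recv_ct2(ct2, ct_mac)?;
//     let bob = bob.recv_next_epoch(2);
//
// After the last two calls the roles swap: `alice` continues as a
// `send_ct::NoHeaderReceived` and `bob` as a `KeysUnsampled`, both at epoch 2,
// while `alice_secret` and `bob_secret` hold the secret for epoch 1.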

#[cfg_attr(test, derive(Clone))]
pub struct KeysUnsampled {
    pub epoch: Epoch,
    pub(super) auth: authenticator::Authenticator,
}

#[cfg_attr(test, derive(Clone))]
#[hax_lib::attributes]
pub struct HeaderSent {
    pub epoch: Epoch,
    auth: authenticator::Authenticator,
    #[hax_lib::refine(ek.len() == 1152)]
    ek: incremental_mlkem768::EncapsulationKey,
    #[hax_lib::refine(dk.len() == 2400)]
    dk: incremental_mlkem768::DecapsulationKey,
}

#[cfg_attr(test, derive(Clone))]
#[hax_lib::attributes]
pub struct EkSent {
    pub epoch: Epoch,
    auth: authenticator::Authenticator,
    #[hax_lib::refine(dk.len() == 2400)]
    dk: incremental_mlkem768::DecapsulationKey,
}

#[cfg_attr(test, derive(Clone))]
#[hax_lib::attributes]
pub struct EkSentCt1Received {
    pub epoch: Epoch,
    auth: authenticator::Authenticator,
    #[hax_lib::refine(dk.len() == 2400)]
    dk: incremental_mlkem768::DecapsulationKey,
    #[hax_lib::refine(ct1.len() == 960)]
    ct1: incremental_mlkem768::Ciphertext1,
}

impl KeysUnsampled {
    pub fn new(auth_key: &[u8]) -> Self {
        Self {
            epoch: 1,
            auth: authenticator::Authenticator::new(auth_key.to_vec(), 1),
        }
    }

    pub fn send_header<R: Rng + CryptoRng>(
        self,
        rng: &mut R,
    ) -> (HeaderSent, incremental_mlkem768::Header, authenticator::Mac) {
        let keys = incremental_mlkem768::generate(rng);
        let mac = self.auth.mac_hdr(self.epoch, &keys.hdr);
        (
            HeaderSent {
                epoch: self.epoch,
                auth: self.auth,
                ek: keys.ek,
                dk: keys.dk,
            },
            keys.hdr,
            mac,
        )
    }
}

impl HeaderSent {
    pub fn send_ek(self) -> (EkSent, incremental_mlkem768::EncapsulationKey) {
        (
            EkSent {
                epoch: self.epoch,
                auth: self.auth,
                dk: self.dk,
            },
            self.ek,
        )
    }
}

#[hax_lib::attributes]
impl EkSent {
    #[hax_lib::requires(epoch == self.epoch && ct1.len() == 960)]
    pub fn recv_ct1(
        self,
        epoch: Epoch,
        ct1: incremental_mlkem768::Ciphertext1,
    ) -> EkSentCt1Received {
        assert_eq!(epoch, self.epoch);
        EkSentCt1Received {
            epoch: self.epoch,
            auth: self.auth,
            dk: self.dk,
            ct1,
        }
    }
}

#[hax_lib::attributes]
impl EkSentCt1Received {
    #[hax_lib::requires(ct2.len() == 128 && mac.len() == authenticator::Authenticator::MACSIZE)]
    pub fn recv_ct2(
        self,
        ct2: incremental_mlkem768::Ciphertext2,
        mac: authenticator::Mac,
    ) -> Result<(send_ct::NoHeaderReceived, EpochSecret), Error> {
        let Self {
            epoch,
            mut auth,
            dk,
            mut ct1,
        } = self;
        let ss = incremental_mlkem768::decaps(&dk, &ct1, &ct2);
        let info = [
            b"Signal_PQCKA_V1_MLKEM768:SCKA Key",
            epoch.to_be_bytes().as_slice(),
        ]
        .concat();
        let ss = kdf::hkdf_to_vec(&[0u8; 32], &ss, &info, 32);

        auth.update(epoch, &ss);
        ct1.extend_from_slice(&ct2);
        auth.verify_ct(epoch, &ct1, &mac)?;
        hax_lib::assume!(epoch < u64::MAX);
        Ok((
            send_ct::NoHeaderReceived {
                epoch: epoch + 1,
                auth,
            },
            EpochSecret {
                secret: ss.to_vec(),
                epoch,
            },
        ))
    }
}
93
src/v1/unchunked/send_ek/serialize.rs
Normal file
@ -0,0 +1,93 @@
// Copyright 2025 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

use super::*;
use crate::authenticator::Authenticator;
use crate::proto::pq_ratchet as pqrpb;
use crate::Error;

impl KeysUnsampled {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::KeysUnsampled {
        pqrpb::v1_state::unchunked::KeysUnsampled {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::KeysUnsampled) -> Result<Self, Error> {
        Ok(Self {
            epoch: pb.epoch,
            auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
        })
    }
}

impl HeaderSent {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::HeaderSent {
        pqrpb::v1_state::unchunked::HeaderSent {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
            ek: self.ek,
            dk: self.dk,
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::HeaderSent) -> Result<Self, Error> {
        if pb.dk.len() == 2400 && pb.ek.len() == 1152 {
            Ok(Self {
                epoch: pb.epoch,
                auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
                ek: pb.ek,
                dk: pb.dk,
            })
        } else {
            Err(Error::StateDecode)
        }
    }
}

impl EkSent {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::EkSent {
        pqrpb::v1_state::unchunked::EkSent {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
            dk: self.dk,
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::EkSent) -> Result<Self, Error> {
        if pb.dk.len() == 2400 {
            Ok(Self {
                epoch: pb.epoch,
                auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
                dk: pb.dk,
            })
        } else {
            Err(Error::StateDecode)
        }
    }
}

impl EkSentCt1Received {
    pub fn into_pb(self) -> pqrpb::v1_state::unchunked::EkSentCt1Received {
        pqrpb::v1_state::unchunked::EkSentCt1Received {
            epoch: self.epoch,
            auth: Some(self.auth.into_pb()),
            dk: self.dk,
            ct1: self.ct1,
        }
    }

    pub fn from_pb(pb: pqrpb::v1_state::unchunked::EkSentCt1Received) -> Result<Self, Error> {
        if pb.dk.len() == 2400 && pb.ct1.len() == 960 {
            Ok(Self {
                epoch: pb.epoch,
                auth: Authenticator::from_pb(pb.auth.as_ref().ok_or(Error::StateDecode)?),
                dk: pb.dk,
                ct1: pb.ct1,
            })
        } else {
            Err(Error::StateDecode)
        }
    }
}
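
// Persistence round-trip sketch (illustrative only; `ek_sent` stands for any
// `EkSent` value, and the `pqrpb` structs are the generated protobuf types
// used above):
//
//     let pb = ek_sent.into_pb();          // pqrpb::v1_state::unchunked::EkSent
//     let restored = EkSent::from_pb(pb)?; // re-validates dk.len() == 2400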