Mirror of https://github.com/rosenpass/rosenpass.git (synced 2025-12-18 21:34:37 +03:00)

Compare commits: regression...dev/karo/a (543 commits)
.ci/boot_race/a.toml (new file, 14 lines)
@@ -0,0 +1,14 @@
public_key = "rp-a-public-key"
secret_key = "rp-a-secret-key"
listen = ["127.0.0.1:9999"]
verbosity = "Verbose"

[api]
listen_path = []
listen_fd = []
stream_fd = []

[[peers]]
public_key = "rp-b-public-key"
endpoint = "127.0.0.1:9998"
key_out = "rp-b-key-out.txt"
.ci/boot_race/b.toml (new file, 14 lines)
@@ -0,0 +1,14 @@
public_key = "rp-b-public-key"
secret_key = "rp-b-secret-key"
listen = ["127.0.0.1:9998"]
verbosity = "Verbose"

[api]
listen_path = []
listen_fd = []
stream_fd = []

[[peers]]
public_key = "rp-a-public-key"
endpoint = "127.0.0.1:9999"
key_out = "rp-a-key-out.txt"
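The two configs mirror each other: each instance listens on its own loopback port, names the other as its only peer, and writes the negotiated key to the key_out file that the race check compares. A minimal local sketch of how they are exercised (the commands are taken from the boot-race job in the Regressions workflow and from run.sh below; it assumes a release build of the rosenpass binary):

    # build the daemon and generate the keypairs referenced by the two configs
    cargo build --bin rosenpass --release
    cargo run --release --bin rosenpass gen-keys .ci/boot_race/a.toml
    cargo run --release --bin rosenpass gen-keys .ci/boot_race/b.toml

    # run both instances; each writes its key to rp-{a,b}-key-out.txt
    ./target/release/rosenpass exchange-config .ci/boot_race/a.toml &
    ./target/release/rosenpass exchange-config .ci/boot_race/b.toml &
    sleep 3 && cmp rp-a-key-out.txt rp-b-key-out.txt && echo "Keys match"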
.ci/boot_race/run.sh (new file, 48 lines)
@@ -0,0 +1,48 @@
#!/bin/bash

iterations="$1"
sleep_time="$2"
config_a="$3"
config_b="$4"

PWD="$(pwd)"
EXEC="$PWD/target/release/rosenpass"

i=0
while [ "$i" -ne "$iterations" ]; do
    echo "=> Iteration $i"

    # flush the PSK files
    echo "A" >rp-a-key-out.txt
    echo "B" >rp-b-key-out.txt

    # start the two instances
    echo "Starting instance A"
    "$EXEC" exchange-config "$config_a" &
    PID_A=$!
    sleep "$sleep_time"
    echo "Starting instance B"
    "$EXEC" exchange-config "$config_b" &
    PID_B=$!

    # give the key exchange some time to complete
    sleep 3

    # kill the instances
    kill $PID_A
    kill $PID_B

    # compare the keys
    if cmp -s rp-a-key-out.txt rp-b-key-out.txt; then
        echo "Keys match"
    else
        echo "::warning title=Key Exchange Race Condition::The key exchange resulted in different keys. Delay was ${sleep_time}s."
        # TODO: set this to 1 when the race condition is fixed
        exit 0
    fi

    # give the instances some time to shut down
    sleep 2

    i=$((i + 1))
done
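The script staggers the start of instance B by sleep_time seconds to probe the start-up race and, for now, only emits a GitHub Actions warning (and exits 0) when the two derived keys differ. The Regressions workflow below invokes it, for example, as:

    # 5 iterations, instance B started 2 seconds after instance A
    .ci/boot_race/run.sh 5 2 .ci/boot_race/a.toml .ci/boot_race/b.toml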
.ci/run-regression.sh (new executable file, 33 lines)
@@ -0,0 +1,33 @@
#!/usr/bin/env bash

iterations="$1"
sleep_time="$2"

PWD="$(pwd)"
EXEC="$PWD/target/release/rosenpass"
LOGS="$PWD/output/logs"

mkdir -p "$LOGS"

run_command() {
    local file=$1
    local log_file="$2"
    ("$EXEC" exchange-config "$file" 2>&1 | tee -a "$log_file") &
    echo $!
}

pids=()

(cd output/dut && run_command "configs/dut-$iterations.toml" "$LOGS/dut.log")
for (( x=0; x<iterations; x++ )); do
    (cd output/ate && run_command "configs/ate-$x.toml" "$LOGS/ate-$x.log") & pids+=($!)
done

sleep "$sleep_time"

lsof -i :9999 | awk 'NR!=1 {print $2}' | xargs kill

for (( x=0; x<iterations; x++ )); do
    port=$((x + 50000))
    lsof -i :$port | awk 'NR!=1 {print $2}' | xargs kill
done
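This script starts one device-under-test instance plus one peer per iteration (the peers listen on ports 50000 and up), lets them exchange keys for sleep_time seconds, and then kills everything by port via lsof. The multi-peer job in the Regressions workflow below drives it like this (the config files are produced by misc/generate_configs.py beforehand):

    cargo build --bin rosenpass --release
    python misc/generate_configs.py
    .ci/run-regression.sh 100 20
    # afterwards the workflow checks that all 100 peers produced an output key:
    [ $(ls -1 output/ate/out | wc -l) -eq 100 ]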
.dockerignore (new symbolic link, 1 line)
@@ -0,0 +1 @@
.gitignore
.github/dependabot.yml (4 changed lines)
@@ -4,3 +4,7 @@ updates:
    directory: "/"
    schedule:
      interval: "daily"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "daily"
.github/workflows/doc-upload.yml (4 changed lines)
@@ -13,10 +13,10 @@ jobs:
    steps:
      - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

      - name: Clone rosenpass-website repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          repository: rosenpass/rosenpass-website
          ref: main
.github/workflows/docker.yaml (new file, 288 lines)
@@ -0,0 +1,288 @@
name: Build Docker Images

# Run this job on all non-pull-request events,
# or if Docker-related files are changed in a pull request.
on:
  push:
    branches:
      - "main"
    tags:
      - "v*"
  pull_request:
    paths:
      - "docker/Dockerfile"
      - ".github/workflows/docker.yaml"
    branches:
      - "main"

permissions:
  contents: read
  packages: write

jobs:
  # --------------------------------
  # 1. BUILD & TEST
  # --------------------------------
  build-and-test-rp:
    strategy:
      matrix:
        arch: [amd64, arm64]
    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Build (no push) and Load
        id: build
        uses: docker/build-push-action@v6
        with:
          context: .
          file: docker/Dockerfile
          # no pushing here, so we can test locally
          push: false
          # load the built image into the local Docker daemon on the runner
          load: true
          target: rosenpass
          tags: rosenpass:test
          platforms: linux/${{ matrix.arch }}
      - name: Integration Test - Standalone Key Exchange
        run: |
          # Create separate workdirs
          mkdir -p workdir-server workdir-client

          # Create a Docker network
          docker network create -d bridge rp

          echo "=== GENERATE SERVER KEYS ==="
          docker run --rm \
            -v $PWD/workdir-server:/workdir \
            rosenpass:test gen-keys \
            --public-key=workdir/server-public \
            --secret-key=workdir/server-secret

          echo "=== GENERATE CLIENT KEYS ==="
          docker run --rm \
            -v $PWD/workdir-client:/workdir \
            rosenpass:test gen-keys \
            --public-key=workdir/client-public \
            --secret-key=workdir/client-secret

          echo "=== SHARE PUBLIC KEYS ==="
          cp workdir-client/client-public workdir-server/client-public
          cp workdir-server/server-public workdir-client/server-public

          echo "=== START SERVER CONTAINER ==="
          docker run -d --rm \
            --name rpserver \
            --network rp \
            -v $PWD/workdir-server:/workdir \
            rosenpass:test exchange \
            private-key workdir/server-secret \
            public-key workdir/server-public \
            listen 0.0.0.0:9999 \
            peer public-key workdir/client-public \
            outfile workdir/server-sharedkey

          # Get the container IP of the server
          SERVER_IP=$(docker inspect --format='{{.NetworkSettings.Networks.rp.IPAddress}}' rpserver)
          echo "SERVER_IP=$SERVER_IP"

          echo "=== START CLIENT CONTAINER ==="
          docker run -d --rm \
            --name rpclient \
            --network rp \
            -v $PWD/workdir-client:/workdir \
            rosenpass:test exchange \
            private-key workdir/client-secret \
            public-key workdir/client-public \
            peer public-key workdir/server-public \
            endpoint ${SERVER_IP}:9999 \
            outfile workdir/client-sharedkey

          echo "=== COMPARE SHARED KEYS ==="
          echo "Waiting up to 30 seconds for the server to generate 'server-sharedkey'..."
          for i in $(seq 1 30); do
            if [ -f "workdir-server/server-sharedkey" ]; then
              echo "server-sharedkey found!"
              break
            fi
            sleep 1
          done
          sudo cmp workdir-server/server-sharedkey workdir-client/client-sharedkey

          echo "Standalone Key Exchange test OK."
  # --------------------------------
  # 2. PUSH (only if tests pass)
  # --------------------------------
  docker-image-rp:
    needs:
      - build-and-test-rp
    # Skip if this is not a PR. Then we want to push this image.
    if: ${{ github.event_name != 'pull_request' }}
    # Use a matrix to build for both AMD64 and ARM64
    strategy:
      matrix:
        arch: [amd64, arm64]
    # Switch the runner based on the architecture
    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Docker meta
        id: meta

        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository_owner }}/rp
          labels: |
            maintainer=Karolin Varner <karo@cupdev.net>, wucke13 <wucke13@gmail.com>
            org.opencontainers.image.authors=Karolin Varner <karo@cupdev.net>, wucke13 <wucke13@gmail.com>
            org.opencontainers.image.title=Rosenpass
            org.opencontainers.image.description=The rp command-line integrates Rosenpass and WireGuard to help you create a VPN
            org.opencontainers.image.vendor=Rosenpass e.V.
            org.opencontainers.image.licenses=MIT OR Apache-2.0
            org.opencontainers.image.url=https://rosenpass.eu
            org.opencontainers.image.documentation=https://rosenpass.eu/docs/
            org.opencontainers.image.source=https://github.com/rosenpass/rosenpass

      - name: Log in to registry
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: .
          file: docker/Dockerfile
          push: ${{ github.event_name != 'pull_request' }}
          labels: ${{ steps.meta.outputs.labels }}
          tags: ghcr.io/${{ github.repository_owner }}/rp
          target: rp
          platforms: linux/${{ matrix.arch }}
          outputs: type=image,push-by-digest=true,name-canonical=true,push=true

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-rp-${{ matrix.arch }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  docker-image-rosenpass:
    needs:
      - build-and-test-rp
    # Skip if this is not a PR. Then we want to push this image.
    if: ${{ github.event_name != 'pull_request' }}
    # Use a matrix to build for both AMD64 and ARM64
    strategy:
      matrix:
        arch: [amd64, arm64]
    # Switch the runner based on the architecture
    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository_owner }}/rosenpass
          labels: |
            maintainer=Karolin Varner <karo@cupdev.net>, wucke13 <wucke13@gmail.com>
            org.opencontainers.image.authors=Karolin Varner <karo@cupdev.net>, wucke13 <wucke13@gmail.com>
            org.opencontainers.image.title=Rosenpass
            org.opencontainers.image.description=Reference implementation of the protocol rosenpass protocol
            org.opencontainers.image.vendor=Rosenpass e.V.
            org.opencontainers.image.licenses=MIT OR Apache-2.0
            org.opencontainers.image.url=https://rosenpass.eu
            org.opencontainers.image.documentation=https://rosenpass.eu/docs/
            org.opencontainers.image.source=https://github.com/rosenpass/rosenpass

      - name: Log in to registry
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: .
          file: docker/Dockerfile
          push: ${{ github.event_name != 'pull_request' }}
          labels: ${{ steps.meta.outputs.labels }}
          tags: ghcr.io/${{ github.repository_owner }}/rosenpass
          target: rosenpass
          platforms: linux/${{ matrix.arch }}
          outputs: type=image,push-by-digest=true,name-canonical=true,push=true

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-rosenpass-${{ matrix.arch }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  merge-digests:
    runs-on: ubuntu-latest
    needs:
      - docker-image-rosenpass
      - docker-image-rp
    if: ${{ github.event_name != 'pull_request' }}
    strategy:
      matrix:
        target: [rp, rosenpass]
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-${{ matrix.target }}-*
          merge-multiple: true

      - name: Log in to registry
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository_owner }}/${{ matrix.target }}
          tags: |
            type=edge,branch=main
            type=sha,branch=main
            type=semver,pattern={{version}}

      - name: Create manifest list and push
        working-directory: ${{ runner.temp }}/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf 'ghcr.io/${{ github.repository_owner }}/${{ matrix.target }}@sha256:%s ' *)

      - name: Inspect image
        run: |
          docker buildx imagetools inspect ghcr.io/${{ github.repository_owner }}/${{ matrix.target }}:${{ steps.meta.outputs.version }}
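After merge-digests has pushed the multi-arch manifest lists, the images are addressable through the tags produced by docker/metadata-action (an edge tag for main, a short-sha tag, and semver tags for releases). A hedged sketch of consuming the published rosenpass image; the image name follows from the repository owner above, and the gen-keys flags are the same ones used in the integration test:

    docker pull ghcr.io/rosenpass/rosenpass:edge
    docker run --rm -v "$PWD/workdir:/workdir" ghcr.io/rosenpass/rosenpass:edge \
        gen-keys --public-key=workdir/server-public --secret-key=workdir/server-secret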
.github/workflows/manual-mac-pr.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
name: PR Validation on Mac
on:
  workflow_dispatch:
permissions:
  checks: write
  contents: write
concurrency:
  group: manual-mac-${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  qc:
    uses: ./.github/workflows/qc-mac.yaml
    permissions:
      checks: write
      contents: read
  nix:
    uses: ./.github/workflows/nix-mac.yaml
    permissions:
      contents: write
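Since the workflow only defines a workflow_dispatch trigger, it has to be started manually, either from the Actions tab or, as a sketch assuming the GitHub CLI is installed, with:

    gh workflow run "PR Validation on Mac" --ref <branch-to-validate>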
.github/workflows/nix-mac.yaml (new file, 114 lines)
@@ -0,0 +1,114 @@
name: Nix on Mac
permissions:
  contents: write
on:
  push:
    branches:
      - main
  workflow_call:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  aarch64-darwin---default:
    name: Build aarch64-darwin.default
    runs-on:
      - warp-macos-13-arm64-6x
    needs:
      - aarch64-darwin---rosenpass
    steps:
      - uses: actions/checkout@v4
      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Build
        run: nix build .#packages.aarch64-darwin.default --print-build-logs
  aarch64-darwin---release-package:
    name: Build aarch64-darwin.release-package
    runs-on:
      - warp-macos-13-arm64-6x
    needs:
      - aarch64-darwin---rosenpass
      - aarch64-darwin---rp
      - aarch64-darwin---rosenpass-oci-image
    steps:
      - uses: actions/checkout@v4
      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Build
        run: nix build .#packages.aarch64-darwin.release-package --print-build-logs
  aarch64-darwin---rosenpass:
    name: Build aarch64-darwin.rosenpass
    runs-on:
      - warp-macos-13-arm64-6x
    needs: []
    steps:
      - uses: actions/checkout@v4
      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Build
        run: nix build .#packages.aarch64-darwin.rosenpass --print-build-logs
  aarch64-darwin---rp:
    name: Build aarch64-darwin.rp
    runs-on:
      - warp-macos-13-arm64-6x
    needs: []
    steps:
      - uses: actions/checkout@v4
      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Build
        run: nix build .#packages.aarch64-darwin.rp --print-build-logs
  aarch64-darwin---rosenpass-oci-image:
    name: Build aarch64-darwin.rosenpass-oci-image
    runs-on:
      - warp-macos-13-arm64-6x
    needs:
      - aarch64-darwin---rosenpass
    steps:
      - uses: actions/checkout@v4
      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Build
        run: nix build .#packages.aarch64-darwin.rosenpass-oci-image --print-build-logs
  aarch64-darwin---check:
    name: Run Nix checks on aarch64-darwin
    runs-on:
      - warp-macos-13-arm64-6x
    steps:
      - uses: actions/checkout@v4
      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Check
        run: nix flake check . --print-build-logs
.github/workflows/nix.yaml (313 changed lines)
@@ -6,19 +6,24 @@ on:
  push:
    branches:
      - main

+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
jobs:
  i686-linux---default:
    name: Build i686-linux.default
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs:
      - i686-linux---rosenpass
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -27,14 +32,14 @@ jobs:
  i686-linux---rosenpass:
    name: Build i686-linux.rosenpass
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs: []
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -43,15 +48,15 @@ jobs:
  i686-linux---rosenpass-oci-image:
    name: Build i686-linux.rosenpass-oci-image
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs:
      - i686-linux---rosenpass
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -60,113 +65,13 @@ jobs:
  i686-linux---check:
    name: Run Nix checks on i686-linux
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
-        with:
-          name: rosenpass
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Check
-        run: nix flake check . --print-build-logs
-  x86_64-darwin---default:
-    name: Build x86_64-darwin.default
-    runs-on:
-      - macos-13
-    needs:
-      - x86_64-darwin---rosenpass
-    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
-        with:
-          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
-        with:
-          name: rosenpass
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        run: nix build .#packages.x86_64-darwin.default --print-build-logs
-  x86_64-darwin---release-package:
-    name: Build x86_64-darwin.release-package
-    runs-on:
-      - macos-13
-    needs:
-      - x86_64-darwin---rosenpass
-      - x86_64-darwin---rp
-      - x86_64-darwin---rosenpass-oci-image
-    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
-        with:
-          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
-        with:
-          name: rosenpass
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        run: nix build .#packages.x86_64-darwin.release-package --print-build-logs
-  x86_64-darwin---rosenpass:
-    name: Build x86_64-darwin.rosenpass
-    runs-on:
-      - macos-13
-    needs: []
-    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
-        with:
-          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
-        with:
-          name: rosenpass
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        run: nix build .#packages.x86_64-darwin.rosenpass --print-build-logs
-  x86_64-darwin---rp:
-    name: Build x86_64-darwin.rp
-    runs-on:
-      - macos-13
-    needs: []
-    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
-        with:
-          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
-        with:
-          name: rosenpass
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        run: nix build .#packages.x86_64-darwin.rp --print-build-logs
-  x86_64-darwin---rosenpass-oci-image:
-    name: Build x86_64-darwin.rosenpass-oci-image
-    runs-on:
-      - macos-13
-    needs:
-      - x86_64-darwin---rosenpass
-    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
-        with:
-          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
-        with:
-          name: rosenpass
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        run: nix build .#packages.x86_64-darwin.rosenpass-oci-image --print-build-logs
-  x86_64-darwin---check:
-    name: Run Nix checks on x86_64-darwin
-    runs-on:
-      - macos-13
-    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
-        with:
-          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -175,15 +80,15 @@ jobs:
  x86_64-linux---default:
    name: Build x86_64-linux.default
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs:
      - x86_64-linux---rosenpass
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -192,15 +97,15 @@ jobs:
  x86_64-linux---proof-proverif:
    name: Build x86_64-linux.proof-proverif
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs:
      - x86_64-linux---proverif-patched
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -209,14 +114,14 @@ jobs:
  x86_64-linux---proverif-patched:
    name: Build x86_64-linux.proverif-patched
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs: []
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -225,57 +130,57 @@ jobs:
  x86_64-linux---release-package:
    name: Build x86_64-linux.release-package
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs:
      - x86_64-linux---rosenpass-static
      - x86_64-linux---rosenpass-static-oci-image
      - x86_64-linux---rp-static
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Build
        run: nix build .#packages.x86_64-linux.release-package --print-build-logs
-  aarch64-linux---release-package:
-    name: Build aarch64-linux.release-package
-    runs-on:
-      - ubuntu-latest
-    needs:
-      - aarch64-linux---rosenpass-oci-image
-      - aarch64-linux---rosenpass
-      - aarch64-linux---rp
-    steps:
-      - run: |
-          DEBIAN_FRONTEND=noninteractive
-          sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
-        with:
-          nix_path: nixpkgs=channel:nixos-unstable
-          extra_nix_config: |
-            system = aarch64-linux
-      - uses: cachix/cachix-action@v12
-        with:
-          name: rosenpass
-          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        run: nix build .#packages.aarch64-linux.release-package --print-build-logs
+  # aarch64-linux---release-package:
+  #   name: Build aarch64-linux.release-package
+  #   runs-on:
+  #     - ubicloud-standard-2-arm-ubuntu-2204
+  #   needs:
+  #     - aarch64-linux---rosenpass-oci-image
+  #     - aarch64-linux---rosenpass
+  #     - aarch64-linux---rp
+  #   steps:
+  #     - run: |
+  #         DEBIAN_FRONTEND=noninteractive
+  #         sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static
+  #     - uses: actions/checkout@v4
+  #     - uses: cachix/install-nix-action@v30
+  #       with:
+  #         nix_path: nixpkgs=channel:nixos-unstable
+  #         extra_nix_config: |
+  #           system = aarch64-linux
+  #     - uses: cachix/cachix-action@v15
+  #       with:
+  #         name: rosenpass
+  #         authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
+  #     - name: Build
+  #       run: nix build .#packages.aarch64-linux.release-package --print-build-logs
  x86_64-linux---rosenpass:
    name: Build x86_64-linux.rosenpass
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs: []
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -284,19 +189,19 @@ jobs:
  aarch64-linux---rosenpass:
    name: Build aarch64-linux.rosenpass
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-arm-ubuntu-2204
    needs: []
    steps:
      - run: |
          DEBIAN_FRONTEND=noninteractive
-          sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+          sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi-aarch64 binfmt-support qemu-user-static
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
          extra_nix_config: |
            system = aarch64-linux
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -305,19 +210,19 @@ jobs:
  aarch64-linux---rp:
    name: Build aarch64-linux.rp
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-arm-ubuntu-2204
    needs: []
    steps:
      - run: |
          DEBIAN_FRONTEND=noninteractive
-          sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+          sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi-aarch64 binfmt-support qemu-user-static
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
          extra_nix_config: |
            system = aarch64-linux
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -326,15 +231,15 @@ jobs:
  x86_64-linux---rosenpass-oci-image:
    name: Build x86_64-linux.rosenpass-oci-image
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs:
      - x86_64-linux---rosenpass
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -343,20 +248,20 @@ jobs:
  aarch64-linux---rosenpass-oci-image:
    name: Build aarch64-linux.rosenpass-oci-image
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-arm-ubuntu-2204
    needs:
      - aarch64-linux---rosenpass
    steps:
      - run: |
          DEBIAN_FRONTEND=noninteractive
-          sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+          sudo apt-get update -q -y && sudo apt-get install -q -y qemu-system-aarch64 qemu-efi-aarch64 binfmt-support qemu-user-static
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
          extra_nix_config: |
            system = aarch64-linux
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -365,14 +270,14 @@ jobs:
  x86_64-linux---rosenpass-static:
    name: Build x86_64-linux.rosenpass-static
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs: []
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -381,14 +286,14 @@ jobs:
  x86_64-linux---rp-static:
    name: Build x86_64-linux.rp-static
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs: []
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -397,15 +302,15 @@ jobs:
  x86_64-linux---rosenpass-static-oci-image:
    name: Build x86_64-linux.rosenpass-static-oci-image
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs:
      - x86_64-linux---rosenpass-static
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -414,14 +319,14 @@ jobs:
  x86_64-linux---whitepaper:
    name: Build x86_64-linux.whitepaper
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    needs: []
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -430,13 +335,13 @@ jobs:
  x86_64-linux---check:
    name: Run Nix checks on x86_64-linux
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -444,14 +349,14 @@ jobs:
        run: nix flake check . --print-build-logs
  x86_64-linux---whitepaper-upload:
    name: Upload whitepaper x86_64-linux
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    if: ${{ github.ref == 'refs/heads/main' }}
    steps:
-      - uses: actions/checkout@v3
-      - uses: cachix/install-nix-action@v22
+      - uses: actions/checkout@v4
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
@@ -460,7 +365,7 @@ jobs:
      - name: Build
        run: nix build .#packages.x86_64-linux.whitepaper --print-build-logs
      - name: Deploy PDF artifacts
-        uses: peaceiris/actions-gh-pages@v3
+        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: result/
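Every job follows the same template: install Nix, attach the rosenpass Cachix cache, and build or check one flake output. The same builds can be reproduced locally; a sketch using the attribute names and cache name from the workflow:

    cachix use rosenpass    # optional: reuse the CI binary cache
    nix build .#packages.x86_64-linux.rosenpass --print-build-logs
    nix flake check . --print-build-logs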
.github/workflows/qc-mac.yaml (new file, 32 lines)
@@ -0,0 +1,32 @@
name: QC Mac
on:
  push:
    branches: [main]
  workflow_call:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  checks: write
  contents: read

jobs:
  cargo-test-mac:
    runs-on: warp-macos-13-arm64-6x
    steps:
      - uses: actions/checkout@v4
      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      # liboqs requires quite a lot of stack memory, thus we adjust
      # the default stack size picked for new threads (which is used
      # by `cargo test`) to be _big enough_. Setting it to 8 MiB
      - run: RUST_MIN_STACK=8388608 cargo test --workspace --all-features
.github/workflows/qc.yaml (92 changed lines)
@@ -4,40 +4,44 @@ on:
  push:
    branches: [main]

+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
permissions:
  checks: write
  contents: read

jobs:
  prettier:
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
-      - uses: actionsx/prettier@v2
+      - uses: actions/checkout@v4
+      - uses: actionsx/prettier@v3
        with:
          args: --check .

  shellcheck:
    name: Shellcheck
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Run ShellCheck
        uses: ludeeus/action-shellcheck@master

  rustfmt:
    name: Rust Format
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Run Rust Formatting Script
        run: bash format_rust_code.sh --mode check

  cargo-bench:
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/cache@v3
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
@@ -53,29 +57,27 @@ jobs:

  mandoc:
    name: mandoc
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
      - name: Install mandoc
        run: sudo apt-get install -y mandoc
-      - uses: actions/checkout@v3
-      - name: Check rosenpass.1
-        run: doc/check.sh doc/rosenpass.1
+      - uses: actions/checkout@v4
      - name: Check rp.1
        run: doc/check.sh doc/rp.1

  cargo-audit:
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions-rs/audit-check@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

  cargo-clippy:
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/cache@v3
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
@@ -91,10 +93,10 @@ jobs:
          args: --all-features

  cargo-doc:
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/cache@v3
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
@@ -110,10 +112,15 @@ jobs:
      - run: RUSTDOCFLAGS="-D warnings" cargo doc --no-deps --document-private-items

  cargo-test:
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubicloud-standard-2-ubuntu-2204, warp-macos-13-arm64-6x]
+        # - ubuntu is x86-64
+        # - macos-13 is also x86-64 architecture
    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/cache@v3
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
@@ -129,10 +136,10 @@ jobs:

  cargo-test-nix-devshell-x86_64-linux:
    runs-on:
-      - ubuntu-latest
+      - ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/cache@v3
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
@@ -141,20 +148,21 @@ jobs:
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
-      - uses: cachix/install-nix-action@v21
+      - uses: cachix/install-nix-action@v30
        with:
          nix_path: nixpkgs=channel:nixos-unstable
-      - uses: cachix/cachix-action@v12
+      - uses: cachix/cachix-action@v15
        with:
          name: rosenpass
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - run: nix develop --command cargo test --workspace --all-features

  cargo-fuzz:
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
+    env:
    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/cache@v3
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
@@ -166,7 +174,7 @@ jobs:
      - name: Install nightly toolchain
        run: |
          rustup toolchain install nightly
-          rustup default nightly
+          rustup override nightly
      - name: Install cargo-fuzz
        run: cargo install cargo-fuzz
      - name: Run fuzzing
@@ -184,19 +192,23 @@ jobs:
          cargo fuzz run fuzz_vec_secret_alloc_memfdsec_mallocfb -- -max_total_time=5

  codecov:
-    runs-on: ubuntu-latest
+    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - run: rustup default nightly
      - run: rustup component add llvm-tools-preview
      - run: |
-          cargo install cargo-llvm-cov || true
-          cargo llvm-cov --lcov --output-path coverage.lcov
+          cargo install grcov || true
+          ./coverage_report.sh
      # If using tarapulin
      #- run: cargo install cargo-tarpaulin
      #- run: cargo tarpaulin --out Xml
      - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v4.0.1
+        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
-          files: ./coverage.lcov
+          files: ./target/grcov/lcov
          verbose: true
+        env:
+          RUSTUP_TOOLCHAIN: 1.81
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
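The codecov job now produces the lcov report through grcov via the repository's coverage_report.sh instead of cargo-llvm-cov, and uploads ./target/grcov/lcov. A local sketch of that sequence, following the steps of the job:

    rustup default nightly
    rustup component add llvm-tools-preview
    cargo install grcov || true
    ./coverage_report.sh    # expected to produce ./target/grcov/lcov, the file uploaded to Codecov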
.github/workflows/regressions.yml (new file, 37 lines)
@@ -0,0 +1,37 @@
name: Regressions
on:
  pull_request:
  push:
    branches: [main]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  checks: write
  contents: read

jobs:
  multi-peer:
    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
      - uses: actions/checkout@v4
      - run: cargo build --bin rosenpass --release
      - run: python misc/generate_configs.py
      - run: chmod +x .ci/run-regression.sh
      - run: .ci/run-regression.sh 100 20
      - run: |
          [ $(ls -1 output/ate/out | wc -l) -eq 100 ]

  boot-race:
    runs-on: ubicloud-standard-2-ubuntu-2204
    steps:
      - uses: actions/checkout@v4
      - run: cargo build --bin rosenpass --release
      - run: chmod +x .ci/boot_race/run.sh
      - run: cargo run --release --bin rosenpass gen-keys .ci/boot_race/a.toml
      - run: cargo run --release --bin rosenpass gen-keys .ci/boot_race/b.toml
      - run: .ci/boot_race/run.sh 5 2 .ci/boot_race/a.toml .ci/boot_race/b.toml
      - run: .ci/boot_race/run.sh 5 1 .ci/boot_race/a.toml .ci/boot_race/b.toml
      - run: .ci/boot_race/run.sh 5 0 .ci/boot_race/a.toml .ci/boot_race/b.toml
.github/workflows/release.yaml (vendored, 49 lines changed)
@@ -11,18 +11,16 @@ jobs:
|
||||
runs-on:
|
||||
- ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: cachix/install-nix-action@v22
|
||||
with:
|
||||
nix_path: nixpkgs=channel:nixos-unstable
|
||||
- uses: cachix/cachix-action@v12
|
||||
- uses: actions/checkout@v4
|
||||
- uses: cachix/install-nix-action@v30
|
||||
- uses: cachix/cachix-action@v15
|
||||
with:
|
||||
name: rosenpass
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
- name: Build release
|
||||
run: nix build .#release-package --print-build-logs
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: ${{ contains(github.ref_name, 'rc') }}
|
||||
prerelease: ${{ contains(github.ref_name, 'alpha') || contains(github.ref_name, 'beta') }}
|
||||
@@ -32,18 +30,16 @@ jobs:
|
||||
runs-on:
|
||||
- macos-13
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: cachix/install-nix-action@v22
|
||||
with:
|
||||
nix_path: nixpkgs=channel:nixos-unstable
|
||||
- uses: cachix/cachix-action@v12
|
||||
- uses: actions/checkout@v4
|
||||
- uses: cachix/install-nix-action@v30
|
||||
- uses: cachix/cachix-action@v15
|
||||
with:
|
||||
name: rosenpass
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
- name: Build release
|
||||
run: nix build .#release-package --print-build-logs
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: ${{ contains(github.ref_name, 'rc') }}
|
||||
prerelease: ${{ contains(github.ref_name, 'alpha') || contains(github.ref_name, 'beta') }}
|
||||
@@ -53,19 +49,40 @@ jobs:
|
||||
runs-on:
|
||||
- ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: cachix/install-nix-action@v22
|
||||
- uses: actions/checkout@v4
|
||||
- uses: cachix/install-nix-action@v30
|
||||
with:
|
||||
nix_path: nixpkgs=channel:nixos-unstable
|
||||
- uses: cachix/cachix-action@v12
|
||||
- uses: cachix/cachix-action@v15
|
||||
with:
|
||||
name: rosenpass
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
- name: Build release
|
||||
run: nix build .#release-package --print-build-logs
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: ${{ contains(github.ref_name, 'rc') }}
|
||||
prerelease: ${{ contains(github.ref_name, 'alpha') || contains(github.ref_name, 'beta') }}
|
||||
files: result/*
|
||||
linux-packages:
|
||||
name: Build and upload DEB and RPM packages
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: cachix/install-nix-action@v30
|
||||
- uses: cachix/cachix-action@v15
|
||||
with:
|
||||
name: rosenpass
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
- name: Build DEB & RPM package
|
||||
run: |
|
||||
mkdir packages
|
||||
for f in $(nix build .#package-deb .#package-rpm --print-out-paths); do cp "$f" "packages/${f#*-}"; done
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: ${{ contains(github.ref_name, 'rc') }}
|
||||
prerelease: ${{ contains(github.ref_name, 'alpha') || contains(github.ref_name, 'beta') }}
|
||||
files: |
|
||||
packages/*
|
||||
|
||||
.github/workflows/supply-chain.yml (vendored, new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
name: Supply-Chain
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
cargo-deny:
|
||||
name: Deny dependencies with vulnerabilities or incompatible licenses
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: EmbarkStudios/cargo-deny-action@v2
|
||||
cargo-supply-chain:
|
||||
name: Supply Chain Report
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cache/cargo-supply-chain/
|
||||
key: cargo-supply-chain-cache
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.tool_cache }}/cargo-supply-chain
|
||||
key: cargo-supply-chain-bin
|
||||
- name: Add the tool cache directory to the search path
|
||||
run: echo "${{ runner.tool_cache }}/cargo-supply-chain/bin" >> $GITHUB_PATH
|
||||
- name: Ensure that the tool cache is populated with the cargo-supply-chain binary
|
||||
run: cargo install --root ${{ runner.tool_cache }}/cargo-supply-chain cargo-supply-chain
|
||||
- name: Update data for cargo-supply-chain
|
||||
run: cargo supply-chain update
|
||||
- name: Generate cargo-supply-chain report about publishers
|
||||
run: cargo supply-chain publishers
|
||||
- name: Generate cargo-supply-chain report about crates
|
||||
run: cargo supply-chain crates
|
||||
# The setup for cargo-vet follows the recommendations in the cargo-vet documentation: https://mozilla.github.io/cargo-vet/configuring-ci.html
|
||||
cargo-vet:
|
||||
name: Vet Dependencies
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
key: cargo-vet-cache
|
||||
- name: Install stable toolchain # Since we are running/compiling cargo-vet, we should rely on the stable toolchain.
|
||||
run: |
|
||||
rustup toolchain install stable
|
||||
rustup default stable
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.tool_cache }}/cargo-vet
|
||||
key: cargo-vet-bin
|
||||
- name: Add the tool cache directory to the search path
|
||||
run: echo "${{ runner.tool_cache }}/cargo-vet/bin" >> $GITHUB_PATH
|
||||
- name: Ensure that the tool cache is populated with the cargo-vet binary
|
||||
run: cargo install --root ${{ runner.tool_cache }}/cargo-vet cargo-vet
|
||||
- name: Regenerate vet exemptions for dependabot PRs
|
||||
if: github.actor == 'dependabot[bot]' # Run only for Dependabot PRs
|
||||
run: cargo vet regenerate exemptions
|
||||
- name: Check for changes in case of dependabot PR
|
||||
if: github.actor == 'dependabot[bot]' # Run only for Dependabot PRs
|
||||
run: git diff --exit-code || echo "Changes detected, committing..."
|
||||
- name: Commit and push changes for dependabot PRs
|
||||
if: success() && github.actor == 'dependabot[bot]'
|
||||
run: |
|
||||
git fetch origin ${{ github.head_ref }}
|
||||
git switch ${{ github.head_ref }}
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git add supply-chain/*
|
||||
git commit -m "Regenerate cargo vet exemptions"
|
||||
git push origin ${{ github.head_ref }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Invoke cargo-vet
|
||||
run: cargo vet --locked
|
||||
.gitignore (vendored, 6 lines changed)
@@ -20,3 +20,9 @@ _markdown_*
|
||||
**/result
|
||||
**/result-*
|
||||
.direnv
|
||||
|
||||
# Visual studio code
|
||||
.vscode
|
||||
|
||||
/output
|
||||
.nixos-test-history
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
.direnv/
|
||||
flake.lock
|
||||
papers/whitepaper.md
|
||||
target/
|
||||
src/usage.md
|
||||
target/
|
||||
|
||||
CONTRIBUTING.md (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
# Contributing to Rosenpass
|
||||
|
||||
## Common operations
|
||||
|
||||
### Apply code formatting
|
||||
|
||||
Format rust code:
|
||||
|
||||
```bash
|
||||
cargo fmt
|
||||
```
|
||||
|
||||
Format rust code in markdown files:
|
||||
|
||||
```bash
|
||||
./format_rust_code.sh --mode fix
|
||||
```
|
||||
|
||||
### Spawn a development environment with nix
|
||||
|
||||
```bash
|
||||
nix develop .#fullEnv
|
||||
```
|
||||
|
||||
You need to [install this nix package manager](https://wiki.archlinux.org/title/Nix) first.
|
||||
|
||||
### Run our tests
|
||||
|
||||
Make sure to increase the stack size available; some of our cryptography operations require a lot of stack memory.
|
||||
|
||||
```bash
|
||||
RUST_MIN_STACK=8388608 cargo test --workspace --all-features
|
||||
```
|
||||
|
||||
### Generate coverage reports
|
||||
|
||||
Keep in mind that many of Rosenpass' tests are doctests, so to get an accurate read on our code coverage, you have to include doctests:
|
||||
|
||||
```bash
|
||||
./coverage_report.sh
|
||||
```
|
||||
Cargo.lock (generated, 1531 lines changed; diff suppressed because it is too large)
Cargo.toml (58 lines changed)
@@ -32,51 +32,69 @@ rosenpass-secret-memory = { path = "secret-memory" }
|
||||
rosenpass-oqs = { path = "oqs" }
|
||||
rosenpass-wireguard-broker = { path = "wireguard-broker" }
|
||||
doc-comment = "0.3.3"
|
||||
base64ct = {version = "1.6.0", default-features=false}
|
||||
base64ct = { version = "1.6.0", default-features = false }
|
||||
zeroize = "1.8.1"
|
||||
memoffset = "0.9.1"
|
||||
thiserror = "1.0.61"
|
||||
thiserror = "1.0.69"
|
||||
paste = "1.0.15"
|
||||
env_logger = "0.10.2"
|
||||
toml = "0.7.8"
|
||||
static_assertions = "1.1.0"
|
||||
allocator-api2 = "0.2.14"
|
||||
memsec = { git="https://github.com/rosenpass/memsec.git" ,rev="aceb9baee8aec6844125bd6612f92e9a281373df", features = [ "alloc_ext", ] }
|
||||
memsec = { git = "https://github.com/rosenpass/memsec.git", rev = "aceb9baee8aec6844125bd6612f92e9a281373df", features = [
|
||||
"alloc_ext",
|
||||
] }
|
||||
rand = "0.8.5"
|
||||
typenum = "1.17.0"
|
||||
log = { version = "0.4.21" }
|
||||
clap = { version = "4.5.7", features = ["derive"] }
|
||||
serde = { version = "1.0.203", features = ["derive"] }
|
||||
arbitrary = { version = "1.3.2", features = ["derive"] }
|
||||
anyhow = { version = "1.0.86", features = ["backtrace", "std"] }
|
||||
mio = { version = "0.8.11", features = ["net", "os-poll"] }
|
||||
log = { version = "0.4.22" }
|
||||
clap = { version = "4.5.23", features = ["derive"] }
|
||||
clap_mangen = "0.2.24"
|
||||
clap_complete = "4.5.40"
|
||||
serde = { version = "1.0.217", features = ["derive"] }
|
||||
arbitrary = { version = "1.4.1", features = ["derive"] }
|
||||
anyhow = { version = "1.0.95", features = ["backtrace", "std"] }
|
||||
mio = { version = "1.0.3", features = ["net", "os-poll"] }
|
||||
oqs-sys = { version = "0.9.1", default-features = false, features = [
|
||||
'classic_mceliece',
|
||||
'kyber',
|
||||
] }
|
||||
blake2 = "0.10.6"
|
||||
sha3 = "0.10.8"
|
||||
chacha20poly1305 = { version = "0.10.1", default-features = false, features = [
|
||||
"std",
|
||||
"heapless",
|
||||
] }
|
||||
zerocopy = { version = "0.7.34", features = ["derive"] }
|
||||
home = "0.5.9"
|
||||
derive_builder = "0.20.0"
|
||||
tokio = { version = "1.38", features = ["macros", "rt-multi-thread"] }
|
||||
postcard= {version = "1.0.8", features = ["alloc"]}
|
||||
zerocopy = { version = "0.7.35", features = ["derive"] }
|
||||
home = "=0.5.9" # 5.11 requires rustc 1.81
|
||||
derive_builder = "0.20.1"
|
||||
tokio = { version = "1.42", features = ["macros", "rt-multi-thread"] }
|
||||
postcard = { version = "1.1.1", features = ["alloc"] }
|
||||
libcrux = { version = "0.0.2-pre.2" }
|
||||
libcrux-chacha20poly1305 = { version = "0.0.2-beta.3" }
|
||||
libcrux-ml-kem = { version = "0.0.2-beta.3" }
|
||||
libcrux-blake2 = { git = "https://github.com/cryspen/libcrux.git", rev = "10ce653e9476"}
|
||||
hex-literal = { version = "0.4.1" }
|
||||
hex = { version = "0.4.3" }
|
||||
heck = { version = "0.5.0" }
|
||||
libc = { version = "0.2" }
|
||||
uds = { git = "https://github.com/rosenpass/uds" }
|
||||
signal-hook = "0.3.17"
|
||||
|
||||
#Dev dependencies
|
||||
serial_test = "3.1.1"
|
||||
serial_test = "3.2.0"
|
||||
tempfile = "3"
|
||||
stacker = "0.1.15"
|
||||
stacker = "0.1.17"
|
||||
libfuzzer-sys = "0.4"
|
||||
test_bin = "0.4.0"
|
||||
criterion = "0.4.0"
|
||||
criterion = "0.5.1"
|
||||
allocator-api2-tests = "0.2.15"
|
||||
procspawn = {version = "1.0.0", features= ["test-support"]}
|
||||
procspawn = { version = "1.0.1", features = ["test-support"] }
|
||||
|
||||
|
||||
#Broker dependencies (might need cleanup or changes)
|
||||
wireguard-uapi = "3.0.0"
|
||||
wireguard-uapi = { version = "3.0.0", features = ["xplatform"] }
|
||||
command-fds = "0.2.3"
|
||||
rustix = { version = "0.38.27", features = ["net"] }
|
||||
rustix = { version = "0.38.42", features = ["net", "fs", "process"] }
|
||||
futures = "0.3"
|
||||
futures-util = "0.3"
|
||||
x25519-dalek = "2"
|
||||
|
||||
@@ -8,5 +8,13 @@ description = "Rosenpass internal traits for cryptographic primitives"
|
||||
homepage = "https://rosenpass.eu/"
|
||||
repository = "https://github.com/rosenpass/rosenpass"
|
||||
readme = "readme.md"
|
||||
rust-version = "1.77"
|
||||
|
||||
[dependencies]
|
||||
thiserror = { workspace = true }
|
||||
rosenpass-to = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rosenpass-oqs = { workspace = true }
|
||||
rosenpass-secret-memory = { workspace = true }
|
||||
anyhow = { workspace = true }
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
|
||||
Rosenpass internal library providing traits for cryptographic primitives.
|
||||
|
||||
This is an internal library; not guarantee is made about its API at this point in time.
|
||||
This is an internal library; no guarantee is made about its API at this point in time.
|
||||
|
||||
cipher-traits/src/algorithms.rs (new file, 137 lines)
@@ -0,0 +1,137 @@
|
||||
//! This module contains the traits for all the cryptographic algorithms used throughout Rosenpass.
|
||||
//! These traits are marker traits that signal intent. They can also be used for trait objects.
|
||||
|
||||
/// Constants and trait for the Incorrect HMAC over Blake2b, with 256-bit key and hash length.
|
||||
pub mod keyed_hash_incorrect_hmac_blake2b {
|
||||
use crate::primitives::keyed_hash::*;
|
||||
|
||||
// These constants describe how they are used here, not what the algorithm defines.
|
||||
|
||||
/// The key length used in [`KeyedHashIncorrectHmacBlake2b`].
|
||||
pub const KEY_LEN: usize = 32;
|
||||
/// The hash length used in [`KeyedHashIncorrectHmacBlake2b`].
|
||||
pub const HASH_LEN: usize = 32;
|
||||
|
||||
/// A [`KeyedHash`] that is an incorrect HMAC over Blake2 (a custom Rosenpass construction)
|
||||
pub trait KeyedHashIncorrectHmacBlake2b: KeyedHash<KEY_LEN, HASH_LEN> {}
|
||||
}
|
||||
|
||||
/// Constants and trait for Blake2b, with 256-bit key and hash length.
|
||||
pub mod keyed_hash_blake2b {
|
||||
use crate::primitives::keyed_hash::*;
|
||||
|
||||
// These constants describe how they are used here, not what the algorithm defines.
|
||||
|
||||
/// The key length used in [`KeyedHashBlake2b`].
|
||||
pub const KEY_LEN: usize = 32;
|
||||
/// The hash length used in [`KeyedHashBlake2b`].
|
||||
pub const HASH_LEN: usize = 32;
|
||||
|
||||
/// A [`KeyedHash`] that is Blake2b
|
||||
pub trait KeyedHashBlake2b: KeyedHash<KEY_LEN, HASH_LEN> {}
|
||||
}
|
||||
|
||||
/// Constants and trait for SHAKE256, with 256-bit key and hash length.
|
||||
pub mod keyed_hash_shake256 {
|
||||
use crate::primitives::keyed_hash::*;
|
||||
|
||||
// These constants describe how they are used here, not what the algorithm defines.
|
||||
|
||||
/// The key length used in [`KeyedHashShake256`].
|
||||
pub const KEY_LEN: usize = 32;
|
||||
/// The hash length used in [`KeyedHashShake256`].
|
||||
pub const HASH_LEN: usize = 32;
|
||||
|
||||
/// A [`KeyedHash`] that is SHAKE256.
|
||||
pub trait KeyedHashShake256: KeyedHash<KEY_LEN, HASH_LEN> {}
|
||||
}
|
||||
|
||||
/// Constants and trait for the ChaCha20Poly1305 AEAD
|
||||
pub mod aead_chacha20poly1305 {
|
||||
use crate::primitives::aead::*;
|
||||
|
||||
// See https://datatracker.ietf.org/doc/html/rfc7539#section-2.8
|
||||
|
||||
/// The key length used in [`AeadChaCha20Poly1305`].
|
||||
pub const KEY_LEN: usize = 32;
|
||||
/// The nonce length used in [`AeadChaCha20Poly1305`].
|
||||
pub const NONCE_LEN: usize = 12;
|
||||
/// The tag length used in [`AeadChaCha20Poly1305`].
|
||||
pub const TAG_LEN: usize = 16;
|
||||
|
||||
/// An [`Aead`] that is ChaCha20Poly1305.
|
||||
pub trait AeadChaCha20Poly1305: Aead<KEY_LEN, NONCE_LEN, TAG_LEN> {}
|
||||
}
|
||||
|
||||
/// Constants and trait for the XChaCha20Poly1305 AEAD (i.e. ChaCha20Poly1305 with extended nonce
|
||||
/// lengths)
|
||||
pub mod aead_xchacha20poly1305 {
|
||||
use crate::primitives::aead::*;
|
||||
|
||||
// See https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-xchacha-03
|
||||
|
||||
/// The key length used in [`AeadXChaCha20Poly1305`].
|
||||
pub const KEY_LEN: usize = 32;
|
||||
/// The nonce length used in [`AeadXChaCha20Poly1305`].
|
||||
pub const NONCE_LEN: usize = 24;
|
||||
/// The tag length used in [`AeadXChaCha20Poly1305`].
|
||||
pub const TAG_LEN: usize = 16;
|
||||
|
||||
/// An [`Aead`] that is XChaCha20Poly1305.
|
||||
pub trait AeadXChaCha20Poly1305: Aead<KEY_LEN, NONCE_LEN, TAG_LEN> {}
|
||||
}
|
||||
|
||||
/// Constants and trait for the Kyber512 KEM
|
||||
pub mod kem_kyber512 {
|
||||
use crate::primitives::kem::*;
|
||||
|
||||
// page 39 of https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.203.pdf
|
||||
// (which is ml-kem instead of kyber, but it's the same)
|
||||
|
||||
/// The secret key length used in [`KemKyber512`].
|
||||
pub const SK_LEN: usize = 1632;
|
||||
|
||||
/// The public key length used in [`KemKyber512`].
|
||||
pub const PK_LEN: usize = 800;
|
||||
|
||||
/// The ciphertext length used in [`KemKyber512`].
|
||||
pub const CT_LEN: usize = 768;
|
||||
|
||||
/// The shared key length used in [`KemKyber512`].
|
||||
pub const SHK_LEN: usize = 32;
|
||||
|
||||
/// A [`Kem`] that is Kyber512.
|
||||
pub trait KemKyber512: Kem<SK_LEN, PK_LEN, CT_LEN, SHK_LEN> {}
|
||||
}
|
||||
|
||||
/// Constants and trait for the Classic McEliece 460896 KEM
|
||||
pub mod kem_classic_mceliece460896 {
|
||||
use crate::primitives::kem::*;
|
||||
|
||||
// page 6 of https://classic.mceliece.org/mceliece-impl-20221023.pdf
|
||||
|
||||
/// The secret key length used in [`KemClassicMceliece460896`].
|
||||
pub const SK_LEN: usize = 13608;
|
||||
|
||||
/// The public key length used in [`KemClassicMceliece460896`].
|
||||
pub const PK_LEN: usize = 524160;
|
||||
|
||||
/// The ciphertext length used in [`KemClassicMceliece460896`].
|
||||
pub const CT_LEN: usize = 156;
|
||||
|
||||
/// The shared key length used in [`KemClassicMceliece460896`].
|
||||
pub const SHK_LEN: usize = 32;
|
||||
|
||||
/// A [`Kem`] that is ClassicMceliece460896.
|
||||
pub trait KemClassicMceliece460896: Kem<SK_LEN, PK_LEN, CT_LEN, SHK_LEN> {}
|
||||
}
|
||||
|
||||
pub use aead_chacha20poly1305::AeadChaCha20Poly1305;
|
||||
pub use aead_xchacha20poly1305::AeadXChaCha20Poly1305;
|
||||
|
||||
pub use kem_classic_mceliece460896::KemClassicMceliece460896;
|
||||
pub use kem_kyber512::KemKyber512;
|
||||
|
||||
pub use keyed_hash_blake2b::KeyedHashBlake2b;
|
||||
pub use keyed_hash_incorrect_hmac_blake2b::KeyedHashIncorrectHmacBlake2b;
|
||||
pub use keyed_hash_shake256::KeyedHashShake256;
|
||||
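The marker traits above only pin down the key and hash lengths; a concrete algorithm still implements the generic `KeyedHash` trait first and then opts into the marker. A minimal sketch of that pattern follows, assuming `rosenpass-cipher-traits` as a dependency and the module paths introduced above; the `DummyBlake2b` type, its XOR body, and the `hash_with_blake2b_like` helper are purely illustrative and not part of this diff.

```rust
// Illustrative only: a fake "Blake2b" that satisfies the generic KeyedHash trait
// and then opts into the KeyedHashBlake2b marker trait to signal intent.
use rosenpass_cipher_traits::algorithms::keyed_hash_blake2b::{KeyedHashBlake2b, HASH_LEN, KEY_LEN};
use rosenpass_cipher_traits::primitives::KeyedHash;

struct DummyBlake2b;

impl KeyedHash<KEY_LEN, HASH_LEN> for DummyBlake2b {
    type Error = std::convert::Infallible;

    fn keyed_hash(
        key: &[u8; KEY_LEN],
        data: &[u8],
        out: &mut [u8; HASH_LEN],
    ) -> Result<(), Self::Error> {
        // Not a real hash: XOR-fold key and data just to fill the output buffer.
        for (i, b) in out.iter_mut().enumerate() {
            *b = key[i] ^ data.get(i).copied().unwrap_or(0);
        }
        Ok(())
    }
}

// The marker trait carries no methods; it only states "this is Blake2b with 32-byte key/hash".
impl KeyedHashBlake2b for DummyBlake2b {}

// Generic callers can then bound on the marker instead of repeating the length parameters.
fn hash_with_blake2b_like<H: KeyedHashBlake2b>(key: &[u8; KEY_LEN], data: &[u8]) -> [u8; HASH_LEN] {
    let mut out = [0u8; HASH_LEN];
    if H::keyed_hash(key, data, &mut out).is_err() {
        panic!("hashing failed");
    }
    out
}

fn main() {
    let digest = hash_with_blake2b_like::<DummyBlake2b>(&[0u8; KEY_LEN], b"domain separator");
    assert_eq!(digest.len(), HASH_LEN);
}
```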
@@ -5,12 +5,128 @@
|
||||
//!
|
||||
//! Conceptually KEMs are akin to public-key encryption, but instead of encrypting
|
||||
//! arbitrary data, KEMs are limited to the transmission of keys, randomly chosen during
|
||||
//!
|
||||
//! encapsulation.
|
||||
//! The [KEM] Trait describes the basic API offered by a Key Encapsulation
|
||||
//! Mechanism. Two implementations for it are provided, [StaticKEM] and [EphemeralKEM].
|
||||
|
||||
use std::result::Result;
|
||||
//!
|
||||
//! The [Kem] Trait describes the basic API offered by a Key Encapsulation
|
||||
//! Mechanism. Two implementations for it are provided:
|
||||
//! [Kyber512](../../rosenpass_oqs/kyber_512/enum.Kyber512.html) and
|
||||
//! [ClassicMceliece460896](../../rosenpass_oqs/classic_mceliece_460896/enum.ClassicMceliece460896.html).
|
||||
//!
|
||||
//! An example where Alice generates a keypair and gives her public key to Bob, for Bob to
|
||||
//! encapsulate a symmetric key and Alice to decapsulate it would look as follows.
|
||||
//! In the example, we are using Kyber512, but any KEM that correctly implements the [Kem]
|
||||
//! trait could be used as well.
|
||||
//!```rust
|
||||
//! use rosenpass_cipher_traits::Kem;
|
||||
//! use rosenpass_oqs::Kyber512;
|
||||
//! # use rosenpass_secret_memory::{secret_policy_use_only_malloc_secrets, Secret};
|
||||
//!
|
||||
//! type MyKem = Kyber512;
|
||||
//! secret_policy_use_only_malloc_secrets();
|
||||
//! let mut alice_sk: Secret<{ MyKem::SK_LEN }> = Secret::zero();
|
||||
//! let mut alice_pk: [u8; MyKem::PK_LEN] = [0; MyKem::PK_LEN];
|
||||
//! MyKem::keygen(alice_sk.secret_mut(), &mut alice_pk)?;
|
||||
//!
|
||||
//! let mut bob_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! let mut bob_ct: [u8; MyKem::CT_LEN] = [0; MyKem::CT_LEN];
|
||||
//! MyKem::encaps(bob_shk.secret_mut(), &mut bob_ct, &mut alice_pk)?;
|
||||
//!
|
||||
//! let mut alice_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! MyKem::decaps(alice_shk.secret_mut(), alice_sk.secret_mut(), &mut bob_ct)?;
|
||||
//!
|
||||
//! # assert_eq!(alice_shk.secret(), bob_shk.secret());
|
||||
//! # Ok::<(), anyhow::Error>(())
|
||||
//!```
|
||||
//!
|
||||
//! Implementing the [Kem]-trait for a KEM is easy. Mostly, you must format the KEM's
|
||||
//! keys, and ciphertext as `u8` slices. Below, we provide an example for how the trait can
|
||||
//! be implemented using a **HORRIBLY INSECURE** DummyKem that only uses static values for keys
|
||||
//! and ciphertexts as an example.
|
||||
//!```rust
|
||||
//!# use rosenpass_cipher_traits::Kem;
|
||||
//!
|
||||
//! struct DummyKem {}
|
||||
//! impl Kem for DummyKem {
|
||||
//!
|
||||
//! // For this DummyKem, using String for errors is sufficient.
|
||||
//! type Error = String;
|
||||
//!
|
||||
//! // For this DummyKem, we will use a single `u8` for everything
|
||||
//! const SK_LEN: usize = 1;
|
||||
//! const PK_LEN: usize = 1;
|
||||
//! const CT_LEN: usize = 1;
|
||||
//! const SHK_LEN: usize = 1;
|
||||
//!
|
||||
//! fn keygen(sk: &mut [u8], pk: &mut [u8]) -> Result<(), Self::Error> {
|
||||
//! if sk.len() != Self::SK_LEN {
|
||||
//! return Err("sk does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! if pk.len() != Self::PK_LEN {
|
||||
//! return Err("pk does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! sk[0] = 42;
|
||||
//! pk[0] = 21;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//!
|
||||
//! fn encaps(shk: &mut [u8], ct: &mut [u8], pk: &[u8]) -> Result<(), Self::Error> {
|
||||
//! if pk.len() != Self::PK_LEN {
|
||||
//! return Err("pk does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! if ct.len() != Self::CT_LEN {
|
||||
//! return Err("ct does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! if shk.len() != Self::SHK_LEN {
|
||||
//! return Err("shk does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! if pk[0] != 21 {
|
||||
//! return Err("Invalid public key!".to_string());
|
||||
//! }
|
||||
//! ct[0] = 7;
|
||||
//! shk[0] = 17;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//!
|
||||
//! fn decaps(shk: &mut [u8], sk: &[u8], ct: &[u8]) -> Result<(), Self::Error> {
|
||||
//! if sk.len() != Self::SK_LEN {
|
||||
//! return Err("sk does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! if ct.len() != Self::CT_LEN {
|
||||
//! return Err("ct does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! if shk.len() != Self::SHK_LEN {
|
||||
//! return Err("shk does not have the correct length!".to_string());
|
||||
//! }
|
||||
//! if sk[0] != 42 {
|
||||
//! return Err("Invalid public key!".to_string());
|
||||
//! }
|
||||
//! if ct[0] != 7 {
|
||||
//! return Err("Invalid ciphertext!".to_string());
|
||||
//! }
|
||||
//! shk[0] = 17;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! }
|
||||
//! # use rosenpass_secret_memory::{secret_policy_use_only_malloc_secrets, Secret};
|
||||
//! #
|
||||
//! # type MyKem = DummyKem;
|
||||
//! # secret_policy_use_only_malloc_secrets();
|
||||
//! # let mut alice_sk: Secret<{ MyKem::SK_LEN }> = Secret::zero();
|
||||
//! # let mut alice_pk: [u8; MyKem::PK_LEN] = [0; MyKem::PK_LEN];
|
||||
//! # MyKem::keygen(alice_sk.secret_mut(), &mut alice_pk)?;
|
||||
//!
|
||||
//! # let mut bob_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! # let mut bob_ct: [u8; MyKem::CT_LEN] = [0; MyKem::CT_LEN];
|
||||
//! # MyKem::encaps(bob_shk.secret_mut(), &mut bob_ct, &mut alice_pk)?;
|
||||
//! #
|
||||
//! # let mut alice_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! # MyKem::decaps(alice_shk.secret_mut(), alice_sk.secret_mut(), &mut bob_ct)?;
|
||||
//! #
|
||||
//! # assert_eq!(alice_shk.secret(), bob_shk.secret());
|
||||
//! #
|
||||
//! # Ok::<(), String>(())
|
||||
//!```
|
||||
//!
|
||||
|
||||
/// Key Encapsulation Mechanism
|
||||
///
|
||||
|
||||
@@ -1,2 +1,5 @@
|
||||
mod kem;
|
||||
pub use kem::Kem;
|
||||
//! This crate contains traits, constants and wrappers that provide the interface between Rosenpass
|
||||
//! as a consumer of cryptographic libraries and the implementations of cryptographic algorithms.
|
||||
|
||||
pub mod algorithms;
|
||||
pub mod primitives;
|
||||
|
||||
cipher-traits/src/primitives.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
//! Traits for cryptographic primitives used in Rosenpass, specifically KEM, AEAD and keyed
|
||||
//! hashing.
|
||||
|
||||
pub(crate) mod aead;
|
||||
pub(crate) mod kem;
|
||||
pub(crate) mod keyed_hash;
|
||||
|
||||
pub use aead::{Aead, AeadWithNonceInCiphertext, Error as AeadError};
|
||||
pub use kem::{Error as KemError, Kem};
|
||||
pub use keyed_hash::*;
|
||||
cipher-traits/src/primitives/aead.rs (new file, 175 lines)
@@ -0,0 +1,175 @@
|
||||
use rosenpass_to::{ops::copy_slice, To as _};
|
||||
use thiserror::Error;
|
||||
|
||||
/// Models authenticated encryption with associated data (AEAD) functionality.
|
||||
///
|
||||
/// The methods of this trait take a `&self` argument as a receiver. This has two reasons:
|
||||
/// 1. It makes type inference a lot smoother
|
||||
/// 2. It allows to use the functionality through a trait object or having an enum that has
|
||||
/// variants for multiple options (like e.g. the `KeyedHash` enum in `rosenpass-ciphers`).
|
||||
///
|
||||
/// Since the caller needs an instance of the type to use the functionality, implementors are
|
||||
/// advised to implement the [`Default`] trait where possible.
|
||||
///
|
||||
/// Example for encrypting a message with a specific [`Aead`] instance:
|
||||
/// ```
|
||||
/// use rosenpass_cipher_traits::primitives::Aead;
|
||||
///
|
||||
/// const KEY_LEN: usize = 32;
|
||||
/// const NONCE_LEN: usize = 12;
|
||||
/// const TAG_LEN: usize = 16;
|
||||
///
|
||||
/// fn encrypt_message_given_an_aead<AeadImpl>(
|
||||
/// aead: &AeadImpl,
|
||||
/// msg: &str,
|
||||
/// nonce: &[u8; NONCE_LEN],
|
||||
/// encrypted: &mut [u8]
|
||||
/// ) where AeadImpl: Aead<KEY_LEN, NONCE_LEN, TAG_LEN> {
|
||||
/// let key = [0u8; KEY_LEN]; // This is not a secure key!
|
||||
/// let ad = b""; // we don't need associated data here
|
||||
/// aead.encrypt(encrypted, &key, nonce, ad, msg.as_bytes()).unwrap();
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// If only the type (but no instance) is available, then we can still encrypt, as long as the type
|
||||
/// also is [`Default`]:
|
||||
/// ```
|
||||
/// use rosenpass_cipher_traits::primitives::Aead;
|
||||
///
|
||||
/// const KEY_LEN: usize = 32;
|
||||
/// const NONCE_LEN: usize = 12;
|
||||
/// const TAG_LEN: usize = 16;
|
||||
///
|
||||
/// fn encrypt_message_without_aead<AeadImpl>(
|
||||
/// msg: &str,
|
||||
/// nonce: &[u8; NONCE_LEN],
|
||||
/// encrypted: &mut [u8]
|
||||
/// ) where AeadImpl: Default + Aead<KEY_LEN, NONCE_LEN, TAG_LEN> {
|
||||
/// let key = [0u8; KEY_LEN]; // This is not a secure key!
|
||||
/// let ad = b""; // we don't need associated data here
|
||||
/// AeadImpl::default().encrypt(encrypted, &key, nonce, ad, msg.as_bytes()).unwrap();
|
||||
/// }
|
||||
/// ```
|
||||
pub trait Aead<const KEY_LEN: usize, const NONCE_LEN: usize, const TAG_LEN: usize> {
|
||||
const KEY_LEN: usize = KEY_LEN;
|
||||
const NONCE_LEN: usize = NONCE_LEN;
|
||||
const TAG_LEN: usize = TAG_LEN;
|
||||
|
||||
/// Encrypts `plaintext` using the given `key` and `nonce`, taking into account the additional
|
||||
/// data `ad` and writes the result into `ciphertext`.
|
||||
///
|
||||
/// `ciphertext` must be exactly `TAG_LEN` longer than `plaintext`.
|
||||
fn encrypt(
|
||||
&self,
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> Result<(), Error>;
|
||||
|
||||
/// Decrypts `ciphertext` using the given `key` and `nonce`, taking into account the additional
|
||||
/// data `ad` and writes the result into `plaintext`.
|
||||
///
|
||||
/// `ciphertext` must be exactly `TAG_LEN` longer than `plaintext`.
|
||||
fn decrypt(
|
||||
&self,
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> Result<(), Error>;
|
||||
}
|
||||
|
||||
/// Provides an AEAD API where the nonce is part of the ciphertext.
|
||||
///
|
||||
/// The old xaead API had the ciphertext begin with the `nonce`. In order to avoid having to change
|
||||
/// the calling code too much, we add a wrapper trait that provides this API and implement it for
|
||||
/// all AEAD.
|
||||
pub trait AeadWithNonceInCiphertext<
|
||||
const KEY_LEN: usize,
|
||||
const NONCE_LEN: usize,
|
||||
const TAG_LEN: usize,
|
||||
>: Aead<KEY_LEN, NONCE_LEN, TAG_LEN>
|
||||
{
|
||||
/// Encrypts `plaintext` using the given `key` and `nonce`, taking into account the additional
|
||||
/// data `ad` and writes the result into `ciphertext`.
|
||||
///
|
||||
/// `ciphertext` must be exactly `TAG_LEN` + `NONCE_LEN` longer than `plaintext`.
|
||||
fn encrypt_with_nonce_in_ctxt(
|
||||
&self,
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> Result<(), Error> {
|
||||
// The comparison looks complicated, but we need to do it this way to prevent
|
||||
// over/underflows.
|
||||
if ciphertext.len() < NONCE_LEN + TAG_LEN
|
||||
|| ciphertext.len() - TAG_LEN - NONCE_LEN < plaintext.len()
|
||||
{
|
||||
return Err(Error::InvalidLengths);
|
||||
}
|
||||
|
||||
let (n, rest) = ciphertext.split_at_mut(NONCE_LEN);
|
||||
copy_slice(nonce).to(n);
|
||||
|
||||
self.encrypt(rest, key, nonce, ad, plaintext)
|
||||
}
|
||||
|
||||
/// Decrypts `ciphertext` using the given `key` and the nonce stored in the ciphertext, taking into account the additional
|
||||
/// data `ad` and writes the result into `plaintext`.
|
||||
///
|
||||
/// `ciphertext` must be exactly `TAG_LEN` + `NONCE_LEN` longer than `plaintext`.
|
||||
fn decrypt_with_nonce_in_ctxt(
|
||||
&self,
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> Result<(), Error> {
|
||||
// The comparison looks complicated, but we need to do it this way to prevent
|
||||
// over/underflows.
|
||||
if ciphertext.len() < NONCE_LEN + TAG_LEN
|
||||
|| ciphertext.len() - TAG_LEN - NONCE_LEN < plaintext.len()
|
||||
{
|
||||
return Err(Error::InvalidLengths);
|
||||
}
|
||||
|
||||
let (nonce, rest) = ciphertext.split_at(NONCE_LEN);
|
||||
// We know this should be the right length (we just split it), and everything else would be
|
||||
// very unexpected.
|
||||
let nonce = nonce.try_into().map_err(|_| Error::InternalError)?;
|
||||
|
||||
self.decrypt(plaintext, key, nonce, ad, rest)
|
||||
}
|
||||
}
|
||||
|
||||
impl<
|
||||
const KEY_LEN: usize,
|
||||
const NONCE_LEN: usize,
|
||||
const TAG_LEN: usize,
|
||||
T: Aead<KEY_LEN, NONCE_LEN, TAG_LEN>,
|
||||
> AeadWithNonceInCiphertext<KEY_LEN, NONCE_LEN, TAG_LEN> for T
|
||||
{
|
||||
}
|
||||
|
||||
/// The error returned by AEAD operations
|
||||
#[derive(Debug, Error)]
|
||||
pub enum Error {
|
||||
/// An internal error occurred. This should never happen and indicates an error in the
|
||||
/// AEAD implementation.
|
||||
#[error("internal error")]
|
||||
InternalError,
|
||||
|
||||
/// Could not decrypt a message because the message is not a valid ciphertext for the given
|
||||
/// key.
|
||||
#[error("decryption error")]
|
||||
DecryptError,
|
||||
|
||||
/// The provided buffers have the wrong lengths.
|
||||
#[error("buffers have invalid length")]
|
||||
InvalidLengths,
|
||||
}
|
||||
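The practical consequence of `AeadWithNonceInCiphertext` for callers is buffer sizing: the output must hold `NONCE_LEN + plaintext.len() + TAG_LEN` bytes, with the nonce written to the front. A hedged usage sketch follows, assuming `rosenpass-cipher-traits` as a dependency; the `NullAead` type is an intentionally worthless stand-in (no encryption, zero tag) invented here only to exercise the blanket impl, and is not part of this diff.

```rust
// Illustrative only: exercise the nonce-in-ciphertext framing with a do-nothing AEAD.
use rosenpass_cipher_traits::primitives::{Aead, AeadError, AeadWithNonceInCiphertext};

struct NullAead;

impl Aead<32, 12, 16> for NullAead {
    fn encrypt(
        &self,
        ciphertext: &mut [u8],
        _key: &[u8; 32],
        _nonce: &[u8; 12],
        _ad: &[u8],
        plaintext: &[u8],
    ) -> Result<(), AeadError> {
        if ciphertext.len() != plaintext.len() + 16 {
            return Err(AeadError::InvalidLengths);
        }
        // Copy the plaintext verbatim and append 16 zero "tag" bytes.
        ciphertext[..plaintext.len()].copy_from_slice(plaintext);
        ciphertext[plaintext.len()..].fill(0);
        Ok(())
    }

    fn decrypt(
        &self,
        plaintext: &mut [u8],
        _key: &[u8; 32],
        _nonce: &[u8; 12],
        _ad: &[u8],
        ciphertext: &[u8],
    ) -> Result<(), AeadError> {
        if ciphertext.len() != plaintext.len() + 16 {
            return Err(AeadError::InvalidLengths);
        }
        plaintext.copy_from_slice(&ciphertext[..plaintext.len()]);
        Ok(())
    }
}

fn main() {
    let (key, nonce, msg) = ([0u8; 32], [0u8; 12], b"hello");
    // Buffer sizing per the wrapper's contract: NONCE_LEN + plaintext + TAG_LEN.
    let mut framed = vec![0u8; 12 + msg.len() + 16];
    NullAead
        .encrypt_with_nonce_in_ctxt(&mut framed, &key, &nonce, b"", msg)
        .unwrap();
    // The wrapper prepends the nonce to the ciphertext.
    assert_eq!(&framed[..12], &nonce[..]);
}
```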
cipher-traits/src/primitives/kem.rs (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
//! Traits and implementations for Key Encapsulation Mechanisms (KEMs)
|
||||
//!
|
||||
//! KEMs are the interface provided by almost all post-quantum
|
||||
//! secure key exchange mechanisms.
|
||||
//!
|
||||
//! Conceptually KEMs are akin to public-key encryption, but instead of encrypting
|
||||
//! arbitrary data, KEMs are limited to the transmission of keys, randomly chosen during
|
||||
//! encapsulation.
|
||||
//!
|
||||
//! The [Kem] Trait describes the basic API offered by a Key Encapsulation
|
||||
//! Mechanism. Two implementations for it are provided:
|
||||
//! [Kyber512](../../rosenpass_oqs/kyber_512/enum.Kyber512.html) and
|
||||
//! [ClassicMceliece460896](../../rosenpass_oqs/classic_mceliece_460896/enum.ClassicMceliece460896.html).
|
||||
//!
|
||||
//! An example where Alice generates a keypair and gives her public key to Bob, for Bob to
|
||||
//! encapsulate a symmetric key and Alice to decapsulate it would look as follows.
|
||||
//! In the example, we are using Kyber512, but any KEM that correctly implements the [Kem]
|
||||
//! trait could be used as well.
|
||||
//!```rust
|
||||
//! use rosenpass_cipher_traits::primitives::Kem;
|
||||
//! use rosenpass_oqs::Kyber512;
|
||||
//! # use rosenpass_secret_memory::{secret_policy_use_only_malloc_secrets, Secret};
|
||||
//!
|
||||
//! type MyKem = Kyber512;
|
||||
//! secret_policy_use_only_malloc_secrets();
|
||||
//! let mut alice_sk: Secret<{ MyKem::SK_LEN }> = Secret::zero();
|
||||
//! let mut alice_pk: [u8; MyKem::PK_LEN] = [0; MyKem::PK_LEN];
|
||||
//! MyKem::default().keygen(alice_sk.secret_mut(), &mut alice_pk)?;
|
||||
//!
|
||||
//! let mut bob_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! let mut bob_ct: [u8; MyKem::CT_LEN] = [0; MyKem::CT_LEN];
|
||||
//! MyKem::default().encaps(bob_shk.secret_mut(), &mut bob_ct, &mut alice_pk)?;
|
||||
//!
|
||||
//! let mut alice_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! MyKem::default().decaps(alice_shk.secret_mut(), alice_sk.secret_mut(), &mut bob_ct)?;
|
||||
//!
|
||||
//! # assert_eq!(alice_shk.secret(), bob_shk.secret());
|
||||
//! # Ok::<(), anyhow::Error>(())
|
||||
//!```
|
||||
//!
|
||||
//! Implementing the [Kem]-trait for a KEM is easy. Mostly, you must format the KEM's
|
||||
//! keys, and ciphertext as `u8` slices. Below, we provide an example for how the trait can
|
||||
//! be implemented using a **HORRIBLY INSECURE** DummyKem that only uses static values for keys
|
||||
//! and ciphertexts as an example.
|
||||
//!```rust
|
||||
//!# use rosenpass_cipher_traits::primitives::{Kem, KemError as Error};
|
||||
//!
|
||||
//! struct DummyKem {}
|
||||
//! impl Kem<1,1,1,1> for DummyKem {
|
||||
//!
|
||||
//! // For this DummyKem, we will use a single `u8` for everything
|
||||
//! const SK_LEN: usize = 1;
|
||||
//! const PK_LEN: usize = 1;
|
||||
//! const CT_LEN: usize = 1;
|
||||
//! const SHK_LEN: usize = 1;
|
||||
//!
|
||||
//! fn keygen(&self, sk: &mut [u8;1], pk: &mut [u8;1]) -> Result<(), Error> {
|
||||
//! sk[0] = 42;
|
||||
//! pk[0] = 21;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//!
|
||||
//! fn encaps(&self, shk: &mut [u8;1], ct: &mut [u8;1], pk: &[u8;1]) -> Result<(), Error> {
|
||||
//! if pk[0] != 21 {
|
||||
//! return Err(Error::InvalidArgument);
|
||||
//! }
|
||||
//! ct[0] = 7;
|
||||
//! shk[0] = 17;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//!
|
||||
//! fn decaps(&self, shk: &mut [u8;1 ], sk: &[u8;1], ct: &[u8;1]) -> Result<(), Error> {
|
||||
//! if sk[0] != 42 {
|
||||
//! return Err(Error::InvalidArgument);
|
||||
//! }
|
||||
//! if ct[0] != 7 {
|
||||
//! return Err(Error::InvalidArgument);
|
||||
//! }
|
||||
//! shk[0] = 17;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! impl Default for DummyKem {
|
||||
//! fn default() -> Self {
|
||||
//! Self{}
|
||||
//! }
|
||||
//! }
|
||||
//! # use rosenpass_secret_memory::{secret_policy_use_only_malloc_secrets, Secret};
|
||||
//! #
|
||||
//! # type MyKem = DummyKem;
|
||||
//! # secret_policy_use_only_malloc_secrets();
|
||||
//! # let mut alice_sk: Secret<{ MyKem::SK_LEN }> = Secret::zero();
|
||||
//! # let mut alice_pk: [u8; MyKem::PK_LEN] = [0; MyKem::PK_LEN];
|
||||
//! # MyKem::default().keygen(alice_sk.secret_mut(), &mut alice_pk)?;
|
||||
//!
|
||||
//! # let mut bob_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! # let mut bob_ct: [u8; MyKem::CT_LEN] = [0; MyKem::CT_LEN];
|
||||
//! # MyKem::default().encaps(bob_shk.secret_mut(), &mut bob_ct, &mut alice_pk)?;
|
||||
//! #
|
||||
//! # let mut alice_shk: Secret<{ MyKem::SHK_LEN }> = Secret::zero();
|
||||
//! # MyKem::default().decaps(alice_shk.secret_mut(), alice_sk.secret_mut(), &mut bob_ct)?;
|
||||
//! #
|
||||
//! # assert_eq!(alice_shk.secret(), bob_shk.secret());
|
||||
//! #
|
||||
//! # Ok::<(), Error>(())
|
||||
//!```
|
||||
//!
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
/// Key Encapsulation Mechanism
|
||||
///
|
||||
/// The KEM interface defines three operations: Key generation, key encapsulation and key
|
||||
/// decapsulation. The parameters are made available as associated constants for convenience.
|
||||
///
|
||||
/// The methods of this trait take a `&self` argument as a receiver. This has two reasons:
|
||||
/// 1. It makes type inference a lot smoother
|
||||
/// 2. It allows to use the functionality through a trait object or having an enum that has
|
||||
/// variants for multiple options (like e.g. the `KeyedHash` enum in `rosenpass-ciphers`).
|
||||
///
|
||||
/// Since the caller needs an instance of the type to use the functionality, implementors are
|
||||
/// advised to implement the [`Default`] trait where possible.
|
||||
///
|
||||
/// Example for encrypting a message with a specific [`Kem`] instance:
|
||||
/// ```
|
||||
/// use rosenpass_cipher_traits::primitives::Kem;
|
||||
///
|
||||
/// const SK_LEN: usize = 1632;
|
||||
/// const PK_LEN: usize = 800;
|
||||
/// const CT_LEN: usize = 768;
|
||||
/// const SHK_LEN: usize = 32;
|
||||
///
|
||||
/// fn encaps_given_a_kem<KemImpl>(
|
||||
/// kem: &KemImpl,
|
||||
/// pk: &[u8; PK_LEN],
|
||||
/// ct: &mut [u8; CT_LEN]
|
||||
/// ) -> [u8; SHK_LEN] where KemImpl: Kem<SK_LEN, PK_LEN, CT_LEN, SHK_LEN>{
|
||||
/// let mut shk = [0u8; SHK_LEN];
|
||||
/// kem.encaps(&mut shk, ct, pk).unwrap();
|
||||
/// shk
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// If only the type (but no instance) is available, then we can still use the trait, as long as
|
||||
/// the type also is [`Default`]:
|
||||
/// ```
|
||||
/// use rosenpass_cipher_traits::primitives::Kem;
|
||||
///
|
||||
/// const SK_LEN: usize = 1632;
|
||||
/// const PK_LEN: usize = 800;
|
||||
/// const CT_LEN: usize = 768;
|
||||
/// const SHK_LEN: usize = 32;
|
||||
///
|
||||
/// fn encaps_without_kem<KemImpl>(
|
||||
/// pk: &[u8; PK_LEN],
|
||||
/// ct: &mut [u8; CT_LEN]
|
||||
/// ) -> [u8; SHK_LEN]
|
||||
/// where KemImpl: Default + Kem<SK_LEN, PK_LEN, CT_LEN, SHK_LEN> {
|
||||
/// let mut shk = [0u8; SHK_LEN];
|
||||
/// KemImpl::default().encaps(&mut shk, ct, pk).unwrap();
|
||||
/// shk
|
||||
/// }
|
||||
/// ```
|
||||
pub trait Kem<const SK_LEN: usize, const PK_LEN: usize, const CT_LEN: usize, const SHK_LEN: usize> {
|
||||
/// The length of the secret (decapsulation) key.
|
||||
const SK_LEN: usize = SK_LEN;
|
||||
|
||||
/// The length of the public (encapsulation) key.
|
||||
const PK_LEN: usize = PK_LEN;
|
||||
|
||||
/// The length of the ciphertext.
|
||||
const CT_LEN: usize = CT_LEN;
|
||||
|
||||
/// The length of the resulting shared key.
|
||||
const SHK_LEN: usize = SHK_LEN;
|
||||
|
||||
/// Generate a keypair consisting of secret key (`sk`) and public key (`pk`)
|
||||
///
|
||||
/// `keygen() -> sk, pk`
|
||||
fn keygen(&self, sk: &mut [u8; SK_LEN], pk: &mut [u8; PK_LEN]) -> Result<(), Error>;
|
||||
|
||||
/// From a public key (`pk`), generate a shared key (`shk`, for local use)
|
||||
/// and a cipher text (`ct`, to be sent to the owner of the `pk`).
|
||||
///
|
||||
/// `encaps(pk) -> shk, ct`
|
||||
fn encaps(
|
||||
&self,
|
||||
shk: &mut [u8; SHK_LEN],
|
||||
ct: &mut [u8; CT_LEN],
|
||||
pk: &[u8; PK_LEN],
|
||||
) -> Result<(), Error>;
|
||||
|
||||
/// From a secret key (`sk`) and a cipher text (`ct`) derive a shared key
|
||||
/// (`shk`)
|
||||
///
|
||||
/// `decaps(sk, ct) -> shk`
|
||||
fn decaps(
|
||||
&self,
|
||||
shk: &mut [u8; SHK_LEN],
|
||||
sk: &[u8; SK_LEN],
|
||||
ct: &[u8; CT_LEN],
|
||||
) -> Result<(), Error>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum Error {
|
||||
#[error("invalid argument")]
|
||||
InvalidArgument,
|
||||
#[error("internal error")]
|
||||
InternalError,
|
||||
}
|
||||
cipher-traits/src/primitives/keyed_hash.rs (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
use std::marker::PhantomData;
|
||||
|
||||
/// Models a keyed hash function using an associated function (i.e. without `&self` receiver).
|
||||
pub trait KeyedHash<const KEY_LEN: usize, const HASH_LEN: usize> {
|
||||
/// The error type used to signal what went wrong.
|
||||
type Error;
|
||||
|
||||
/// Performs a keyed hash using `key` and `data` and writes the output to `out`
|
||||
fn keyed_hash(
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Self::Error>;
|
||||
}
|
||||
|
||||
/// Models a keyed hash function using a method (i.e. with a `&self` receiver).
|
||||
///
|
||||
/// This makes type inference easier, but also requires having a [`KeyedHashInstance`] value,
|
||||
/// instead of just the [`KeyedHash`] type.
|
||||
pub trait KeyedHashInstance<const KEY_LEN: usize, const HASH_LEN: usize> {
|
||||
/// The error type used to signal what went wrong.
|
||||
type Error;
|
||||
|
||||
/// Performs a keyed hash using `key` and `data` and writes the output to `out`
|
||||
fn keyed_hash(
|
||||
&self,
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Self::Error>;
|
||||
}
|
||||
|
||||
/// This is a helper to allow for type parameter inference when calling functions
|
||||
/// that need a [KeyedHash].
|
||||
///
|
||||
/// Really just binds the [KeyedHash] trait to a dummy variable, so the type of this dummy variable
|
||||
/// can be used for type inference. Less typing work.
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct InferKeyedHash<Static, const KEY_LEN: usize, const HASH_LEN: usize>
|
||||
where
|
||||
Static: KeyedHash<KEY_LEN, HASH_LEN>,
|
||||
{
|
||||
pub _phantom_keyed_hasher: PhantomData<*const Static>,
|
||||
}
|
||||
|
||||
impl<Static, const KEY_LEN: usize, const HASH_LEN: usize> InferKeyedHash<Static, KEY_LEN, HASH_LEN>
|
||||
where
|
||||
Static: KeyedHash<KEY_LEN, HASH_LEN>,
|
||||
{
|
||||
pub const KEY_LEN: usize = KEY_LEN;
|
||||
pub const HASH_LEN: usize = HASH_LEN;
|
||||
|
||||
pub const fn new() -> Self {
|
||||
Self {
|
||||
_phantom_keyed_hasher: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// This just forwards to [KeyedHash::keyed_hash] of the type parameter `Static`
|
||||
fn keyed_hash_internal<'a>(
|
||||
&self,
|
||||
key: &'a [u8; KEY_LEN],
|
||||
data: &'a [u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Static::Error> {
|
||||
Static::keyed_hash(key, data, out)
|
||||
}
|
||||
|
||||
/// Returns the key length of the keyed hash function.
|
||||
pub const fn key_len(self) -> usize {
|
||||
Self::KEY_LEN
|
||||
}
|
||||
|
||||
/// Returns the hash length of the keyed hash function.
|
||||
pub const fn hash_len(self) -> usize {
|
||||
Self::HASH_LEN
|
||||
}
|
||||
}
|
||||
|
||||
impl<const KEY_LEN: usize, const HASH_LEN: usize, Static: KeyedHash<KEY_LEN, HASH_LEN>>
|
||||
KeyedHashInstance<KEY_LEN, HASH_LEN> for InferKeyedHash<Static, KEY_LEN, HASH_LEN>
|
||||
{
|
||||
type Error = Static::Error;
|
||||
|
||||
fn keyed_hash(
|
||||
&self,
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Static::Error> {
|
||||
self.keyed_hash_internal(key, data, out)
|
||||
}
|
||||
}
|
||||
|
||||
// Helper traits /////////////////////////////////////////////
|
||||
|
||||
impl<Static, const KEY_LEN: usize, const OUT_LEN: usize> Default
|
||||
for InferKeyedHash<Static, KEY_LEN, OUT_LEN>
|
||||
where
|
||||
Static: KeyedHash<KEY_LEN, OUT_LEN>,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Static, const KEY_LEN: usize, const OUT_LEN: usize> Clone
|
||||
for InferKeyedHash<Static, KEY_LEN, OUT_LEN>
|
||||
where
|
||||
Static: KeyedHash<KEY_LEN, OUT_LEN>,
|
||||
{
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<Static, const KEY_LEN: usize, const OUT_LEN: usize> Copy
|
||||
for InferKeyedHash<Static, KEY_LEN, OUT_LEN>
|
||||
where
|
||||
Static: KeyedHash<KEY_LEN, OUT_LEN>,
|
||||
{
|
||||
}
|
||||
|
||||
use rosenpass_to::{with_destination, To};
|
||||
|
||||
/// Extends the [`KeyedHash`] trait with a [`To`]-flavoured function.
|
||||
pub trait KeyedHashTo<const KEY_LEN: usize, const HASH_LEN: usize>:
|
||||
KeyedHash<KEY_LEN, HASH_LEN>
|
||||
{
|
||||
fn keyed_hash_to(
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
) -> impl To<[u8; HASH_LEN], Result<(), Self::Error>> {
|
||||
with_destination(|out| Self::keyed_hash(key, data, out))
|
||||
}
|
||||
}
|
||||
|
||||
impl<const KEY_LEN: usize, const HASH_LEN: usize, T: KeyedHash<KEY_LEN, HASH_LEN>>
|
||||
KeyedHashTo<KEY_LEN, HASH_LEN> for T
|
||||
{
|
||||
}
|
||||
|
||||
/// Extends the [`KeyedHashInstance`] trait with a [`To`]-flavoured function.
|
||||
pub trait KeyedHashInstanceTo<const KEY_LEN: usize, const HASH_LEN: usize>:
|
||||
KeyedHashInstance<KEY_LEN, HASH_LEN>
|
||||
{
|
||||
fn keyed_hash_to(
|
||||
&self,
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
) -> impl To<[u8; HASH_LEN], Result<(), Self::Error>> {
|
||||
with_destination(|out| self.keyed_hash(key, data, out))
|
||||
}
|
||||
}
|
||||
|
||||
impl<const KEY_LEN: usize, const HASH_LEN: usize, T: KeyedHashInstance<KEY_LEN, HASH_LEN>>
|
||||
KeyedHashInstanceTo<KEY_LEN, HASH_LEN> for T
|
||||
{
|
||||
}
|
||||
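`InferKeyedHash` is easiest to see with a throwaway hash: the zero-sized marker value carries the hash type, so the `KeyedHashInstance` call sites need no turbofish. A minimal sketch, assuming `rosenpass-cipher-traits` as a dependency; the `DummyHash` type is an illustrative assumption and not part of this diff.

```rust
// Illustrative only: drive type inference through an InferKeyedHash marker value.
use rosenpass_cipher_traits::primitives::{InferKeyedHash, KeyedHash, KeyedHashInstance};

struct DummyHash;

impl KeyedHash<32, 32> for DummyHash {
    type Error = std::convert::Infallible;

    fn keyed_hash(key: &[u8; 32], data: &[u8], out: &mut [u8; 32]) -> Result<(), Self::Error> {
        // Not a real hash: copy the key and fold in the data length.
        out.copy_from_slice(key);
        out[0] ^= data.len() as u8;
        Ok(())
    }
}

fn main() {
    // The marker value is zero-sized; only its type matters.
    let hasher: InferKeyedHash<DummyHash, 32, 32> = InferKeyedHash::new();
    let mut out = [0u8; 32];
    // Forwarded to DummyHash::keyed_hash via the KeyedHashInstance impl.
    hasher.keyed_hash(&[0u8; 32], b"hello", &mut out).unwrap();
    assert_eq!(hasher.key_len(), 32);
}
```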
@@ -8,6 +8,22 @@ description = "Rosenpass internal ciphers and other cryptographic primitives use
|
||||
homepage = "https://rosenpass.eu/"
|
||||
repository = "https://github.com/rosenpass/rosenpass"
|
||||
readme = "readme.md"
|
||||
rust-version = "1.77"
|
||||
|
||||
[features]
|
||||
experiment_libcrux_all = [
|
||||
"experiment_libcrux_blake2",
|
||||
"experiment_libcrux_chachapoly",
|
||||
"experiment_libcrux_chachapoly_test",
|
||||
"experiment_libcrux_kyber",
|
||||
]
|
||||
experiment_libcrux_blake2 = ["dep:libcrux-blake2", "dep:thiserror"]
|
||||
experiment_libcrux_chachapoly = ["dep:libcrux-chacha20poly1305"]
|
||||
experiment_libcrux_chachapoly_test = [
|
||||
"experiment_libcrux_chachapoly",
|
||||
"dep:libcrux",
|
||||
]
|
||||
experiment_libcrux_kyber = ["dep:libcrux-ml-kem", "dep:rand"]
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
@@ -16,7 +32,21 @@ rosenpass-constant-time = { workspace = true }
|
||||
rosenpass-secret-memory = { workspace = true }
|
||||
rosenpass-oqs = { workspace = true }
|
||||
rosenpass-util = { workspace = true }
|
||||
rosenpass-cipher-traits = { workspace = true }
|
||||
static_assertions = { workspace = true }
|
||||
zeroize = { workspace = true }
|
||||
chacha20poly1305 = { workspace = true }
|
||||
blake2 = { workspace = true }
|
||||
sha3 = { workspace = true }
|
||||
rand = { workspace = true, optional = true }
|
||||
thiserror = { workspace = true, optional = true }
|
||||
|
||||
libcrux-chacha20poly1305 = { workspace = true, optional = true }
|
||||
libcrux-blake2 = { workspace = true, optional = true }
|
||||
libcrux-ml-kem = { workspace = true, optional = true, features = ["kyber"] }
|
||||
|
||||
# this one is only used in testing, so it requires the `experiment_libcrux_chachapoly_test` feature.
|
||||
libcrux = { workspace = true, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rand = { workspace = true }
|
||||
|
||||
@@ -1,109 +1,209 @@
|
||||
//!
|
||||
//!```rust
|
||||
//! # use rosenpass_ciphers::hash_domain::{HashDomain, HashDomainNamespace, SecretHashDomain, SecretHashDomainNamespace};
|
||||
//! use rosenpass_ciphers::KeyedHash;
|
||||
//! use rosenpass_secret_memory::Secret;
|
||||
//! # rosenpass_secret_memory::secret_policy_use_only_malloc_secrets();
|
||||
//!
|
||||
//! const PROTOCOL_IDENTIFIER: &str = "MY_PROTOCOL:IDENTIFIER";
|
||||
//! // create use once hash domain for the protocol identifier
|
||||
//! let mut hash_domain = HashDomain::zero(KeyedHash::keyed_shake256());
|
||||
//! hash_domain = hash_domain.mix(PROTOCOL_IDENTIFIER.as_bytes())?;
|
||||
//! // upgrade to reusable hash domain
|
||||
//! let hash_domain_namespace: HashDomainNamespace = hash_domain.dup();
|
||||
//! // derive new key
|
||||
//! let key_identifier = "my_key_identifier";
|
||||
//! let key = hash_domain_namespace.mix(key_identifier.as_bytes())?.into_value();
|
||||
//! // derive a new key based on a secret
|
||||
//! const MY_SECRET_LEN: usize = 21;
|
||||
//! let my_secret_bytes = "my super duper secret".as_bytes();
|
||||
//! let my_secret: Secret<21> = Secret::from_slice("my super duper secret".as_bytes());
|
||||
//! let secret_hash_domain: SecretHashDomain = hash_domain_namespace.mix_secret(my_secret)?;
|
||||
//! // derive a new key based on the secret key
|
||||
//! let new_key_identifier = "my_new_key_identifier".as_bytes();
|
||||
//! let new_key = secret_hash_domain.mix(new_key_identifier)?.into_secret();
|
||||
//!
|
||||
//! # Ok::<(), anyhow::Error>(())
|
||||
//!```
|
||||
//!
|
||||
|
||||
use anyhow::Result;
|
||||
use rosenpass_secret_memory::Secret;
|
||||
use rosenpass_to::To;
|
||||
use rosenpass_to::To as _;
|
||||
|
||||
use crate::subtle::incorrect_hmac_blake2b as hash;
|
||||
pub use crate::{KeyedHash, KEY_LEN};
|
||||
|
||||
pub use hash::KEY_LEN;
|
||||
use rosenpass_cipher_traits::primitives::KeyedHashInstanceTo;
|
||||
|
||||
// TODO Use a proper Dec interface
|
||||
/// A use-once hash domain for a specified key that can be used directly.
|
||||
/// The key must consist of [KEY_LEN] many bytes. If the key must remain secret,
|
||||
/// use [SecretHashDomain] instead.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct HashDomain([u8; KEY_LEN]);
|
||||
pub struct HashDomain([u8; KEY_LEN], KeyedHash);
|
||||
/// A reusable hash domain for a namespace identified by the key.
|
||||
/// The key must consist of [KEY_LEN] many bytes. If the key must remain secret,
|
||||
/// use [SecretHashDomainNamespace] instead.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct HashDomainNamespace([u8; KEY_LEN]);
|
||||
pub struct HashDomainNamespace([u8; KEY_LEN], KeyedHash);
|
||||
/// A use-once hash domain for a specified key that can be used directly
|
||||
/// by wrapping it in [Secret]. The key must consist of [KEY_LEN] many bytes.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct SecretHashDomain(Secret<KEY_LEN>);
|
||||
pub struct SecretHashDomain(Secret<KEY_LEN>, KeyedHash);
|
||||
/// A reusable secure hash domain for a namespace identified by the key and that keeps the key secure
|
||||
/// by wrapping it in [Secret]. The key must consist of [KEY_LEN] many bytes.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct SecretHashDomainNamespace(Secret<KEY_LEN>);
|
||||
pub struct SecretHashDomainNamespace(Secret<KEY_LEN>, KeyedHash);
|
||||
|
||||
impl HashDomain {
|
||||
pub fn zero() -> Self {
|
||||
Self([0u8; KEY_LEN])
|
||||
/// Creates a new [HashDomain] initialized with an all-zeros key.
|
||||
pub fn zero(choice: KeyedHash) -> Self {
|
||||
Self([0u8; KEY_LEN], choice)
|
||||
}
|
||||
|
||||
/// Turns this [HashDomain] into a [HashDomainNamespace], keeping the key.
|
||||
pub fn dup(self) -> HashDomainNamespace {
|
||||
HashDomainNamespace(self.0)
|
||||
HashDomainNamespace(self.0, self.1)
|
||||
}
|
||||
|
||||
/// Turns this [HashDomain] into a [SecretHashDomain] by wrapping the key into a [Secret]
|
||||
/// and creating a new [SecretHashDomain] from it.
|
||||
pub fn turn_secret(self) -> SecretHashDomain {
|
||||
SecretHashDomain(Secret::from_slice(&self.0))
|
||||
SecretHashDomain(Secret::from_slice(&self.0), self.1)
|
||||
}
|
||||
|
||||
// TODO: Protocol! Use domain separation to ensure that
|
||||
/// Creates a new [HashDomain] by mixing in a new key `v`. Specifically,
|
||||
/// it evaluates [hash::hash] with this HashDomain's key as the key and `v`
|
||||
/// as the `data` and uses the result as the key for the new [HashDomain].
|
||||
///
|
||||
pub fn mix(self, v: &[u8]) -> Result<Self> {
|
||||
Ok(Self(hash::hash(&self.0, v).collect::<[u8; KEY_LEN]>()?))
|
||||
let mut new_key: [u8; KEY_LEN] = [0u8; KEY_LEN];
|
||||
self.1.keyed_hash_to(&self.0, v).to(&mut new_key)?;
|
||||
Ok(Self(new_key, self.1))
|
||||
}
|
||||
|
||||
/// Creates a new [SecretHashDomain] by mixing in a new key `v`
|
||||
/// by calling [SecretHashDomain::invoke_primitive] with this
|
||||
/// [HashDomain]'s key as `k` and `v` as `d`.
|
||||
pub fn mix_secret<const N: usize>(self, v: Secret<N>) -> Result<SecretHashDomain> {
|
||||
SecretHashDomain::invoke_primitive(&self.0, v.secret())
|
||||
SecretHashDomain::invoke_primitive(&self.0, v.secret(), self.1)
|
||||
}
|
||||
|
||||
/// Gets the key of this [HashDomain].
|
||||
pub fn into_value(self) -> [u8; KEY_LEN] {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl HashDomainNamespace {
|
||||
/// Creates a new [HashDomain] by mixing in a new key `v`. Specifically,
|
||||
/// it evaluates [hash::hash] with the key of this HashDomainNamespace as the key and `v`
|
||||
/// as the `data` and uses the result as the key for the new [HashDomain].
|
||||
pub fn mix(&self, v: &[u8]) -> Result<HashDomain> {
|
||||
Ok(HashDomain(
|
||||
hash::hash(&self.0, v).collect::<[u8; KEY_LEN]>()?,
|
||||
))
|
||||
let mut new_key: [u8; KEY_LEN] = [0u8; KEY_LEN];
|
||||
self.1.keyed_hash_to(&self.0, v).to(&mut new_key)?;
|
||||
Ok(HashDomain(new_key, self.1.clone()))
|
||||
}
|
||||
|
||||
/// Creates a new [SecretHashDomain] by mixing in a new key `v`
|
||||
/// by calling [SecretHashDomain::invoke_primitive] with the key of this
|
||||
/// [HashDomainNamespace] as `k` and `v` as `d`.
|
||||
///
|
||||
/// It requires that `v` consists of exactly [KEY_LEN] many bytes.
|
||||
pub fn mix_secret<const N: usize>(&self, v: Secret<N>) -> Result<SecretHashDomain> {
|
||||
SecretHashDomain::invoke_primitive(&self.0, v.secret())
|
||||
SecretHashDomain::invoke_primitive(&self.0, v.secret(), self.1.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl SecretHashDomain {
|
||||
pub fn invoke_primitive(k: &[u8], d: &[u8]) -> Result<SecretHashDomain> {
|
||||
let mut r = SecretHashDomain(Secret::zero());
|
||||
hash::hash(k, d).to(r.0.secret_mut())?;
|
||||
/// Create a new [SecretHashDomain] with the given key `k` and data `d` by calling
|
||||
/// [hash::hash] with `k` as the `key` and `d` as the `data`, and using the result
|
||||
/// as the content for the new [SecretHashDomain].
|
||||
/// Both `k` and `d` have to be exactly [KEY_LEN] bytes in length.
|
||||
/// TODO: docu
|
||||
pub fn invoke_primitive(
|
||||
k: &[u8],
|
||||
d: &[u8],
|
||||
hash_choice: KeyedHash,
|
||||
) -> Result<SecretHashDomain> {
|
||||
let mut new_secret_key = Secret::zero();
|
||||
hash_choice
|
||||
.keyed_hash_to(k.try_into()?, d)
|
||||
.to(new_secret_key.secret_mut())?;
|
||||
let r = SecretHashDomain(new_secret_key, hash_choice);
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
pub fn zero() -> Self {
|
||||
Self(Secret::zero())
|
||||
/// Creates a new [SecretHashDomain] that is initialized with an all zeros key.
|
||||
pub fn zero(hash_choice: KeyedHash) -> Self {
|
||||
Self(Secret::zero(), hash_choice)
|
||||
}
|
||||
|
||||
/// Turns this [SecretHashDomain] into a [SecretHashDomainNamespace].
|
||||
pub fn dup(self) -> SecretHashDomainNamespace {
|
||||
SecretHashDomainNamespace(self.0)
|
||||
SecretHashDomainNamespace(self.0, self.1)
|
||||
}
|
||||
|
||||
pub fn danger_from_secret(k: Secret<KEY_LEN>) -> Self {
|
||||
Self(k)
|
||||
/// Creates a new [SecretHashDomain] from a [Secret] `k`.
|
||||
///
|
||||
/// It requires that `k` consist of exactly [KEY_LEN] bytes.
|
||||
pub fn danger_from_secret(k: Secret<KEY_LEN>, hash_choice: KeyedHash) -> Self {
|
||||
Self(k, hash_choice)
|
||||
}
|
||||
|
||||
/// Creates a new [SecretHashDomain] by mixing in a new key `v`. Specifically,
|
||||
/// it evaluates [hash::hash] with this [SecretHashDomain]'s key as the key and `v`
|
||||
/// as the `data` and uses the result as the key for the new [SecretHashDomain].
|
||||
///
|
||||
/// It requires that `v` consists of exactly [KEY_LEN] many bytes.
|
||||
pub fn mix(self, v: &[u8]) -> Result<SecretHashDomain> {
|
||||
Self::invoke_primitive(self.0.secret(), v)
|
||||
Self::invoke_primitive(self.0.secret(), v, self.1)
|
||||
}
|
||||
|
||||
/// Creates a new [SecretHashDomain] by mixing in a new key `v`
|
||||
/// by calling [SecretHashDomain::invoke_primitive] with the key of this
|
||||
/// [SecretHashDomain] as `k` and `v` as `d`.
|
||||
///
|
||||
/// It requires that `v` consists of exactly [KEY_LEN] many bytes.
|
||||
pub fn mix_secret<const N: usize>(self, v: Secret<N>) -> Result<SecretHashDomain> {
|
||||
Self::invoke_primitive(self.0.secret(), v.secret())
|
||||
Self::invoke_primitive(self.0.secret(), v.secret(), self.1)
|
||||
}
|
||||
|
||||
/// Get the secret key data from this [SecretHashDomain].
|
||||
pub fn into_secret(self) -> Secret<KEY_LEN> {
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn into_secret_slice(mut self, v: &[u8], dst: &[u8]) -> Result<()> {
|
||||
hash::hash(v, dst).to(self.0.secret_mut())
|
||||
}
|
||||
}
|
||||
|
||||
impl SecretHashDomainNamespace {
|
||||
/// Creates a new [SecretHashDomain] by mixing in a new key `v`. Specifically,
|
||||
/// it evaluates [hash::hash] with the key of this SecretHashDomainNamespace as the key and `v`
|
||||
/// as the `data` and uses the result as the key for the new [SecretHashDomain].
|
||||
///
|
||||
/// It requires that `v` consists of exactly [KEY_LEN] many bytes.
|
||||
pub fn mix(&self, v: &[u8]) -> Result<SecretHashDomain> {
|
||||
SecretHashDomain::invoke_primitive(self.0.secret(), v)
|
||||
SecretHashDomain::invoke_primitive(self.0.secret(), v, self.1.clone())
|
||||
}
|
||||
|
||||
/// Creates a new [SecretHashDomain] by mixing in a new key `v`
|
||||
/// by calling [SecretHashDomain::invoke_primitive] with the key of this
|
||||
/// [SecretHashDomainNamespace] as `k` and `v` as `d`.
|
||||
///
|
||||
/// It requires that `v` consists of exactly [KEY_LEN] many bytes.
|
||||
pub fn mix_secret<const N: usize>(&self, v: Secret<N>) -> Result<SecretHashDomain> {
|
||||
SecretHashDomain::invoke_primitive(self.0.secret(), v.secret())
|
||||
SecretHashDomain::invoke_primitive(self.0.secret(), v.secret(), self.1.clone())
|
||||
}
|
||||
|
||||
// TODO: This entire API is not very nice; we need this for biscuits, but
|
||||
// it might be better to extract a special "biscuit"
|
||||
// labeled subkey and reinitialize the chain with this
|
||||
/// Get the secret key data from this [SecretHashDomainNamespace].
|
||||
pub fn danger_into_secret(self) -> Secret<KEY_LEN> {
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn keyed_hash(&self) -> &KeyedHash {
|
||||
&self.1
|
||||
}
|
||||
}
|
||||
|
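The `mix`/`mix_secret` chain above is the core pattern of this module: every public or secret protocol value is folded into a running key under an explicitly chosen keyed hash. A minimal usage sketch follows, using the `hash_domain` module and `KeyedHash` selector from this diff; it assumes `Secret<N>` is `rosenpass_secret_memory::Secret`, and the label, key size, and wrapper function are illustrative only.

```rust
// Sketch only: derive a key by chaining mix() and mix_secret().
// Assumes Secret<N> comes from rosenpass_secret_memory; other paths are from this diff.
use anyhow::Result;
use rosenpass_ciphers::hash_domain::{HashDomain, SecretHashDomain};
use rosenpass_ciphers::KeyedHash;
use rosenpass_secret_memory::Secret;

fn derive_chain_key(psk: Secret<32>, label: &[u8]) -> Result<Secret<32>> {
    // Start from the all-zeros key, mix in a public label, then a secret value.
    let dom: HashDomain = HashDomain::zero(KeyedHash::keyed_shake256()).mix(label)?;
    let secret_dom: SecretHashDomain = dom.mix_secret(psk)?;
    Ok(secret_dom.into_secret())
}
```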
||||
@@ -1,27 +1,46 @@
|
||||
use rosenpass_cipher_traits::primitives::Aead as AeadTrait;
|
||||
use static_assertions::const_assert;
|
||||
|
||||
pub mod subtle;
|
||||
|
||||
/// All keyed primitives in this crate use 32 byte keys
|
||||
pub const KEY_LEN: usize = 32;
|
||||
const_assert!(KEY_LEN == aead::KEY_LEN);
|
||||
const_assert!(KEY_LEN == xaead::KEY_LEN);
|
||||
const_assert!(KEY_LEN == Aead::KEY_LEN);
|
||||
const_assert!(KEY_LEN == XAead::KEY_LEN);
|
||||
const_assert!(KEY_LEN == hash_domain::KEY_LEN);
|
||||
|
||||
/// Authenticated encryption with associated data
|
||||
pub mod aead {
|
||||
pub use crate::subtle::chacha20poly1305_ietf::{decrypt, encrypt, KEY_LEN, NONCE_LEN, TAG_LEN};
|
||||
}
|
||||
/// Keyed hashing
|
||||
///
|
||||
/// This should only be used for implementation details; anything with relevance
|
||||
/// to the cryptographic protocol should use the facilities in [hash_domain] (though
|
||||
/// hash domain uses this module internally)
|
||||
pub use crate::subtle::keyed_hash::KeyedHash;
|
||||
|
||||
/// Authenticated encryption with associated data with a constant nonce
|
||||
pub mod xaead {
|
||||
pub use crate::subtle::xchacha20poly1305_ietf::{
|
||||
decrypt, encrypt, KEY_LEN, NONCE_LEN, TAG_LEN,
|
||||
};
|
||||
}
|
||||
/// Authenticated encryption with associated data (AEAD)
|
||||
/// Chacha20poly1305 is used.
|
||||
#[cfg(feature = "experiment_libcrux_chachapoly")]
|
||||
pub use subtle::libcrux::chacha20poly1305_ietf::ChaCha20Poly1305 as Aead;
|
||||
|
||||
/// Authenticated encryption with associated data (AEAD)
|
||||
/// Chacha20poly1305 is used.
|
||||
#[cfg(not(feature = "experiment_libcrux_chachapoly"))]
|
||||
pub use crate::subtle::rust_crypto::chacha20poly1305_ietf::ChaCha20Poly1305 as Aead;
|
||||
|
||||
/// Authenticated encryption with associated data with an extended-length nonce (XAEAD)
|
||||
/// XChacha20poly1305 is used.
|
||||
pub use crate::subtle::rust_crypto::xchacha20poly1305_ietf::XChaCha20Poly1305 as XAead;
|
||||
|
||||
/// Use Classic McEliece 460896 as the Static KEM.
|
||||
///
|
||||
/// See [rosenpass_oqs::ClassicMceliece460896] for more details.
|
||||
pub use rosenpass_oqs::ClassicMceliece460896 as StaticKem;
|
||||
|
||||
/// Use Kyber-512 as the Ephemeral KEM
|
||||
///
|
||||
/// See [rosenpass_oqs::Kyber512] for more details.
|
||||
#[cfg(not(feature = "experiment_libcrux_kyber"))]
|
||||
pub use rosenpass_oqs::Kyber512 as EphemeralKem;
|
||||
#[cfg(feature = "experiment_libcrux_kyber")]
|
||||
pub use subtle::libcrux::kyber512::Kyber512 as EphemeralKem;
|
||||
|
||||
pub mod hash_domain;
|
||||
|
||||
pub mod kem {
|
||||
pub use rosenpass_oqs::ClassicMceliece460896 as StaticKem;
|
||||
pub use rosenpass_oqs::Kyber512 as EphemeralKem;
|
||||
}
|
||||
|
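Since this hunk replaces the old `aead`/`xaead` function modules with struct-based re-exports, a rough usage sketch of the new `Aead` alias may help. The trait and constant import paths are taken from this diff; the wrapper function and test values are illustrative, and which backend is active depends on the `experiment_libcrux_chachapoly` feature.

```rust
// Sketch only: round-trip through whichever ChaCha20Poly1305 backend the feature flags select.
use rosenpass_cipher_traits::algorithms::aead_chacha20poly1305::{KEY_LEN, NONCE_LEN, TAG_LEN};
use rosenpass_cipher_traits::primitives::Aead as AeadTrait;
use rosenpass_ciphers::Aead;

fn aead_roundtrip() {
    let key = [0u8; KEY_LEN]; // THIS IS NOT A SECURE KEY
    let nonce = [0u8; NONCE_LEN]; // THIS IS NOT A SECURE NONCE
    let ad = b"associated data";
    let msg = b"hello rosenpass";

    // The ciphertext buffer carries the authentication tag in its last TAG_LEN bytes.
    let mut ct = vec![0u8; msg.len() + TAG_LEN];
    Aead.encrypt(&mut ct, &key, &nonce, ad, msg).unwrap();

    let mut pt = vec![0u8; msg.len()];
    Aead.decrypt(&mut pt, &key, &nonce, ad, &ct).unwrap();
    assert_eq!(&pt[..], &msg[..]);
}
```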
||||
@@ -1,42 +0,0 @@
|
||||
use zeroize::Zeroizing;
|
||||
|
||||
use blake2::digest::crypto_common::generic_array::GenericArray;
|
||||
use blake2::digest::crypto_common::typenum::U32;
|
||||
use blake2::digest::crypto_common::KeySizeUser;
|
||||
use blake2::digest::{FixedOutput, Mac, OutputSizeUser};
|
||||
use blake2::Blake2bMac;
|
||||
|
||||
use rosenpass_to::{ops::copy_slice, with_destination, To};
|
||||
use rosenpass_util::typenum2const;
|
||||
|
||||
type Impl = Blake2bMac<U32>;
|
||||
|
||||
type KeyLen = <Impl as KeySizeUser>::KeySize;
|
||||
type OutLen = <Impl as OutputSizeUser>::OutputSize;
|
||||
|
||||
const KEY_LEN: usize = typenum2const! { KeyLen };
|
||||
const OUT_LEN: usize = typenum2const! { OutLen };
|
||||
|
||||
pub const KEY_MIN: usize = KEY_LEN;
|
||||
pub const KEY_MAX: usize = KEY_LEN;
|
||||
pub const OUT_MIN: usize = OUT_LEN;
|
||||
pub const OUT_MAX: usize = OUT_LEN;
|
||||
|
||||
#[inline]
|
||||
pub fn hash<'a>(key: &'a [u8], data: &'a [u8]) -> impl To<[u8], anyhow::Result<()>> + 'a {
|
||||
with_destination(|out: &mut [u8]| {
|
||||
let mut h = Impl::new_from_slice(key)?;
|
||||
h.update(data);
|
||||
|
||||
// Jesus christ, blake2 crate, your usage of GenericArray might be nice and fancy
|
||||
// but it introduces a ton of complexity. This cost me half an hour just to figure
|
||||
// out the right way to use the imports while allowing for zeroization.
|
||||
// An API based on slices might actually be simpler.
|
||||
let mut tmp = Zeroizing::new([0u8; OUT_LEN]);
|
||||
let tmp = GenericArray::from_mut_slice(tmp.as_mut());
|
||||
h.finalize_into(tmp);
|
||||
copy_slice(tmp.as_ref()).to(out);
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
use rosenpass_to::ops::copy_slice;
|
||||
use rosenpass_to::To;
|
||||
use rosenpass_util::typenum2const;
|
||||
|
||||
use chacha20poly1305::aead::generic_array::GenericArray;
|
||||
use chacha20poly1305::ChaCha20Poly1305 as AeadImpl;
|
||||
use chacha20poly1305::{AeadCore, AeadInPlace, KeyInit, KeySizeUser};
|
||||
|
||||
pub const KEY_LEN: usize = typenum2const! { <AeadImpl as KeySizeUser>::KeySize };
|
||||
pub const TAG_LEN: usize = typenum2const! { <AeadImpl as AeadCore>::TagSize };
|
||||
pub const NONCE_LEN: usize = typenum2const! { <AeadImpl as AeadCore>::NonceSize };
|
||||
|
||||
#[inline]
|
||||
pub fn encrypt(
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8],
|
||||
nonce: &[u8],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
let nonce = GenericArray::from_slice(nonce);
|
||||
let (ct, mac) = ciphertext.split_at_mut(ciphertext.len() - TAG_LEN);
|
||||
copy_slice(plaintext).to(ct);
|
||||
let mac_value = AeadImpl::new_from_slice(key)?.encrypt_in_place_detached(nonce, ad, ct)?;
|
||||
copy_slice(&mac_value[..]).to(mac);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn decrypt(
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8],
|
||||
nonce: &[u8],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
let nonce = GenericArray::from_slice(nonce);
|
||||
let (ct, mac) = ciphertext.split_at(ciphertext.len() - TAG_LEN);
|
||||
let tag = GenericArray::from_slice(mac);
|
||||
copy_slice(ct).to(plaintext);
|
||||
AeadImpl::new_from_slice(key)?.decrypt_in_place_detached(nonce, ad, plaintext, tag)?;
|
||||
Ok(())
|
||||
}
|
||||
ciphers/src/subtle/custom/incorrect_hmac_blake2b.rs (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
use rosenpass_cipher_traits::{
|
||||
algorithms::KeyedHashIncorrectHmacBlake2b,
|
||||
primitives::{InferKeyedHash, KeyedHash, KeyedHashTo},
|
||||
};
|
||||
use rosenpass_constant_time::xor;
|
||||
use rosenpass_to::{ops::copy_slice, To};
|
||||
use zeroize::Zeroizing;
|
||||
|
||||
#[cfg(not(feature = "experiment_libcrux_blake2"))]
|
||||
use crate::subtle::rust_crypto::blake2b::Blake2b;
|
||||
#[cfg(not(feature = "experiment_libcrux_blake2"))]
|
||||
use anyhow::Error;
|
||||
|
||||
#[cfg(feature = "experiment_libcrux_blake2")]
|
||||
use crate::subtle::libcrux::blake2b::{Blake2b, Error};
|
||||
|
||||
/// The key length, 32 bytes or 256 bits.
|
||||
pub const KEY_LEN: usize = 32;
|
||||
|
||||
/// The hash length, 32 bytes or 256 bits.
|
||||
pub const HASH_LEN: usize = 32;
|
||||
|
||||
/// This is a woefully incorrect implementation of hmac_blake2b.
|
||||
/// See <https://github.com/rosenpass/rosenpass/issues/68#issuecomment-1563612222>
|
||||
///
|
||||
/// It accepts 32 byte keys, exclusively.
|
||||
///
|
||||
/// This will be replaced, likely by Keccak at some point soon.
|
||||
/// <https://github.com/rosenpass/rosenpass/pull/145>
|
||||
///
|
||||
/// # Examples
|
||||
///```rust
|
||||
/// # use rosenpass_ciphers::subtle::custom::incorrect_hmac_blake2b::IncorrectHmacBlake2bCore;
|
||||
/// use rosenpass_cipher_traits::primitives::KeyedHashTo;
|
||||
/// use rosenpass_to::To;
|
||||
/// let key: [u8; 32] = [0; 32];
|
||||
/// let data: [u8; 32] = [255; 32];
|
||||
/// // buffer for the hash output
|
||||
/// let mut hash_data: [u8; 32] = [0u8; 32];
|
||||
///
|
||||
/// assert!(IncorrectHmacBlake2bCore::keyed_hash_to(&key, &data).to(&mut hash_data).is_ok(), "Hashing has to return OK result");
|
||||
/// # let expected_hash: &[u8] = &[5, 152, 135, 141, 151, 106, 147, 8, 220, 95, 38, 66, 29, 33, 3,
|
||||
/// 104, 250, 114, 131, 119, 27, 56, 59, 44, 11, 67, 230, 113, 112, 20, 80, 103];
|
||||
/// # assert_eq!(hash_data, expected_hash);
|
||||
///```
|
||||
///
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct IncorrectHmacBlake2bCore;
|
||||
|
||||
impl KeyedHash<KEY_LEN, HASH_LEN> for IncorrectHmacBlake2bCore {
|
||||
type Error = Error;
|
||||
|
||||
fn keyed_hash(
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Self::Error> {
|
||||
const IPAD: [u8; KEY_LEN] = [0x36u8; KEY_LEN];
|
||||
const OPAD: [u8; KEY_LEN] = [0x5Cu8; KEY_LEN];
|
||||
|
||||
type Key = Zeroizing<[u8; KEY_LEN]>;
|
||||
let mut tmp_key = Key::default();
|
||||
|
||||
copy_slice(key).to(tmp_key.as_mut());
|
||||
xor(&IPAD).to(tmp_key.as_mut());
|
||||
let mut outer_data = Key::default();
|
||||
Blake2b::keyed_hash_to(&tmp_key, data).to(&mut outer_data)?;
|
||||
|
||||
copy_slice(key).to(tmp_key.as_mut());
|
||||
xor(&OPAD).to(tmp_key.as_mut());
|
||||
Blake2b::keyed_hash_to(&tmp_key, outer_data.as_ref()).to(out)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub type IncorrectHmacBlake2b = InferKeyedHash<IncorrectHmacBlake2bCore, KEY_LEN, HASH_LEN>;
|
||||
|
||||
impl KeyedHashIncorrectHmacBlake2b for IncorrectHmacBlake2bCore {}
|
||||
ciphers/src/subtle/custom/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
//! Own implementations of custom algorithms
|
||||
|
||||
pub mod incorrect_hmac_blake2b;
|
||||
@@ -1,46 +0,0 @@
|
||||
use anyhow::ensure;
|
||||
use zeroize::Zeroizing;
|
||||
|
||||
use rosenpass_constant_time::xor;
|
||||
use rosenpass_to::{ops::copy_slice, with_destination, To};
|
||||
|
||||
use crate::subtle::blake2b;
|
||||
|
||||
pub const KEY_LEN: usize = 32;
|
||||
pub const KEY_MIN: usize = KEY_LEN;
|
||||
pub const KEY_MAX: usize = KEY_LEN;
|
||||
pub const OUT_MIN: usize = blake2b::OUT_MIN;
|
||||
pub const OUT_MAX: usize = blake2b::OUT_MAX;
|
||||
|
||||
/// This is a woefully incorrect implementation of hmac_blake2b.
|
||||
/// See <https://github.com/rosenpass/rosenpass/issues/68#issuecomment-1563612222>
|
||||
///
|
||||
/// It accepts 32 byte keys, exclusively.
|
||||
///
|
||||
/// This will be replaced, likely by Kekkac at some point soon.
|
||||
/// <https://github.com/rosenpass/rosenpass/pull/145>
|
||||
#[inline]
|
||||
pub fn hash<'a>(key: &'a [u8], data: &'a [u8]) -> impl To<[u8], anyhow::Result<()>> + 'a {
|
||||
const IPAD: [u8; KEY_LEN] = [0x36u8; KEY_LEN];
|
||||
const OPAD: [u8; KEY_LEN] = [0x5Cu8; KEY_LEN];
|
||||
|
||||
with_destination(|out: &mut [u8]| {
|
||||
// Not bothering with padding; the implementation
|
||||
// uses appropriately sized keys.
|
||||
ensure!(key.len() == KEY_LEN);
|
||||
|
||||
type Key = Zeroizing<[u8; KEY_LEN]>;
|
||||
let mut tmp_key = Key::default();
|
||||
|
||||
copy_slice(key).to(tmp_key.as_mut());
|
||||
xor(&IPAD).to(tmp_key.as_mut());
|
||||
let mut outer_data = Key::default();
|
||||
blake2b::hash(tmp_key.as_ref(), data).to(outer_data.as_mut())?;
|
||||
|
||||
copy_slice(key).to(tmp_key.as_mut());
|
||||
xor(&OPAD).to(tmp_key.as_mut());
|
||||
blake2b::hash(tmp_key.as_ref(), outer_data.as_ref()).to(out)?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
ciphers/src/subtle/keyed_hash.rs (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
//! This module provides types that enable choosing the keyed hash building block to be used at
|
||||
//! runtime (using enums) instead of at compile time (using generics).
|
||||
|
||||
use anyhow::Result;
|
||||
use rosenpass_cipher_traits::primitives::KeyedHashInstance;
|
||||
use std::fmt::Display;
|
||||
|
||||
use crate::subtle::{
|
||||
custom::incorrect_hmac_blake2b::IncorrectHmacBlake2b, rust_crypto::keyed_shake256::SHAKE256_32,
|
||||
};
|
||||
|
||||
/// Length of symmetric key throughout Rosenpass.
|
||||
pub const KEY_LEN: usize = 32;
|
||||
|
||||
/// The hash is used as a symmetric key and should have the same length.
|
||||
pub const HASH_LEN: usize = KEY_LEN;
|
||||
|
||||
/// Provides a way to pick which keyed hash to use at runtime.
|
||||
/// Implements [`KeyedHashInstance`] to allow hashing using the respective algorithm.
|
||||
#[derive(Debug, Eq, PartialEq, Clone)]
|
||||
pub enum KeyedHash {
|
||||
/// A hasher backed by [`SHAKE256_32`].
|
||||
KeyedShake256(SHAKE256_32),
|
||||
/// A hasher backed by [`IncorrectHmacBlake2b`].
|
||||
IncorrectHmacBlake2b(IncorrectHmacBlake2b),
|
||||
}
|
||||
|
||||
impl KeyedHash {
|
||||
/// Creates a [`KeyedHash`] backed by SHAKE256.
|
||||
pub fn keyed_shake256() -> Self {
|
||||
Self::KeyedShake256(Default::default())
|
||||
}
|
||||
|
||||
/// Creates a [`KeyedHash`] backed by Blake2B.
|
||||
pub fn incorrect_hmac_blake2b() -> Self {
|
||||
Self::IncorrectHmacBlake2b(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl KeyedHashInstance<KEY_LEN, HASH_LEN> for KeyedHash {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn keyed_hash(
|
||||
&self,
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Self::Error> {
|
||||
match self {
|
||||
Self::KeyedShake256(h) => h.keyed_hash(key, data, out)?,
|
||||
Self::IncorrectHmacBlake2b(h) => h.keyed_hash(key, data, out)?,
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for KeyedHash {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::KeyedShake256(_) => write!(f, "KeyedShake256_32"),
|
||||
Self::IncorrectHmacBlake2b(_) => write!(f, "IncorrectHmacBlake2b"),
|
||||
}
|
||||
}
|
||||
}
|
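To make the runtime selection concrete, here is a small sketch of how this enum might be used; the import paths and constants come from this file, while the wrapper function and data are illustrative.

```rust
// Sketch only: select the keyed hash at runtime and hash some data with it.
use rosenpass_cipher_traits::primitives::KeyedHashInstance;
use rosenpass_ciphers::subtle::keyed_hash::{KeyedHash, HASH_LEN, KEY_LEN};

fn hash_with(choice: &KeyedHash, data: &[u8]) -> anyhow::Result<[u8; HASH_LEN]> {
    let key = [0u8; KEY_LEN]; // THIS IS NOT A SECURE KEY
    let mut out = [0u8; HASH_LEN];
    // Dispatches to SHAKE256 or the legacy HMAC-Blake2b construction.
    choice.keyed_hash(&key, data, &mut out)?;
    Ok(out)
}

// e.g. let digest = hash_with(&KeyedHash::keyed_shake256(), b"some data")?;
```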
||||
ciphers/src/subtle/libcrux/blake2b.rs (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
//! Implementation of the [`KeyedHashBlake2b`] trait based on the [`libcrux_blake2`] crate.
|
||||
|
||||
use libcrux_blake2::Blake2bBuilder;
|
||||
|
||||
use rosenpass_cipher_traits::algorithms::KeyedHashBlake2b;
|
||||
use rosenpass_cipher_traits::primitives::KeyedHash;
|
||||
|
||||
pub use rosenpass_cipher_traits::algorithms::keyed_hash_blake2b::HASH_LEN;
|
||||
pub use rosenpass_cipher_traits::algorithms::keyed_hash_blake2b::KEY_LEN;
|
||||
|
||||
/// Describes which error occurred
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Error {
|
||||
/// An unexpected internal error occurred. Should never be returned and points to a bug in the
|
||||
/// implementation.
|
||||
#[error("internal error")]
|
||||
InternalError,
|
||||
|
||||
/// Indicates that the provided data was too long.
|
||||
#[error("data is too long")]
|
||||
DataTooLong,
|
||||
}
|
||||
|
||||
/// Hashes the given `data` with the Blake2b hash function.
|
||||
pub struct Blake2b;
|
||||
|
||||
impl KeyedHash<KEY_LEN, HASH_LEN> for Blake2b {
|
||||
type Error = Error;
|
||||
|
||||
fn keyed_hash(
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Self::Error> {
|
||||
let mut h = Blake2bBuilder::new_keyed_const(key)
|
||||
// this may fail if the key length is invalid, but 32 is fine
|
||||
.map_err(|_| Error::InternalError)?
|
||||
.build_const_digest_len()
|
||||
.map_err(|_|
|
||||
// this can only fail if the output length is invalid, but 32 is fine.
|
||||
Error::InternalError)?;
|
||||
|
||||
h.update(data).map_err(|_| Error::DataTooLong)?;
|
||||
h.finalize(out);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl KeyedHashBlake2b for Blake2b {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod equivalence_tests {
|
||||
use super::*;
|
||||
use rand::RngCore;
|
||||
|
||||
#[test]
|
||||
fn fuzz_equivalence_libcrux_old_new() {
|
||||
let datas: [&[u8]; 3] = [
|
||||
b"".as_slice(),
|
||||
b"test".as_slice(),
|
||||
b"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd",
|
||||
];
|
||||
|
||||
let mut key = [0; KEY_LEN];
|
||||
let mut rng = rand::thread_rng();
|
||||
|
||||
let mut hash_left = [0; 32];
|
||||
let mut hash_right = [0; 32];
|
||||
|
||||
for data in datas {
|
||||
for _ in 0..1000 {
|
||||
rng.fill_bytes(&mut key);
|
||||
|
||||
crate::subtle::rust_crypto::blake2b::Blake2b::keyed_hash(
|
||||
&key,
|
||||
data,
|
||||
&mut hash_left,
|
||||
)
|
||||
.unwrap();
|
||||
crate::subtle::libcrux::blake2b::Blake2b::keyed_hash(&key, data, &mut hash_right)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(hash_left, hash_right);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ciphers/src/subtle/libcrux/chacha20poly1305_ietf.rs (new file, 274 lines)
@@ -0,0 +1,274 @@
|
||||
//! Implementation of the [`AeadChaCha20Poly1305`] trait based on the [`libcrux_chacha20poly1305`] crate.
|
||||
|
||||
use rosenpass_cipher_traits::algorithms::AeadChaCha20Poly1305;
|
||||
use rosenpass_cipher_traits::primitives::{Aead, AeadError};
|
||||
|
||||
pub use rosenpass_cipher_traits::algorithms::aead_chacha20poly1305::{KEY_LEN, NONCE_LEN, TAG_LEN};
|
||||
|
||||
/// An implementation of the ChaCha20Poly1305 AEAD based on libcrux
|
||||
pub struct ChaCha20Poly1305;
|
||||
|
||||
impl Aead<KEY_LEN, NONCE_LEN, TAG_LEN> for ChaCha20Poly1305 {
|
||||
fn encrypt(
|
||||
&self,
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> Result<(), AeadError> {
|
||||
let (ctxt, tag) = libcrux_chacha20poly1305::encrypt(key, plaintext, ciphertext, ad, nonce)
|
||||
.map_err(|_| AeadError::InternalError)?;
|
||||
|
||||
// return an error if the destination buffer is longer than expected
|
||||
// because the caller wouldn't know where the end is
|
||||
if ctxt.len() + tag.len() != ciphertext.len() {
|
||||
return Err(AeadError::InternalError);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn decrypt(
|
||||
&self,
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> Result<(), AeadError> {
|
||||
let ptxt = libcrux_chacha20poly1305::decrypt(key, plaintext, ciphertext, ad, nonce)
|
||||
.map_err(|_| AeadError::DecryptError)?;
|
||||
|
||||
// return an error if the destination buffer is longer than expected
|
||||
// because the caller wouldn't know where the end is
|
||||
if ptxt.len() != plaintext.len() {
|
||||
return Err(AeadError::DecryptError);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl AeadChaCha20Poly1305 for ChaCha20Poly1305 {}
|
||||
|
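Beyond the equivalence tests below, it may help to see the error behaviour promised by this implementation in use. A sketch follows; the module path is only available with the `experiment_libcrux_chachapoly` feature, and the scenario values are illustrative.

```rust
// Sketch only: a flipped ciphertext bit must surface as AeadError::DecryptError.
use rosenpass_cipher_traits::primitives::{Aead, AeadError};
use rosenpass_ciphers::subtle::libcrux::chacha20poly1305_ietf::{
    ChaCha20Poly1305, KEY_LEN, NONCE_LEN, TAG_LEN,
};

fn reject_tampering() {
    let (key, nonce) = ([0u8; KEY_LEN], [0u8; NONCE_LEN]);
    let msg = b"attack at dawn";

    let mut ct = vec![0u8; msg.len() + TAG_LEN];
    ChaCha20Poly1305.encrypt(&mut ct, &key, &nonce, b"", msg).unwrap();

    // Corrupt one byte; the Poly1305 tag no longer verifies.
    ct[0] ^= 1;
    let mut pt = vec![0u8; msg.len()];
    let res = ChaCha20Poly1305.decrypt(&mut pt, &key, &nonce, b"", &ct);
    assert!(matches!(res, Err(AeadError::DecryptError)));
}
```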
||||
/// The idea of these tests is to check that the above implementation behaves, by and large, the
|
||||
/// same as the one from the old libcrux and the one from RustCrypto. You can consider them janky,
|
||||
/// self-rolled property-based tests.
|
||||
#[cfg(test)]
|
||||
mod equivalence_tests {
|
||||
use super::*;
|
||||
use rand::RngCore;
|
||||
|
||||
#[test]
|
||||
fn proptest_equivalence_libcrux_rustcrypto() {
|
||||
use crate::subtle::rust_crypto::chacha20poly1305_ietf::ChaCha20Poly1305 as RustCryptoChaCha20Poly1305;
|
||||
let ptxts: [&[u8]; 3] = [
|
||||
b"".as_slice(),
|
||||
b"test".as_slice(),
|
||||
b"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd",
|
||||
];
|
||||
let mut key = [0; KEY_LEN];
|
||||
let mut rng = rand::thread_rng();
|
||||
|
||||
let mut ctxt_left = [0; 64 + TAG_LEN];
|
||||
let mut ctxt_right = [0; 64 + TAG_LEN];
|
||||
|
||||
let mut ptxt_left = [0; 64];
|
||||
let mut ptxt_right = [0; 64];
|
||||
|
||||
let nonce = [0; NONCE_LEN];
|
||||
let ad = b"";
|
||||
|
||||
for ptxt in ptxts {
|
||||
for _ in 0..1000 {
|
||||
rng.fill_bytes(&mut key);
|
||||
let ctxt_left = &mut ctxt_left[..ptxt.len() + TAG_LEN];
|
||||
let ctxt_right = &mut ctxt_right[..ptxt.len() + TAG_LEN];
|
||||
|
||||
let ptxt_left = &mut ptxt_left[..ptxt.len()];
|
||||
let ptxt_right = &mut ptxt_right[..ptxt.len()];
|
||||
|
||||
RustCryptoChaCha20Poly1305
|
||||
.encrypt(ctxt_left, &key, &nonce, ad, ptxt)
|
||||
.unwrap();
|
||||
ChaCha20Poly1305
|
||||
.encrypt(ctxt_right, &key, &nonce, ad, ptxt)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(ctxt_left, ctxt_right);
|
||||
|
||||
RustCryptoChaCha20Poly1305
|
||||
.decrypt(ptxt_left, &key, &nonce, ad, ctxt_left)
|
||||
.unwrap();
|
||||
ChaCha20Poly1305
|
||||
.decrypt(ptxt_right, &key, &nonce, ad, ctxt_right)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(ptxt_left, ptxt);
|
||||
assert_eq!(ptxt_right, ptxt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "experiment_libcrux_chachapoly_test")]
|
||||
fn proptest_equivalence_libcrux_old_new() {
|
||||
let ptxts: [&[u8]; 3] = [
|
||||
b"".as_slice(),
|
||||
b"test".as_slice(),
|
||||
b"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd",
|
||||
];
|
||||
let mut key = [0; KEY_LEN];
|
||||
let mut rng = rand::thread_rng();
|
||||
|
||||
let mut ctxt_left = [0; 64 + TAG_LEN];
|
||||
let mut ctxt_right = [0; 64 + TAG_LEN];
|
||||
|
||||
let mut ptxt_left = [0; 64];
|
||||
let mut ptxt_right = [0; 64];
|
||||
|
||||
let nonce = [0; NONCE_LEN];
|
||||
let ad = b"";
|
||||
|
||||
for ptxt in ptxts {
|
||||
for _ in 0..1000 {
|
||||
rng.fill_bytes(&mut key);
|
||||
let ctxt_left = &mut ctxt_left[..ptxt.len() + TAG_LEN];
|
||||
let ctxt_right = &mut ctxt_right[..ptxt.len() + TAG_LEN];
|
||||
|
||||
let ptxt_left = &mut ptxt_left[..ptxt.len()];
|
||||
let ptxt_right = &mut ptxt_right[..ptxt.len()];
|
||||
|
||||
encrypt(ctxt_left, &key, &nonce, ad, ptxt).unwrap();
|
||||
ChaCha20Poly1305
|
||||
.encrypt(ctxt_right, &key, &nonce, ad, ptxt)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(ctxt_left, ctxt_right);
|
||||
|
||||
decrypt(ptxt_left, &key, &nonce, ad, ctxt_left).unwrap();
|
||||
ChaCha20Poly1305
|
||||
.decrypt(ptxt_right, &key, &nonce, ad, ctxt_right)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(ptxt_left, ptxt);
|
||||
assert_eq!(ptxt_right, ptxt);
|
||||
}
|
||||
}
|
||||
|
||||
// The old libcrux functions:
|
||||
|
||||
// The functions below are from the old libcrux backend. I am keeping them around so we can
|
||||
// check if they behave the same.
|
||||
use rosenpass_to::ops::copy_slice;
|
||||
use rosenpass_to::To;
|
||||
use zeroize::Zeroize;
|
||||
|
||||
/// Encrypts using ChaCha20Poly1305 as implemented in [libcrux](https://github.com/cryspen/libcrux).
|
||||
/// Key and nonce MUST be chosen (pseudo-)randomly. The `key` slice MUST have a length of
|
||||
/// [KEY_LEN]. The `nonce` slice MUST have a length of [NONCE_LEN]. The last [TAG_LEN] bytes
|
||||
/// written in `ciphertext` are the tag guaranteeing integrity. `ciphertext` MUST have a capacity of
|
||||
/// `plaintext.len()` + [TAG_LEN].
|
||||
///
|
||||
/// # Examples
|
||||
///```rust
|
||||
/// # use rosenpass_ciphers::subtle::chacha20poly1305_ietf_libcrux::{encrypt, TAG_LEN, KEY_LEN, NONCE_LEN};
|
||||
///
|
||||
/// const PLAINTEXT_LEN: usize = 43;
|
||||
/// let plaintext = "post-quantum cryptography is very important".as_bytes();
|
||||
/// assert_eq!(PLAINTEXT_LEN, plaintext.len());
|
||||
/// let key: &[u8] = &[0u8; KEY_LEN]; // THIS IS NOT A SECURE KEY
|
||||
/// let nonce: &[u8] = &[0u8; NONCE_LEN]; // THIS IS NOT A SECURE NONCE
|
||||
/// let additional_data: &[u8] = "the encrypted message is very important".as_bytes();
|
||||
/// let mut ciphertext_buffer = [0u8; PLAINTEXT_LEN + TAG_LEN];
|
||||
///
|
||||
/// let res: anyhow::Result<()> = encrypt(&mut ciphertext_buffer, key, nonce, additional_data, plaintext);
|
||||
/// assert!(res.is_ok());
|
||||
/// # let expected_ciphertext: &[u8] = &[239, 104, 148, 202, 120, 32, 77, 27, 246, 206, 226, 17,
|
||||
/// # 83, 78, 122, 116, 187, 123, 70, 199, 58, 130, 21, 1, 107, 230, 58, 77, 18, 152, 31, 159, 80,
|
||||
/// # 151, 72, 27, 236, 137, 60, 55, 180, 31, 71, 97, 199, 12, 60, 155, 70, 221, 225, 110, 132, 191,
|
||||
/// # 8, 114, 85, 4, 25];
|
||||
/// # assert_eq!(expected_ciphertext, &ciphertext_buffer);
|
||||
///```
|
||||
///
|
||||
#[inline]
|
||||
pub fn encrypt(
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8],
|
||||
nonce: &[u8],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
let (ciphertext, mac) = ciphertext.split_at_mut(ciphertext.len() - TAG_LEN);
|
||||
|
||||
use libcrux::aead as C;
|
||||
let crux_key = C::Key::Chacha20Poly1305(C::Chacha20Key(key.try_into().unwrap()));
|
||||
let crux_iv = C::Iv(nonce.try_into().unwrap());
|
||||
|
||||
copy_slice(plaintext).to(ciphertext);
|
||||
let crux_tag = libcrux::aead::encrypt(&crux_key, ciphertext, crux_iv, ad).unwrap();
|
||||
copy_slice(crux_tag.as_ref()).to(mac);
|
||||
|
||||
match crux_key {
|
||||
C::Key::Chacha20Poly1305(mut k) => k.0.zeroize(),
|
||||
_ => panic!(),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Decrypts a `ciphertext` and verifies the integrity of the `ciphertext` and the additional data
|
||||
/// `ad`. using ChaCha20Poly1305 as implemented in [libcrux](https://github.com/cryspen/libcrux).
|
||||
///
|
||||
/// The `key` slice MUST have a length of [KEY_LEN]. The `nonce` slice MUST have a length of
|
||||
/// [NONCE_LEN]. The plaintext buffer must have a capacity of `ciphertext.len()` - [TAG_LEN].
|
||||
///
|
||||
/// # Examples
|
||||
///```rust
|
||||
/// # use rosenpass_ciphers::subtle::chacha20poly1305_ietf_libcrux::{decrypt, TAG_LEN, KEY_LEN, NONCE_LEN};
|
||||
/// let ciphertext: &[u8] = &[239, 104, 148, 202, 120, 32, 77, 27, 246, 206, 226, 17,
|
||||
/// 83, 78, 122, 116, 187, 123, 70, 199, 58, 130, 21, 1, 107, 230, 58, 77, 18, 152, 31, 159, 80,
|
||||
/// 151, 72, 27, 236, 137, 60, 55, 180, 31, 71, 97, 199, 12, 60, 155, 70, 221, 225, 110, 132, 191,
|
||||
/// 8, 114, 85, 4, 25]; // this is the ciphertext generated by the example for the encryption
|
||||
/// const PLAINTEXT_LEN: usize = 43;
|
||||
/// assert_eq!(PLAINTEXT_LEN + TAG_LEN, ciphertext.len());
|
||||
///
|
||||
/// let key: &[u8] = &[0u8; KEY_LEN]; // THIS IS NOT A SECURE KEY
|
||||
/// let nonce: &[u8] = &[0u8; NONCE_LEN]; // THIS IS NOT A SECURE NONCE
|
||||
/// let additional_data: &[u8] = "the encrypted message is very important".as_bytes();
|
||||
/// let mut plaintext_buffer = [0u8; PLAINTEXT_LEN];
|
||||
///
|
||||
/// let res: anyhow::Result<()> = decrypt(&mut plaintext_buffer, key, nonce, additional_data, ciphertext);
|
||||
/// assert!(res.is_ok());
|
||||
/// let expected_plaintext = "post-quantum cryptography is very important".as_bytes();
|
||||
/// assert_eq!(expected_plaintext, plaintext_buffer);
|
||||
///
|
||||
///```
|
||||
#[inline]
|
||||
pub fn decrypt(
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8],
|
||||
nonce: &[u8],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
let (ciphertext, mac) = ciphertext.split_at(ciphertext.len() - TAG_LEN);
|
||||
|
||||
use libcrux::aead as C;
|
||||
let crux_key = C::Key::Chacha20Poly1305(C::Chacha20Key(key.try_into().unwrap()));
|
||||
let crux_iv = C::Iv(nonce.try_into().unwrap());
|
||||
let crux_tag = C::Tag::from_slice(mac).unwrap();
|
||||
|
||||
copy_slice(ciphertext).to(plaintext);
|
||||
libcrux::aead::decrypt(&crux_key, plaintext, crux_iv, ad, &crux_tag).unwrap();
|
||||
|
||||
match crux_key {
|
||||
C::Key::Chacha20Poly1305(mut k) => k.0.zeroize(),
|
||||
_ => panic!(),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
ciphers/src/subtle/libcrux/kyber512.rs (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
//! Implementation of the [`KemKyber512`] trait based on the [`libcrux_ml_kem`] crate.
|
||||
|
||||
use libcrux_ml_kem::kyber512;
|
||||
use rand::RngCore;
|
||||
|
||||
use rosenpass_cipher_traits::algorithms::KemKyber512;
|
||||
use rosenpass_cipher_traits::primitives::{Kem, KemError};
|
||||
|
||||
pub use rosenpass_cipher_traits::algorithms::kem_kyber512::{CT_LEN, PK_LEN, SHK_LEN, SK_LEN};
|
||||
|
||||
/// An implementation of the Kyber512 KEM based on libcrux
|
||||
pub struct Kyber512;
|
||||
|
||||
impl Kem<SK_LEN, PK_LEN, CT_LEN, SHK_LEN> for Kyber512 {
|
||||
fn keygen(&self, sk: &mut [u8; SK_LEN], pk: &mut [u8; PK_LEN]) -> Result<(), KemError> {
|
||||
let mut randomness = [0u8; libcrux_ml_kem::KEY_GENERATION_SEED_SIZE];
|
||||
rand::thread_rng().fill_bytes(&mut randomness);
|
||||
|
||||
let key_pair = kyber512::generate_key_pair(randomness);
|
||||
|
||||
let new_sk: &[u8; SK_LEN] = key_pair.sk();
|
||||
let new_pk: &[u8; PK_LEN] = key_pair.pk();
|
||||
|
||||
sk.clone_from_slice(new_sk);
|
||||
pk.clone_from_slice(new_pk);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn encaps(
|
||||
&self,
|
||||
shk: &mut [u8; SHK_LEN],
|
||||
ct: &mut [u8; CT_LEN],
|
||||
pk: &[u8; PK_LEN],
|
||||
) -> Result<(), KemError> {
|
||||
let mut randomness = [0u8; libcrux_ml_kem::SHARED_SECRET_SIZE];
|
||||
rand::thread_rng().fill_bytes(&mut randomness);
|
||||
|
||||
let (new_ct, new_shk) = kyber512::encapsulate(&pk.into(), randomness);
|
||||
let new_ct: &[u8; CT_LEN] = new_ct.as_slice();
|
||||
|
||||
shk.clone_from_slice(&new_shk);
|
||||
ct.clone_from_slice(new_ct);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn decaps(
|
||||
&self,
|
||||
shk: &mut [u8; SHK_LEN],
|
||||
sk: &[u8; SK_LEN],
|
||||
ct: &[u8; CT_LEN],
|
||||
) -> Result<(), KemError> {
|
||||
let new_shk: [u8; SHK_LEN] = kyber512::decapsulate(&sk.into(), &ct.into());
|
||||
shk.clone_from(&new_shk);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Kyber512 {
|
||||
fn default() -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
impl KemKyber512 for Kyber512 {}
|
||||
|
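A short sketch of the KEM flow these methods implement, complementing the equivalence tests below; the module path is gated behind the `experiment_libcrux_kyber` feature, and the wrapper function is illustrative.

```rust
// Sketch only: keygen/encaps/decaps round trip with the libcrux-backed Kyber512.
use rosenpass_cipher_traits::primitives::Kem;
use rosenpass_ciphers::subtle::libcrux::kyber512::{Kyber512, CT_LEN, PK_LEN, SHK_LEN, SK_LEN};

fn kem_roundtrip() {
    let (mut sk, mut pk) = ([0u8; SK_LEN], [0u8; PK_LEN]);
    Kyber512.keygen(&mut sk, &mut pk).unwrap();

    // The sender encapsulates against the public key ...
    let (mut ct, mut shk_sender) = ([0u8; CT_LEN], [0u8; SHK_LEN]);
    Kyber512.encaps(&mut shk_sender, &mut ct, &pk).unwrap();

    // ... and the receiver recovers the same shared key from the ciphertext.
    let mut shk_receiver = [0u8; SHK_LEN];
    Kyber512.decaps(&mut shk_receiver, &sk, &ct).unwrap();
    assert_eq!(shk_sender, shk_receiver);
}
```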
||||
#[cfg(test)]
|
||||
mod equivalence_tests {
|
||||
use super::*;
|
||||
|
||||
// Test that libcrux and OQS produce the same results
|
||||
#[test]
|
||||
fn proptest_equivalence_libcrux_oqs() {
|
||||
use rosenpass_oqs::Kyber512 as OqsKyber512;
|
||||
|
||||
let (mut sk1, mut pk1) = ([0; SK_LEN], [0; PK_LEN]);
|
||||
let (mut sk2, mut pk2) = ([0; SK_LEN], [0; PK_LEN]);
|
||||
|
||||
let mut ct_left = [0; CT_LEN];
|
||||
let mut ct_right = [0; CT_LEN];
|
||||
|
||||
let mut shk_enc_left = [0; SHK_LEN];
|
||||
let mut shk_enc_right = [0; SHK_LEN];
|
||||
|
||||
// naming schema: shk_dec_{encapsulating lib}_{decapsulating lib}
|
||||
// should be the same if the encapsulating lib was the same.
|
||||
let mut shk_dec_left_left = [0; SHK_LEN];
|
||||
let mut shk_dec_left_right = [0; SHK_LEN];
|
||||
let mut shk_dec_right_left = [0; SHK_LEN];
|
||||
let mut shk_dec_right_right = [0; SHK_LEN];
|
||||
|
||||
for _ in 0..1000 {
|
||||
let sk1 = &mut sk1;
|
||||
let pk1 = &mut pk1;
|
||||
let sk2 = &mut sk2;
|
||||
let pk2 = &mut pk2;
|
||||
|
||||
let ct_left = &mut ct_left;
|
||||
let ct_right = &mut ct_right;
|
||||
|
||||
let shk_enc_left = &mut shk_enc_left;
|
||||
let shk_enc_right = &mut shk_enc_right;
|
||||
|
||||
let shk_dec_left_left = &mut shk_dec_left_left;
|
||||
let shk_dec_left_right = &mut shk_dec_left_right;
|
||||
let shk_dec_right_left = &mut shk_dec_right_left;
|
||||
let shk_dec_right_right = &mut shk_dec_right_right;
|
||||
|
||||
Kyber512.keygen(sk1, pk1).unwrap();
|
||||
Kyber512.keygen(sk2, pk2).unwrap();
|
||||
|
||||
Kyber512.encaps(shk_enc_left, ct_left, pk2).unwrap();
|
||||
OqsKyber512.encaps(shk_enc_right, ct_right, pk2).unwrap();
|
||||
|
||||
Kyber512.decaps(shk_dec_left_left, sk2, ct_left).unwrap();
|
||||
Kyber512.decaps(shk_dec_right_left, sk2, ct_right).unwrap();
|
||||
|
||||
OqsKyber512
|
||||
.decaps(shk_dec_left_right, sk2, ct_left)
|
||||
.unwrap();
|
||||
OqsKyber512
|
||||
.decaps(shk_dec_right_right, sk2, ct_right)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(shk_enc_left, shk_dec_left_left);
|
||||
assert_eq!(shk_enc_left, shk_dec_left_right);
|
||||
|
||||
assert_eq!(shk_enc_right, shk_dec_right_left);
|
||||
assert_eq!(shk_enc_right, shk_dec_right_right);
|
||||
}
|
||||
}
|
||||
}
|
||||
ciphers/src/subtle/libcrux/mod.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
//! Implementations backed by libcrux, a verified crypto library.
|
||||
//!
|
||||
//! [Website](https://cryspen.com/libcrux/)
|
||||
//!
|
||||
//! [Github](https://github.com/cryspen/libcrux)
|
||||
|
||||
#[cfg(feature = "experiment_libcrux_blake2")]
|
||||
pub mod blake2b;
|
||||
|
||||
#[cfg(feature = "experiment_libcrux_chachapoly")]
|
||||
pub mod chacha20poly1305_ietf;
|
||||
|
||||
#[cfg(feature = "experiment_libcrux_kyber")]
|
||||
pub mod kyber512;
|
||||
@@ -1,4 +1,16 @@
|
||||
pub mod blake2b;
|
||||
pub mod chacha20poly1305_ietf;
|
||||
pub mod incorrect_hmac_blake2b;
|
||||
pub mod xchacha20poly1305_ietf;
|
||||
//! Contains the implementations of the crypto algorithms used throughout Rosenpass.
|
||||
|
||||
pub mod keyed_hash;
|
||||
|
||||
pub use custom::incorrect_hmac_blake2b;
|
||||
pub use rust_crypto::{blake2b, keyed_shake256};
|
||||
|
||||
pub mod custom;
|
||||
pub mod rust_crypto;
|
||||
|
||||
#[cfg(any(
|
||||
feature = "experiment_libcrux_blake2",
|
||||
feature = "experiment_libcrux_chachapoly",
|
||||
feature = "experiment_libcrux_kyber"
|
||||
))]
|
||||
pub mod libcrux;
|
||||
|
||||
ciphers/src/subtle/rust_crypto/blake2b.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
use zeroize::Zeroizing;
|
||||
|
||||
use blake2::digest::crypto_common::generic_array::GenericArray;
|
||||
use blake2::digest::crypto_common::typenum::U32;
|
||||
use blake2::digest::{FixedOutput, Mac};
|
||||
use blake2::Blake2bMac;
|
||||
|
||||
use rosenpass_cipher_traits::primitives::KeyedHash;
|
||||
use rosenpass_to::{ops::copy_slice, To};
|
||||
|
||||
pub use rosenpass_cipher_traits::algorithms::keyed_hash_blake2b::{HASH_LEN, KEY_LEN};
|
||||
|
||||
/// Specifies that the BLAKE2b implementation used is the MAC version of BLAKE2b
|
||||
/// with output and key length of 32 bytes (see [Blake2bMac]).
|
||||
type Impl = Blake2bMac<U32>;
|
||||
|
||||
/// Hashes the given `data` with the [Blake2bMac] hash function under the given `key`.
|
||||
/// Both the output length and the key length are 32 bytes (or 256 bits).
|
||||
pub struct Blake2b;
|
||||
|
||||
impl KeyedHash<KEY_LEN, HASH_LEN> for Blake2b {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn keyed_hash(
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Self::Error> {
|
||||
let mut h = Impl::new_from_slice(key)?;
|
||||
h.update(data);
|
||||
|
||||
// Jesus christ, blake2 crate, your usage of GenericArray might be nice and fancy,
|
||||
// but it introduces a ton of complexity. This cost me half an hour just to figure
|
||||
// out the right way to use the imports while allowing for zeroization.
|
||||
// An API based on slices might actually be simpler.
|
||||
let mut tmp = Zeroizing::new([0u8; HASH_LEN]);
|
||||
let tmp = GenericArray::from_mut_slice(tmp.as_mut());
|
||||
h.finalize_into(tmp);
|
||||
copy_slice(tmp.as_ref()).to(out);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl rosenpass_cipher_traits::algorithms::KeyedHashBlake2b for Blake2b {}
|
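A brief usage sketch for the struct above; the paths and constants are taken from this file, while the wrapper function is illustrative.

```rust
// Sketch only: compute a keyed Blake2b digest with the RustCrypto backend.
use rosenpass_cipher_traits::primitives::KeyedHash;
use rosenpass_ciphers::subtle::rust_crypto::blake2b::{Blake2b, HASH_LEN, KEY_LEN};

fn digest_of(data: &[u8]) -> anyhow::Result<[u8; HASH_LEN]> {
    let key = [0u8; KEY_LEN]; // THIS IS NOT A SECURE KEY
    let mut out = [0u8; HASH_LEN];
    Blake2b::keyed_hash(&key, data, &mut out)?;
    Ok(out)
}
```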
||||
ciphers/src/subtle/rust_crypto/chacha20poly1305_ietf.rs (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
use rosenpass_to::ops::copy_slice;
|
||||
use rosenpass_to::To;
|
||||
|
||||
use rosenpass_cipher_traits::algorithms::AeadChaCha20Poly1305;
|
||||
use rosenpass_cipher_traits::primitives::{Aead, AeadError};
|
||||
|
||||
use chacha20poly1305::aead::generic_array::GenericArray;
|
||||
use chacha20poly1305::ChaCha20Poly1305 as AeadImpl;
|
||||
use chacha20poly1305::{AeadInPlace, KeyInit};
|
||||
|
||||
pub use rosenpass_cipher_traits::algorithms::aead_chacha20poly1305::{KEY_LEN, NONCE_LEN, TAG_LEN};
|
||||
|
||||
/// Implements the [`Aead`] and [`AeadChaCha20Poly1305`] traits backed by the RustCrypto
|
||||
/// implementation.
|
||||
pub struct ChaCha20Poly1305;
|
||||
|
||||
impl Aead<KEY_LEN, NONCE_LEN, TAG_LEN> for ChaCha20Poly1305 {
|
||||
fn encrypt(
|
||||
&self,
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> Result<(), AeadError> {
|
||||
// The comparison looks complicated, but we need to do it this way to prevent
|
||||
// over/underflows.
|
||||
if ciphertext.len() < TAG_LEN || ciphertext.len() - TAG_LEN < plaintext.len() {
|
||||
return Err(AeadError::InvalidLengths);
|
||||
}
|
||||
|
||||
let nonce = GenericArray::from_slice(nonce);
|
||||
let (ct, mac) = ciphertext.split_at_mut(ciphertext.len() - TAG_LEN);
|
||||
copy_slice(plaintext).to(ct);
|
||||
|
||||
// This only fails if the length is wrong, which really shouldn't happen and would
|
||||
// constitute an internal error.
|
||||
let encrypter = AeadImpl::new_from_slice(key).map_err(|_| AeadError::InternalError)?;
|
||||
|
||||
let mac_value = encrypter
|
||||
.encrypt_in_place_detached(nonce, ad, ct)
|
||||
.map_err(|_| AeadError::InternalError)?;
|
||||
copy_slice(&mac_value[..]).to(mac);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn decrypt(
|
||||
&self,
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> Result<(), AeadError> {
|
||||
// The comparison looks complicated, but we need to do it this way to prevent
|
||||
// over/underflows.
|
||||
if ciphertext.len() < TAG_LEN || ciphertext.len() - TAG_LEN < plaintext.len() {
|
||||
return Err(AeadError::InvalidLengths);
|
||||
}
|
||||
|
||||
let nonce = GenericArray::from_slice(nonce);
|
||||
let (ct, mac) = ciphertext.split_at(ciphertext.len() - TAG_LEN);
|
||||
let tag = GenericArray::from_slice(mac);
|
||||
copy_slice(ct).to(plaintext);
|
||||
|
||||
// This only fails if the length is wrong, which really shouldn't happen and would
|
||||
// constitute an internal error.
|
||||
let decrypter = AeadImpl::new_from_slice(key).map_err(|_| AeadError::InternalError)?;
|
||||
|
||||
decrypter
|
||||
.decrypt_in_place_detached(nonce, ad, plaintext, tag)
|
||||
.map_err(|_| AeadError::DecryptError)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl AeadChaCha20Poly1305 for ChaCha20Poly1305 {}
|
||||
ciphers/src/subtle/rust_crypto/keyed_shake256.rs (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
use anyhow::ensure;
|
||||
use rosenpass_cipher_traits::primitives::{InferKeyedHash, KeyedHash};
|
||||
use sha3::digest::{ExtendableOutput, Update, XofReader};
|
||||
use sha3::Shake256;
|
||||
|
||||
pub use rosenpass_cipher_traits::algorithms::keyed_hash_shake256::{HASH_LEN, KEY_LEN};
|
||||
|
||||
/// An implementation of the [`KeyedHash`] trait backed by the RustCrypto implementation of SHAKE256.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct SHAKE256Core<const KEY_LEN: usize, const HASH_LEN: usize>;
|
||||
|
||||
impl<const KEY_LEN: usize, const HASH_LEN: usize> KeyedHash<KEY_LEN, HASH_LEN>
|
||||
for SHAKE256Core<KEY_LEN, HASH_LEN>
|
||||
{
|
||||
type Error = anyhow::Error;
|
||||
|
||||
/// Provides a keyed hash function based on SHAKE256. To work for the protocol, the output length
|
||||
/// and key length are fixed to 32 bytes (also see [KEY_LEN] and [HASH_LEN]).
|
||||
///
|
||||
/// Note that the SHAKE256 is designed for 64 bytes output length, which we truncate to 32 bytes
|
||||
/// to work well with the overall protocol. Referring to Table 4 of FIPS 202, this offers the
|
||||
/// same collision resistance as SHAKE128, but 256 bits of preimage resistance. We therefore
|
||||
/// prefer a truncated SHAKE256 over SHAKE128.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```rust
|
||||
/// # use rosenpass_ciphers::subtle::rust_crypto::keyed_shake256::SHAKE256Core;
|
||||
/// use rosenpass_cipher_traits::primitives::KeyedHash;
|
||||
/// const KEY_LEN: usize = 32;
|
||||
/// const HASH_LEN: usize = 32;
|
||||
/// let key: [u8; 32] = [0; KEY_LEN];
|
||||
/// let data: [u8; 32] = [255; 32]; // arbitrary data, could also be longer
|
||||
/// // buffer for the hash output
|
||||
/// let mut hash_data: [u8; 32] = [0u8; HASH_LEN];
|
||||
///
|
||||
/// assert!(SHAKE256Core::<32, 32>::keyed_hash(&key, &data, &mut hash_data).is_ok(), "Hashing has to return OK result");
|
||||
/// # let expected_hash: &[u8] = &[174, 4, 47, 188, 1, 228, 179, 246, 67, 43, 255, 94, 155, 11,
|
||||
/// 187, 161, 38, 110, 217, 23, 4, 62, 172, 30, 218, 187, 249, 80, 171, 21, 145, 238];
|
||||
/// # assert_eq!(hash_data, expected_hash);
|
||||
/// ```
|
||||
fn keyed_hash(
|
||||
key: &[u8; KEY_LEN],
|
||||
data: &[u8],
|
||||
out: &mut [u8; HASH_LEN],
|
||||
) -> Result<(), Self::Error> {
|
||||
// Since SHAKE256 is a XOF, we fix the output length manually to what is required for the
|
||||
// protocol.
|
||||
ensure!(out.len() == HASH_LEN);
|
||||
// Not bothering with padding; the implementation
|
||||
// uses appropriately sized keys.
|
||||
ensure!(key.len() == KEY_LEN);
|
||||
let mut shake256 = Shake256::default();
|
||||
shake256.update(key);
|
||||
shake256.update(data);
|
||||
|
||||
// Since we use domain separation extensively, related outputs of the truncated XOF
|
||||
// are not a concern. This follows the NIST recommendations in Section A.2 of the FIPS 202
|
||||
// standard, (pages 24/25, i.e., 32/33 in the PDF).
|
||||
shake256.finalize_xof().read(out);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<const KEY_LEN: usize, const HASH_LEN: usize> SHAKE256Core<KEY_LEN, HASH_LEN> {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
impl<const KEY_LEN: usize, const HASH_LEN: usize> Default for SHAKE256Core<KEY_LEN, HASH_LEN> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// This type provides the same functionality as [SHAKE256Core], but bound to an instance.
|
||||
/// In contrast to [SHAKE256Core], this allows for type inference and thus allows the user of the
|
||||
/// type to omit explicit type parameters when instantiating the type or using it.
|
||||
///
|
||||
/// The instantiation is based on the [InferKeyedHash] trait.
|
||||
///
|
||||
/// ```rust
|
||||
/// # use rosenpass_ciphers::subtle::rust_crypto::keyed_shake256::{SHAKE256};
|
||||
/// use rosenpass_cipher_traits::primitives::KeyedHashInstance;
|
||||
/// const KEY_LEN: usize = 32;
|
||||
/// const HASH_LEN: usize = 32;
|
||||
/// let key: [u8; KEY_LEN] = [0; KEY_LEN];
|
||||
/// let data: [u8; 32] = [255; 32]; // arbitrary data, could also be longer
|
||||
/// // buffer for the hash output
|
||||
/// let mut hash_data: [u8; 32] = [0u8; HASH_LEN];
|
||||
/// assert!(SHAKE256::new().keyed_hash(&key, &data, &mut hash_data).is_ok(), "Hashing has to return OK result");
|
||||
/// # let expected_hash: &[u8] = &[174, 4, 47, 188, 1, 228, 179, 246, 67, 43, 255, 94, 155, 11, 187,
|
||||
/// 161, 38, 110, 217, 23, 4, 62, 172, 30, 218, 187, 249, 80, 171, 21, 145, 238];
|
||||
/// # assert_eq!(hash_data, expected_hash);
|
||||
/// ```
|
||||
pub type SHAKE256<const KEY_LEN: usize, const HASH_LEN: usize> =
|
||||
InferKeyedHash<SHAKE256Core<KEY_LEN, HASH_LEN>, KEY_LEN, HASH_LEN>;
|
||||
|
||||
/// The SHAKE256_32 type is a specific instance of the [SHAKE256] type with the key length and hash
|
||||
/// length fixed to 32 bytes.
|
||||
///
|
||||
/// ```rust
|
||||
/// # use rosenpass_ciphers::subtle::keyed_shake256::{SHAKE256_32};
|
||||
/// use rosenpass_cipher_traits::primitives::KeyedHashInstance;
|
||||
/// const KEY_LEN: usize = 32;
|
||||
/// const HASH_LEN: usize = 32;
|
||||
/// let key: [u8; 32] = [0; KEY_LEN];
|
||||
/// let data: [u8; 32] = [255; 32]; // arbitrary data, could also be longer
|
||||
/// // buffer for the hash output
|
||||
/// let mut hash_data: [u8; 32] = [0u8; HASH_LEN];
|
||||
///
|
||||
/// assert!(SHAKE256_32::new().keyed_hash(&key, &data, &mut hash_data).is_ok(), "Hashing has to return OK result");
|
||||
/// # let expected_hash: &[u8] = &[174, 4, 47, 188, 1, 228, 179, 246, 67, 43, 255, 94, 155, 11, 187,
|
||||
/// 161, 38, 110, 217, 23, 4, 62, 172, 30, 218, 187, 249, 80, 171, 21, 145, 238];
|
||||
/// # assert_eq!(hash_data, expected_hash);
|
||||
/// ```
|
||||
pub type SHAKE256_32 = SHAKE256<32, 32>;
|
||||
ciphers/src/subtle/rust_crypto/mod.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
//! Implementations backed by RustCrypto
|
||||
|
||||
pub mod blake2b;
|
||||
pub mod keyed_shake256;
|
||||
|
||||
pub mod chacha20poly1305_ietf;
|
||||
pub mod xchacha20poly1305_ietf;
|
||||
ciphers/src/subtle/rust_crypto/xchacha20poly1305_ietf.rs (new file, 164 lines)
@@ -0,0 +1,164 @@
|
||||
use rosenpass_to::ops::copy_slice;
|
||||
use rosenpass_to::To;
|
||||
|
||||
use rosenpass_cipher_traits::algorithms::aead_xchacha20poly1305::AeadXChaCha20Poly1305;
|
||||
use rosenpass_cipher_traits::primitives::{Aead, AeadError, AeadWithNonceInCiphertext};
|
||||
|
||||
use chacha20poly1305::aead::generic_array::GenericArray;
|
||||
use chacha20poly1305::XChaCha20Poly1305 as AeadImpl;
|
||||
use chacha20poly1305::{AeadInPlace, KeyInit};
|
||||
|
||||
pub use rosenpass_cipher_traits::algorithms::aead_xchacha20poly1305::{
|
||||
KEY_LEN, NONCE_LEN, TAG_LEN,
|
||||
};
|
||||
/// Implements the [`Aead`] and [`AeadXChaCha20Poly1305`] traits backed by the RustCrypto
|
||||
/// implementation.
|
||||
pub struct XChaCha20Poly1305;
|
||||
|
||||
impl Aead<KEY_LEN, NONCE_LEN, TAG_LEN> for XChaCha20Poly1305 {
|
||||
fn encrypt(
|
||||
&self,
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> Result<(), AeadError> {
|
||||
// The comparison looks complicated, but we need to do it this way to prevent
|
||||
// over/underflows.
|
||||
if ciphertext.len() < TAG_LEN || ciphertext.len() - TAG_LEN < plaintext.len() {
|
||||
return Err(AeadError::InvalidLengths);
|
||||
}
|
||||
|
||||
let (ct, mac) = ciphertext.split_at_mut(ciphertext.len() - TAG_LEN);
|
||||
copy_slice(plaintext).to(ct);
|
||||
|
||||
let nonce = GenericArray::from_slice(nonce);
|
||||
|
||||
// This only fails if the length is wrong, which really shouldn't happen and would
|
||||
// constitute an internal error.
|
||||
let encrypter = AeadImpl::new_from_slice(key).map_err(|_| AeadError::InternalError)?;
|
||||
|
||||
let mac_value = encrypter
|
||||
.encrypt_in_place_detached(nonce, ad, ct)
|
||||
.map_err(|_| AeadError::InternalError)?;
|
||||
copy_slice(&mac_value[..]).to(mac);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn decrypt(
|
||||
&self,
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> Result<(), AeadError> {
|
||||
// The comparison looks complicated, but we need to do it this way to prevent
|
||||
// over/underflows.
|
||||
if ciphertext.len() < TAG_LEN || ciphertext.len() - TAG_LEN < plaintext.len() {
|
||||
return Err(AeadError::InvalidLengths);
|
||||
}
|
||||
|
||||
let (ct, mac) = ciphertext.split_at(ciphertext.len() - TAG_LEN);
|
||||
let nonce = GenericArray::from_slice(nonce);
|
||||
let tag = GenericArray::from_slice(mac);
|
||||
copy_slice(ct).to(plaintext);
|
||||
|
||||
// This only fails if the length is wrong, which really shouldn't happen and would
|
||||
// constitute an internal error.
|
||||
let decrypter = AeadImpl::new_from_slice(key).map_err(|_| AeadError::InternalError)?;
|
||||
|
||||
decrypter
|
||||
.decrypt_in_place_detached(nonce, ad, plaintext, tag)
|
||||
.map_err(|_| AeadError::DecryptError)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl AeadXChaCha20Poly1305 for XChaCha20Poly1305 {}
|
||||
|
||||
/// Encrypts using XChaCha20Poly1305 as implemented in [RustCrypto](https://github.com/RustCrypto/AEADs/tree/master/chacha20poly1305).
|
||||
/// `key` and `nonce` MUST be chosen (pseudo-)randomly. The `key` slice MUST have a length of
|
||||
/// [KEY_LEN]. The `nonce` slice MUST have a length of [NONCE_LEN].
|
||||
/// In contrast to [chacha20poly1305_ietf::encrypt](crate::subtle::chacha20poly1305_ietf::encrypt) and
|
||||
/// [chacha20poly1305_ietf_libcrux::encrypt](crate::subtle::chacha20poly1305_ietf_libcrux::encrypt),
|
||||
/// `nonce` is also written into `ciphertext` and therefore ciphertext MUST have a length
|
||||
/// of at least [NONCE_LEN] + `plaintext.len()` + [TAG_LEN].
|
||||
///
|
||||
/// # Examples
|
||||
///```rust
|
||||
/// # use rosenpass_ciphers::subtle::rust_crypto::xchacha20poly1305_ietf::{encrypt, TAG_LEN, KEY_LEN, NONCE_LEN};
|
||||
/// const PLAINTEXT_LEN: usize = 43;
|
||||
/// let plaintext = "post-quantum cryptography is very important".as_bytes();
|
||||
/// assert_eq!(PLAINTEXT_LEN, plaintext.len());
|
||||
/// let key: &[u8; KEY_LEN] = &[0u8; KEY_LEN]; // THIS IS NOT A SECURE KEY
|
||||
/// let nonce: &[u8; NONCE_LEN] = &[0u8; NONCE_LEN]; // THIS IS NOT A SECURE NONCE
|
||||
/// let additional_data: &[u8] = "the encrypted message is very important".as_bytes();
|
||||
/// let mut ciphertext_buffer = [0u8; NONCE_LEN + PLAINTEXT_LEN + TAG_LEN];
|
||||
///
|
||||
///
|
||||
/// let res: anyhow::Result<()> = encrypt(&mut ciphertext_buffer, key, nonce, additional_data, plaintext);
|
||||
/// # assert!(res.is_ok());
|
||||
/// # let expected_ciphertext: &[u8] = &[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
/// # 0, 0, 0, 0, 8, 241, 229, 253, 200, 81, 248, 30, 183, 149, 134, 168, 149, 87, 109, 49, 159, 108,
|
||||
/// # 206, 89, 51, 232, 232, 197, 163, 253, 254, 208, 73, 76, 253, 13, 247, 162, 133, 184, 177, 44,
|
||||
/// # 73, 138, 176, 193, 61, 248, 61, 183, 164, 192, 214, 168, 4, 1, 62, 243, 36, 48, 149, 164, 6];
|
||||
/// # assert_eq!(expected_ciphertext, &ciphertext_buffer);
|
||||
///```
|
||||
#[inline]
|
||||
pub fn encrypt(
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8; KEY_LEN],
|
||||
nonce: &[u8; NONCE_LEN],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
XChaCha20Poly1305
|
||||
.encrypt_with_nonce_in_ctxt(ciphertext, key, nonce, ad, plaintext)
|
||||
.map_err(anyhow::Error::from)
|
||||
}
|
||||
|
||||
/// Decrypts a `ciphertext` and verifies the integrity of the `ciphertext` and the additional data
|
||||
/// `ad`. using XChaCha20Poly1305 as implemented in [RustCrypto](https://github.com/RustCrypto/AEADs/tree/master/chacha20poly1305).
|
||||
///
|
||||
/// The `key` slice MUST have a length of [KEY_LEN]. The `nonce` slice MUST have a length of
|
||||
/// [NONCE_LEN]. The plaintext buffer must have a capacity of `ciphertext.len()` - [TAG_LEN] - [NONCE_LEN].
|
||||
///
|
||||
/// In contrast to [chacha20poly1305_ietf::decrypt](crate::subtle::chacha20poly1305_ietf::decrypt) and
|
||||
/// [chacha20poly1305_ietf_libcrux::decrypt](crate::subtle::chacha20poly1305_ietf_libcrux::decrypt),
|
||||
/// `ciphertext` MUST include the nonce, as it is not passed separately.
|
||||
///
/// # Examples
///```rust
/// # use rosenpass_ciphers::subtle::rust_crypto::xchacha20poly1305_ietf::{decrypt, TAG_LEN, KEY_LEN, NONCE_LEN};
/// let ciphertext: &[u8] = &[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
/// # 0, 0, 0, 0, 8, 241, 229, 253, 200, 81, 248, 30, 183, 149, 134, 168, 149, 87, 109, 49, 159, 108,
/// # 206, 89, 51, 232, 232, 197, 163, 253, 254, 208, 73, 76, 253, 13, 247, 162, 133, 184, 177, 44,
/// # 73, 138, 176, 193, 61, 248, 61, 183, 164, 192, 214, 168, 4, 1, 62, 243, 36, 48, 149, 164, 6];
/// // this is the ciphertext generated by the example for the encryption
/// const PLAINTEXT_LEN: usize = 43;
/// assert_eq!(PLAINTEXT_LEN + TAG_LEN + NONCE_LEN, ciphertext.len());
///
/// let key: &[u8; KEY_LEN] = &[0u8; KEY_LEN]; // THIS IS NOT A SECURE KEY
/// let nonce: &[u8; NONCE_LEN] = &[0u8; NONCE_LEN]; // THIS IS NOT A SECURE NONCE
/// let additional_data: &[u8] = "the encrypted message is very important".as_bytes();
/// let mut plaintext_buffer = [0u8; PLAINTEXT_LEN];
///
/// let res: anyhow::Result<()> = decrypt(&mut plaintext_buffer, key, additional_data, ciphertext);
/// assert!(res.is_ok());
/// let expected_plaintext = "post-quantum cryptography is very important".as_bytes();
/// assert_eq!(expected_plaintext, plaintext_buffer);
///
///```
#[inline]
pub fn decrypt(
    plaintext: &mut [u8],
    key: &[u8; KEY_LEN],
    ad: &[u8],
    ciphertext: &[u8],
) -> anyhow::Result<()> {
    XChaCha20Poly1305
        .decrypt_with_nonce_in_ctxt(plaintext, key, ad, ciphertext)
        .map_err(anyhow::Error::from)
}
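Taken together, `encrypt` and `decrypt` above treat the ciphertext buffer as `nonce || body || tag`. A minimal sketch of that layout, with constant values assumed to match XChaCha20Poly1305 (24-byte nonce, 16-byte Poly1305 tag) and a helper name made up for illustration:

```rust
// Illustrative only: how a caller could slice the combined buffer used by
// `encrypt`/`decrypt` above. NONCE_LEN and TAG_LEN are assumed values here.
const NONCE_LEN: usize = 24;
const TAG_LEN: usize = 16;

/// Split `nonce || body || tag` into its three parts, or return `None`
/// if the buffer is too short to hold a nonce and a tag at all.
fn split_ciphertext(buf: &[u8]) -> Option<(&[u8], &[u8], &[u8])> {
    if buf.len() < NONCE_LEN + TAG_LEN {
        return None;
    }
    let (nonce, rest) = buf.split_at(NONCE_LEN);
    let (body, tag) = rest.split_at(rest.len() - TAG_LEN);
    Some((nonce, body, tag))
}
```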
@@ -1,45 +0,0 @@
|
||||
use rosenpass_to::ops::copy_slice;
|
||||
use rosenpass_to::To;
|
||||
use rosenpass_util::typenum2const;
|
||||
|
||||
use chacha20poly1305::aead::generic_array::GenericArray;
|
||||
use chacha20poly1305::XChaCha20Poly1305 as AeadImpl;
|
||||
use chacha20poly1305::{AeadCore, AeadInPlace, KeyInit, KeySizeUser};
|
||||
|
||||
pub const KEY_LEN: usize = typenum2const! { <AeadImpl as KeySizeUser>::KeySize };
|
||||
pub const TAG_LEN: usize = typenum2const! { <AeadImpl as AeadCore>::TagSize };
|
||||
pub const NONCE_LEN: usize = typenum2const! { <AeadImpl as AeadCore>::NonceSize };
|
||||
|
||||
#[inline]
|
||||
pub fn encrypt(
|
||||
ciphertext: &mut [u8],
|
||||
key: &[u8],
|
||||
nonce: &[u8],
|
||||
ad: &[u8],
|
||||
plaintext: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
let nonce = GenericArray::from_slice(nonce);
|
||||
let (n, ct_mac) = ciphertext.split_at_mut(NONCE_LEN);
|
||||
let (ct, mac) = ct_mac.split_at_mut(ct_mac.len() - TAG_LEN);
|
||||
copy_slice(nonce).to(n);
|
||||
copy_slice(plaintext).to(ct);
|
||||
let mac_value = AeadImpl::new_from_slice(key)?.encrypt_in_place_detached(nonce, ad, ct)?;
|
||||
copy_slice(&mac_value[..]).to(mac);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn decrypt(
|
||||
plaintext: &mut [u8],
|
||||
key: &[u8],
|
||||
ad: &[u8],
|
||||
ciphertext: &[u8],
|
||||
) -> anyhow::Result<()> {
|
||||
let (n, ct_mac) = ciphertext.split_at(NONCE_LEN);
|
||||
let (ct, mac) = ct_mac.split_at(ct_mac.len() - TAG_LEN);
|
||||
let nonce = GenericArray::from_slice(n);
|
||||
let tag = GenericArray::from_slice(mac);
|
||||
copy_slice(ct).to(plaintext);
|
||||
AeadImpl::new_from_slice(key)?.decrypt_in_place_detached(nonce, ad, plaintext, tag)?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -8,6 +8,7 @@ description = "Rosenpass internal utilities for constant time crypto implementat
|
||||
homepage = "https://rosenpass.eu/"
|
||||
repository = "https://github.com/rosenpass/rosenpass"
|
||||
readme = "readme.md"
|
||||
rust-version = "1.77"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@@ -19,4 +20,7 @@ rosenpass-to = { workspace = true }
|
||||
memsec = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.8.5"
|
||||
rand = { workspace = true }
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "allow", check-cfg = ['cfg(coverage)'] }
|
||||
|
||||
@@ -1,7 +1,30 @@
|
||||
//! Constant-time comparison
|
||||
|
||||
use core::ptr;
|
||||
|
||||
/// Little endian memcmp version of quininer/memsec
/// https://github.com/quininer/memsec/blob/bbc647967ff6d20d6dccf1c85f5d9037fcadd3b0/src/lib.rs#L30
/// Little endian memcmp version of [quininer/memsec](https://github.com/quininer/memsec/blob/bbc647967ff6d20d6dccf1c85f5d9037fcadd3b0/src/lib.rs#L30)
|
||||
///
|
||||
/// # Panic & Safety
|
||||
///
|
||||
/// Both input arrays must be at least of the indicated length.
|
||||
///
|
||||
/// See [std::ptr::read_volatile] on safety.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// let a = [1, 2, 3, 4];
|
||||
/// let b = [1, 2, 3, 4];
|
||||
/// let c = [1, 2, 2, 5];
|
||||
/// let d = [1, 2, 2, 4];
|
||||
///
|
||||
/// unsafe {
|
||||
/// use rosenpass_constant_time::memcmp_le;
|
||||
/// assert_eq!(memcmp_le(a.as_ptr(), b.as_ptr(), 4), 0);
|
||||
/// assert!(memcmp_le(a.as_ptr(), c.as_ptr(), 4) < 0);
|
||||
/// assert!(memcmp_le(a.as_ptr(), d.as_ptr(), 4) > 0);
|
||||
/// assert_eq!(memcmp_le(a.as_ptr(), b.as_ptr(), 2), 0);
|
||||
/// }
|
||||
/// ```
|
||||
#[inline(never)]
|
||||
pub unsafe fn memcmp_le(b1: *const u8, b2: *const u8, len: usize) -> i32 {
|
||||
let mut res = 0;
|
||||
@@ -13,6 +36,16 @@ pub unsafe fn memcmp_le(b1: *const u8, b2: *const u8, len: usize) -> i32 {
|
||||
((res - 1) >> 8) + (res >> 8) + 1
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn memcmp_le_test() {
|
||||
// use rosenpass_constant_time::memcmp_le;
|
||||
let a = [0, 1, 0, 0];
|
||||
let b = [0, 0, 0, 1];
|
||||
assert_eq!(-1, unsafe { memcmp_le(a.as_ptr(), b.as_ptr(), 4) });
|
||||
assert_eq!(0, unsafe { memcmp_le(a.as_ptr(), a.as_ptr(), 4) });
|
||||
assert_eq!(1, unsafe { memcmp_le(b.as_ptr(), a.as_ptr(), 4) });
|
||||
}
|
||||
|
||||
/// compares two slices of memory content and returns an integer indicating the relationship between
|
||||
/// the slices
|
||||
///
|
||||
@@ -32,8 +65,50 @@ pub unsafe fn memcmp_le(b1: *const u8, b2: *const u8, len: usize) -> i32 {
|
||||
/// ## Tests
|
||||
/// For discussion on how to ensure the constant-time execution of this function, see
|
||||
/// <https://github.com/rosenpass/rosenpass/issues/232>
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use rosenpass_constant_time::compare;
|
||||
/// let a = [0, 1, 0, 0];
|
||||
/// let b = [0, 0, 0, 1];
|
||||
/// assert_eq!(-1, compare(&a, &b));
|
||||
/// assert_eq!(0, compare(&a, &a));
|
||||
/// assert_eq!(1, compare(&b, &a));
|
||||
/// ```
|
||||
///
|
||||
/// # Panic
|
||||
///
|
||||
/// This function will panic if the input arrays are of different lengths.
|
||||
///
|
||||
/// ```should_panic
|
||||
/// use rosenpass_constant_time::compare;
|
||||
/// let a = [0, 1, 0];
|
||||
/// let b = [0, 0, 0, 1];
|
||||
/// compare(&a, &b);
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn compare(a: &[u8], b: &[u8]) -> i32 {
|
||||
assert!(a.len() == b.len());
|
||||
unsafe { memcmp_le(a.as_ptr(), b.as_ptr(), a.len()) }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::compare::memcmp_le;
|
||||
|
||||
#[test]
|
||||
fn memcmp_le_test() {
|
||||
let a = [1, 2, 3, 4];
|
||||
let b = [1, 2, 3, 4];
|
||||
let c = [1, 2, 2, 5];
|
||||
let d = [1, 2, 2, 4];
|
||||
|
||||
unsafe {
|
||||
assert_eq!(memcmp_le(a.as_ptr(), b.as_ptr(), 4), 0);
|
||||
assert!(memcmp_le(a.as_ptr(), c.as_ptr(), 4) < 0);
|
||||
assert!(memcmp_le(a.as_ptr(), d.as_ptr(), 4) > 0);
|
||||
assert_eq!(memcmp_le(a.as_ptr(), b.as_ptr(), 2), 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
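For orientation, here is a sketch of the kind of branch-free loop the linked memsec routine uses, adapted to the little-endian ordering described above. This is illustrative only and the helper name is made up; it is not necessarily identical to the crate's actual `memcmp_le` body:

```rust
use core::ptr;

// Illustrative sketch of a branch-free little-endian memcmp in the spirit of
// the linked memsec code: the byte at the highest index is the most
// significant one, so it is folded in last and dominates the result.
unsafe fn memcmp_le_sketch(b1: *const u8, b2: *const u8, len: usize) -> i32 {
    let mut res: i32 = 0;
    for i in 0..len {
        let diff = i32::from(ptr::read_volatile(b1.add(i)))
            - i32::from(ptr::read_volatile(b2.add(i)));
        // keep the previous value while diff == 0, otherwise replace it
        res = (res & (((diff - 1) & !diff) >> 8)) | diff;
    }
    // map res < 0 / == 0 / > 0 onto -1 / 0 / 1 without branching
    ((res - 1) >> 8) + (res >> 8) + 1
}
```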
@@ -1,11 +1,21 @@
|
||||
//! Incrementing numbers
|
||||
|
||||
use core::hint::black_box;
|
||||
|
||||
/// Interpret the given slice as a little-endian unsigned integer
|
||||
/// and increment that integer.
|
||||
///
|
||||
/// # Leaks
|
||||
/// TODO: mention here if this function leaks any information, see
|
||||
/// <https://github.com/rosenpass/rosenpass/issues/232>
|
||||
/// This function may leak timing information in the following ways:
|
||||
///
|
||||
/// - The function execution time is linearly proportional to the input length
|
||||
/// - The number of carry operations that occur may affect timing slightly
|
||||
/// - Memory access patterns are sequential and predictable
|
||||
///
|
||||
/// The carry operation timing variation is mitigated through the use of black_box,
|
||||
/// but the linear scaling with input size is inherent to the operation.
|
||||
/// These timing characteristics are generally considered acceptable for most
|
||||
/// cryptographic counter implementations.
|
||||
///
|
||||
/// ## Tests
|
||||
/// For discussion on how to ensure the constant-time execution of this function, see
|
||||
|
||||
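The increment routine documented in the hunk above amounts to a little-endian add-with-carry across the whole slice; as the leak discussion notes, the crate's version additionally routes the carry through `core::hint::black_box`. A minimal illustrative sketch (the function name is made up for this example):

```rust
// Illustrative sketch: treat `counter` as a little-endian integer and add one,
// touching every byte so the amount of work does not depend on the value.
fn increment_le_sketch(counter: &mut [u8]) {
    let mut carry: u16 = 1;
    for byte in counter.iter_mut() {
        let sum = u16::from(*byte) + carry;
        *byte = sum as u8; // low eight bits stay in the buffer
        carry = sum >> 8; // overflow becomes the carry into the next byte
    }
}
```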
@@ -1,3 +1,5 @@
|
||||
#![warn(missing_docs)]
|
||||
#![warn(clippy::missing_docs_in_private_items)]
|
||||
//! constant-time implementations of some primitives
|
||||
//!
|
||||
//! Rosenpass internal library providing basic constant-time operations.
|
||||
@@ -5,6 +7,32 @@
|
||||
//! ## TODO
|
||||
//! Figure out methodology to ensure that code is actually constant time, see
|
||||
//! <https://github.com/rosenpass/rosenpass/issues/232>
|
||||
//!
|
||||
//! # Examples
|
||||
//!
|
||||
//! ```rust
|
||||
//! use rosenpass_constant_time::{memcmp, compare};
|
||||
//!
|
||||
//! let a = [1, 2, 3, 4];
|
||||
//! let b = [1, 2, 3, 4];
|
||||
//! let c = [1, 2, 3, 5];
|
||||
//!
|
||||
//! // Compare for equality
|
||||
//! assert!(memcmp(&a, &b));
|
||||
//! assert!(!memcmp(&a, &c));
|
||||
//!
|
||||
//! // Compare lexicographically
|
||||
//! assert_eq!(compare(&a, &c), -1); // a < c
|
||||
//! assert_eq!(compare(&c, &a), 1); // c > a
|
||||
//! assert_eq!(compare(&a, &b), 0); // a == b
|
||||
//! ```
|
||||
//!
|
||||
//! # Security Notes
|
||||
//!
|
||||
//! While these functions aim to be constant-time, they may leak timing information in some cases:
|
||||
//!
|
||||
//! - Length mismatches between inputs are immediately detectable
|
||||
//! - Execution time scales linearly with input size
|
||||
|
||||
mod compare;
|
||||
mod increment;
|
||||
@@ -12,6 +40,7 @@ mod memcmp;
|
||||
mod xor;
|
||||
|
||||
pub use compare::compare;
|
||||
pub use compare::memcmp_le;
|
||||
pub use increment::increment;
|
||||
pub use memcmp::memcmp;
|
||||
pub use xor::xor;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
//! memcmp
|
||||
|
||||
/// compares two slices of memory content and returns whether they are equal
|
||||
///
|
||||
/// ## Leaks
|
||||
@@ -8,20 +10,33 @@
|
||||
/// The execution time of the function grows approximately linearly with the length of the input. This is
|
||||
/// considered safe.
|
||||
///
|
||||
/// ## Tests
|
||||
/// [`tests::memcmp_runs_in_constant_time`] runs a statistical test that the equality of the two
|
||||
/// input parameters does not correlate with the run time.
|
||||
/// ## Examples
|
||||
///
|
||||
/// For discussion on how to (further) ensure the constant-time execution of this function,
|
||||
/// see <https://github.com/rosenpass/rosenpass/issues/232>
|
||||
/// ```rust
|
||||
/// use rosenpass_constant_time::memcmp;
|
||||
/// let a = [0, 0, 0, 0];
|
||||
/// let b = [0, 0, 0, 1];
|
||||
/// let c = [0, 0, 0];
|
||||
/// assert!(memcmp(&a, &a));
|
||||
/// assert!(!memcmp(&a, &b));
|
||||
/// assert!(!memcmp(&a, &c));
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn memcmp(a: &[u8], b: &[u8]) -> bool {
|
||||
a.len() == b.len() && unsafe { memsec::memeq(a.as_ptr(), b.as_ptr(), a.len()) }
|
||||
}
|
||||
|
||||
/// [tests::memcmp_runs_in_constant_time] runs a statistical test that the equality of the two
|
||||
/// input parameters does not correlate with the run time.
|
||||
///
|
||||
/// For discussion on how to (further) ensure the constant-time execution of this function,
|
||||
/// see <https://github.com/rosenpass/rosenpass/issues/232>
|
||||
#[cfg(all(test, feature = "constant_time_tests"))]
|
||||
// Stopgap measure against https://github.com/rosenpass/rosenpass/issues/634
|
||||
#[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use core::hint::black_box;
|
||||
use rand::seq::SliceRandom;
|
||||
use rand::thread_rng;
|
||||
use std::time::Instant;
|
||||
@@ -38,14 +53,12 @@ mod tests {
|
||||
fn memcmp_runs_in_constant_time() {
|
||||
// prepare data to compare
|
||||
let n: usize = 1E6 as usize; // number of comparisons to run
|
||||
let len = 1024; // length of each slice passed as parameters to the tested comparison function
|
||||
let a1 = "a".repeat(len);
|
||||
let a2 = a1.clone();
|
||||
let b = "b".repeat(len);
|
||||
const LEN: usize = 1024; // length of each slice passed as parameters to the tested comparison function
|
||||
|
||||
let a1 = a1.as_bytes();
|
||||
let a2 = a2.as_bytes();
|
||||
let b = b.as_bytes();
|
||||
let a = [b'a'; LEN];
|
||||
let b = [b'b'; LEN];
|
||||
|
||||
let mut tmp = [0u8; LEN];
|
||||
|
||||
// vector representing all timing tests
|
||||
//
|
||||
@@ -59,12 +72,14 @@ mod tests {
|
||||
|
||||
// run comparisons / call function to test
|
||||
for test in tests.iter_mut() {
|
||||
let src = match test.0 {
|
||||
true => a,
|
||||
false => b,
|
||||
};
|
||||
tmp.copy_from_slice(&src);
|
||||
|
||||
let now = Instant::now();
|
||||
if test.0 {
|
||||
memcmp(a1, a2);
|
||||
} else {
|
||||
memcmp(a1, b);
|
||||
}
|
||||
memcmp(black_box(&a), black_box(&tmp));
|
||||
test.1 = now.elapsed();
|
||||
// println!("eq: {}, elapsed: {:.2?}", test.0, test.1);
|
||||
}
|
||||
@@ -101,6 +116,7 @@ mod tests {
|
||||
// Pearson correlation
|
||||
let correlation = cv / (sd_x * sd_y);
|
||||
println!("correlation: {:.6?}", correlation);
|
||||
#[cfg(not(coverage))]
|
||||
assert!(
|
||||
correlation.abs() < 0.01,
|
||||
"execution time correlates with result"
|
||||
|
||||
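The statistical check shown in the hunk above boils down to computing the Pearson correlation between the "inputs were equal" flag and the measured run time of each call, and requiring it to stay close to zero. An illustrative sketch of that calculation (the data shape and function name are assumptions, not the test's exact code):

```rust
// Illustrative only: Pearson correlation between the equality flag (0.0/1.0)
// and the measured duration of each call; a constant-time memcmp should keep
// this close to zero.
fn pearson(samples: &[(bool, f64)]) -> f64 {
    let n = samples.len() as f64;
    let mean_x = samples.iter().map(|(eq, _)| *eq as u8 as f64).sum::<f64>() / n;
    let mean_y = samples.iter().map(|(_, t)| *t).sum::<f64>() / n;
    let (mut cov, mut var_x, mut var_y) = (0.0, 0.0, 0.0);
    for (eq, t) in samples {
        let dx = *eq as u8 as f64 - mean_x;
        let dy = *t - mean_y;
        cov += dx * dy;
        var_x += dx * dx;
        var_y += dy * dy;
    }
    cov / (var_x.sqrt() * var_y.sqrt())
}
```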
@@ -1,14 +1,27 @@
|
||||
//! xor
|
||||
|
||||
use core::hint::black_box;
|
||||
use rosenpass_to::{with_destination, To};
|
||||
|
||||
/// Xors the source into the destination
|
||||
///
|
||||
/// Performs a constant-time XOR operation between two byte slices
|
||||
///
|
||||
/// Takes a source slice and XORs it with the destination slice in-place using the
|
||||
/// rosenpass_to trait for destination management.
|
||||
///
|
||||
/// # Panics
|
||||
/// If source and destination are of different sizes.
|
||||
///
|
||||
/// # Leaks
|
||||
/// TODO: mention here if this function leaks any information, see
|
||||
/// <https://github.com/rosenpass/rosenpass/issues/232>
|
||||
/// This function may leak timing information in the following ways:
|
||||
///
|
||||
/// - The function execution time is linearly proportional to the input length
|
||||
/// - Length mismatches between source and destination are immediately detectable via panic
|
||||
/// - Memory access patterns follow a predictable sequential pattern
|
||||
///
|
||||
/// These leaks are generally considered acceptable in most cryptographic contexts
|
||||
/// as they don't reveal information about the actual content being XORed.
|
||||
///
|
||||
/// ## Tests
|
||||
/// For discussion on how to ensure the constant-time execution of this function, see
|
||||
|
||||
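The xor function described in the hunk above performs an in-place XOR of the source into the destination and panics on a length mismatch. An illustrative sketch of that behaviour follows; the documented code exposes it through the rosenpass_to destination pattern (e.g. `xor(&src).to(&mut dst)` — that call form is an assumption here), and the helper name below is made up:

```rust
// Illustrative sketch of the documented behaviour: XOR `src` into `dst` in
// place; the length check mirrors the documented panic on size mismatch.
fn xor_into_sketch(dst: &mut [u8], src: &[u8]) {
    assert_eq!(dst.len(), src.len());
    for (d, s) in dst.iter_mut().zip(src.iter()) {
        *d ^= *s;
    }
}
```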
46
coverage_report.sh
Executable file
@@ -0,0 +1,46 @@
|
||||
#! /usr/bin/env bash
|
||||
|
||||
set -e -o pipefail
|
||||
|
||||
OUTPUT_DIR="target/grcov"
|
||||
|
||||
log() {
|
||||
echo >&2 "$@"
|
||||
}
|
||||
|
||||
exc() {
|
||||
echo '$' "$@"
|
||||
"$@"
|
||||
}
|
||||
|
||||
main() {
|
||||
exc cd "$(dirname "$0")"
|
||||
|
||||
local open="0"
|
||||
if [[ "$1" == "--open" ]]; then
|
||||
open="1"
|
||||
fi
|
||||
|
||||
exc cargo llvm-cov --all-features --workspace --doctests --branch
|
||||
|
||||
exc rm -rf target/llvm-cov-target/debug/deps/doctestbins
|
||||
exc mv -v target/llvm-cov-target/doctestbins target/llvm-cov-target/debug/deps/
|
||||
exc rm -rf "${OUTPUT_DIR}"
|
||||
exc mkdir -p "${OUTPUT_DIR}"
|
||||
exc grcov target/llvm-cov-target/ --llvm -s . --branch \
|
||||
--binary-path ./target/llvm-cov-target/debug/deps \
|
||||
--ignore-not-existing --ignore '../*' --ignore "/*" \
|
||||
--excl-line '^\s*#\[(derive|repr)\(' \
|
||||
-t lcov,html,markdown -o "${OUTPUT_DIR}"
|
||||
|
||||
if (( "${open}" == 1 )); then
|
||||
xdg-open "${PWD}/${OUTPUT_DIR}/html/index.html"
|
||||
fi
|
||||
|
||||
log ""
|
||||
log "Generated reports in \"${PWD}/${OUTPUT_DIR}\"."
|
||||
log "Open \"${PWD}/${OUTPUT_DIR}/html/index.html\" to view HTML report."
|
||||
log ""
|
||||
}
|
||||
|
||||
main "$@"
|
||||
129
deny.toml
Normal file
@@ -0,0 +1,129 @@
|
||||
# The graph table configures how the dependency graph is constructed and thus
|
||||
# which crates the checks are performed against
|
||||
[graph]
|
||||
# If true, metadata will be collected with `--all-features`. Note that this can't
|
||||
# be toggled off if true, if you want to conditionally enable `--all-features` it
|
||||
# is recommended to pass `--all-features` on the cmd line instead
|
||||
all-features = true
|
||||
# If true, metadata will be collected with `--no-default-features`. The same
|
||||
# caveat with `all-features` applies
|
||||
no-default-features = false
|
||||
|
||||
# The output table provides options for how/if diagnostics are outputted
|
||||
[output]
|
||||
# When outputting inclusion graphs in diagnostics that include features, this
|
||||
# option can be used to specify the depth at which feature edges will be added.
|
||||
# This option is included since the graphs can be quite large and the addition
|
||||
# of features from the crate(s) to all of the graph roots can be far too verbose.
|
||||
# This option can be overridden via `--feature-depth` on the cmd line
|
||||
feature-depth = 1
|
||||
|
||||
# This section is considered when running `cargo deny check advisories`
|
||||
# More documentation for the advisories section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
|
||||
[advisories]
|
||||
# A list of advisory IDs to ignore. Note that ignored advisories will still
|
||||
# output a note when they are encountered.
|
||||
ignore = [
|
||||
"RUSTSEC-2024-0370",
|
||||
"RUSTSEC-2024-0436",
|
||||
"RUSTSEC-2023-0089",
|
||||
]
|
||||
# If this is true, then cargo deny will use the git executable to fetch advisory database.
|
||||
# If this is false, then it uses a built-in git library.
|
||||
# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.
|
||||
# See Git Authentication for more information about setting up git authentication.
|
||||
#git-fetch-with-cli = true
|
||||
|
||||
# This section is considered when running `cargo deny check #licenses`
|
||||
# More documentation for the licenses section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
|
||||
[licenses]
|
||||
# List of explicitly allowed licenses
|
||||
# See https://spdx.org/licenses/ for list of possible licenses
|
||||
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
|
||||
allow = [
|
||||
"MIT",
|
||||
"Apache-2.0",
|
||||
"Apache-2.0 WITH LLVM-exception",
|
||||
"BSD-3-Clause",
|
||||
"ISC",
|
||||
]
|
||||
# The confidence threshold for detecting a license from license text.
|
||||
# The higher the value, the more closely the license text must be to the
|
||||
# canonical license text of a valid SPDX license file.
|
||||
# [possible values: any between 0.0 and 1.0].
|
||||
confidence-threshold = 0.8
|
||||
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
|
||||
# aren't accepted for every possible crate as with the normal allow list
|
||||
exceptions = [
|
||||
# Each entry is the crate and version constraint, and its specific allow
|
||||
# list
|
||||
{ allow = ["Unicode-DFS-2016", "Unicode-3.0"], crate = "unicode-ident" },
|
||||
{ allow = ["NCSA"], crate = "libfuzzer-sys" },
|
||||
|
||||
]
|
||||
|
||||
[licenses.private]
|
||||
# If true, ignores workspace crates that aren't published, or are only
|
||||
# published to private registries.
|
||||
# To see how to mark a crate as unpublished (to the official registry),
|
||||
# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.
|
||||
ignore = true
|
||||
|
||||
# This section is considered when running `cargo deny check bans`.
|
||||
# More documentation about the 'bans' section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
|
||||
[bans]
|
||||
# Lint level for when multiple versions of the same crate are detected
|
||||
multiple-versions = "warn"
|
||||
# Lint level for when a crate version requirement is `*`
|
||||
wildcards = "allow"
|
||||
# The graph highlighting used when creating dotgraphs for crates
|
||||
# with multiple versions
|
||||
# * lowest-version - The path to the lowest versioned duplicate is highlighted
|
||||
# * simplest-path - The path to the version with the fewest edges is highlighted
|
||||
# * all - Both lowest-version and simplest-path are used
|
||||
highlight = "all"
|
||||
# The default lint level for `default` features for crates that are members of
|
||||
# the workspace that is being checked. This can be overridden by allowing/denying
|
||||
# `default` on a crate-by-crate basis if desired.
|
||||
workspace-default-features = "allow"
|
||||
# The default lint level for `default` features for external crates that are not
|
||||
# members of the workspace. This can be overridden by allowing/denying `default`
|
||||
# on a crate-by-crate basis if desired.
|
||||
external-default-features = "allow"
|
||||
# List of crates that are allowed. Use with care!
|
||||
allow = [
|
||||
]
|
||||
# List of crates to deny
|
||||
deny = [
|
||||
]
|
||||
|
||||
skip-tree = [
|
||||
|
||||
]
|
||||
|
||||
# This section is considered when running `cargo deny check sources`.
|
||||
# More documentation about the 'sources' section can be found here:
|
||||
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
|
||||
[sources]
|
||||
# Lint level for what to happen when a crate from a crate registry that is not
|
||||
# in the allow list is encountered
|
||||
unknown-registry = "warn"
|
||||
# Lint level for what to happen when a crate from a git repository that is not
|
||||
# in the allow list is encountered
|
||||
unknown-git = "warn"
|
||||
# List of URLs for allowed crate registries. Defaults to the crates.io index
|
||||
# if not specified. If it is specified but empty, no registries are allowed.
|
||||
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
|
||||
# List of URLs for allowed Git repositories
|
||||
allow-git = ["git+https://github.com/rosenpass/memsec.git?branch=master"]
|
||||
|
||||
[sources.allow-org]
|
||||
# github.com organizations to allow git sources for
|
||||
github = []
|
||||
# gitlab.com organizations to allow git sources for
|
||||
gitlab = []
|
||||
# bitbucket.org organizations to allow git sources for
|
||||
bitbucket = []
|
||||
114
doc/rosenpass.1
@@ -1,114 +0,0 @@
|
||||
.Dd $Mdocdate$
|
||||
.Dt ROSENPASS 1
|
||||
.Os
|
||||
.Sh NAME
|
||||
.Nm rosenpass
|
||||
.Nd builds post-quantum-secure VPNs
|
||||
.Sh SYNOPSIS
|
||||
.Nm
|
||||
.Op COMMAND
|
||||
.Op Ar OPTIONS ...
|
||||
.Op Ar ARGS ...
|
||||
.Sh DESCRIPTION
|
||||
.Nm
|
||||
performs cryptographic key exchanges that are secure against quantum-computers
|
||||
and then outputs the keys.
|
||||
These keys can then be passed to various services, such as wireguard or other
|
||||
vpn services, as pre-shared-keys to achieve security against attackers with
|
||||
quantum computers.
|
||||
.Pp
|
||||
This is a research project and quantum computers are not thought to become
|
||||
practical in fewer than ten years.
|
||||
If you are not specifically tasked with developing post-quantum secure systems,
|
||||
you probably do not need this tool.
|
||||
.Ss COMMANDS
|
||||
.Bl -tag -width Ds
|
||||
.It Ar gen-keys --secret-key <file-path> --public-key <file-path>
|
||||
Generate a keypair to use in the exchange command later.
|
||||
Send the public-key file to your communication partner and keep the private-key
|
||||
file secret!
|
||||
.It Ar exchange private-key <file-path> public-key <file-path> [ OPTIONS ] PEERS
|
||||
Start a process to exchange keys with the specified peers.
|
||||
You should specify at least one peer.
|
||||
.Pp
|
||||
Its
|
||||
.Ar OPTIONS
|
||||
are as follows:
|
||||
.Bl -tag -width Ds
|
||||
.It Ar listen <ip>[:<port>]
|
||||
Instructs
|
||||
.Nm
|
||||
to listen on the specified interface and port.
|
||||
By default,
|
||||
.Nm
|
||||
will listen on all interfaces and select a random port.
|
||||
.It Ar verbose
|
||||
Extra logging.
|
||||
.El
|
||||
.El
|
||||
.Ss PEER
|
||||
Each
|
||||
.Ar PEER
|
||||
is defined as follows:
|
||||
.Qq peer public-key <file-path> [endpoint <ip>[:<port>]] [preshared-key <file-path>] [outfile <file-path>] [wireguard <dev> <peer> <extra_params>]
|
||||
.Pp
|
||||
Providing a
|
||||
.Ar PEER
|
||||
instructs
|
||||
.Nm
|
||||
to exchange keys with the given peer and write the resulting PSK into the given
|
||||
output file.
|
||||
You must either specify the outfile or wireguard output option.
|
||||
.Pp
|
||||
The parameters of
|
||||
.Ar PEER
|
||||
are as follows:
|
||||
.Bl -tag -width Ds
|
||||
.It Ar endpoint <ip>[:<port>]
|
||||
Specifies the address where the peer can be reached.
|
||||
This will be automatically updated after the first successful key exchange with
|
||||
the peer.
|
||||
If this is unspecified, the peer must initiate the connection.
|
||||
.It Ar preshared-key <file-path>
|
||||
You may specify a pre-shared key which will be mixed into the final secret.
|
||||
.It Ar outfile <file-path>
|
||||
You may specify a file to write the exchanged keys to.
|
||||
If this option is specified,
|
||||
.Nm
|
||||
will write a notification to standard out every time the key is updated.
|
||||
.It Ar wireguard <dev> <peer> <extra_params>
|
||||
This allows you to directly specify a wireguard peer to deploy the
|
||||
pre-shared-key to.
|
||||
You may specify extra parameters you would pass to
|
||||
.Qq wg set
|
||||
besides the preshared-key parameter which is used by
|
||||
.Nm .
|
||||
This makes it possible to add peers entirely from
|
||||
.Nm .
|
||||
.El
|
||||
.Sh EXIT STATUS
|
||||
.Ex -std
|
||||
.Sh SEE ALSO
|
||||
.Xr rp 1 ,
|
||||
.Xr wg 1
|
||||
.Rs
|
||||
.%A Karolin Varner
|
||||
.%A Benjamin Lipp
|
||||
.%A Wanja Zaeske
|
||||
.%A Lisa Schmidt
|
||||
.%D 2023
|
||||
.%T Rosenpass
|
||||
.%U https://rosenpass.eu/whitepaper.pdf
|
||||
.Re
|
||||
.Sh STANDARDS
|
||||
This tool is the reference implementation of the Rosenpass protocol, as
|
||||
specified within the whitepaper referenced above.
|
||||
.Sh AUTHORS
|
||||
Rosenpass was created by Karolin Varner, Benjamin Lipp, Wanja Zaeske,
|
||||
Marei Peischl, Stephan Ajuvo, and Lisa Schmidt.
|
||||
.Pp
|
||||
This manual page was written by
|
||||
.An Emil Engler
|
||||
.Sh BUGS
|
||||
The bugs are tracked at
|
||||
.Lk https://github.com/rosenpass/rosenpass/issues .
|
||||
2
doc/rp.1
@@ -113,7 +113,7 @@ Rosenpass was created by Karolin Varner, Benjamin Lipp, Wanja Zaeske,
|
||||
Marei Peischl, Stephan Ajuvo, and Lisa Schmidt.
|
||||
.Pp
|
||||
This manual page was written by
|
||||
.An Emil Engler
|
||||
.An Clara Engler
|
||||
.Sh BUGS
|
||||
The bugs are tracked at
|
||||
.Lk https://github.com/rosenpass/rosenpass/issues .
|
||||
|
||||
45
docker/Dockerfile
Normal file
@@ -0,0 +1,45 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
ARG BASE_IMAGE=debian:bookworm-slim
|
||||
|
||||
# Stage 1: Base image with cargo-chef installed
|
||||
FROM rust:latest AS chef
|
||||
RUN cargo install cargo-chef
|
||||
# install software required for liboqs-rust
|
||||
RUN apt-get update && apt-get install -y clang cmake && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Stage 2: Prepare the cargo-chef recipe
|
||||
FROM chef AS planner
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
RUN cargo chef prepare --recipe-path recipe.json
|
||||
|
||||
# Stage 3: Cache dependencies using the recipe
|
||||
FROM chef AS cacher
|
||||
WORKDIR /app
|
||||
COPY --from=planner /app/recipe.json recipe.json
|
||||
RUN cargo chef cook --release --recipe-path recipe.json
|
||||
|
||||
# Stage 4: Build the application
|
||||
FROM cacher AS builder
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
RUN cargo build --release
|
||||
|
||||
# Stage 5: Install runtime-dependencies in the base image
|
||||
FROM ${BASE_IMAGE} AS base_image_with_dependencies
|
||||
|
||||
RUN apt-get update && apt-get install -y iproute2 && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Final Stage (rosenpass): Copy the rosenpass binary
|
||||
FROM base_image_with_dependencies AS rosenpass
|
||||
COPY --from=builder /app/target/release/rosenpass /usr/local/bin/rosenpass
|
||||
ENTRYPOINT [ "/usr/local/bin/rosenpass" ]
|
||||
|
||||
# Final Stage (rp): Copy the rp binary
|
||||
FROM base_image_with_dependencies AS rp
|
||||
|
||||
RUN apt-get update && apt-get install -y wireguard && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /app/target/release/rp /usr/local/bin/rp
|
||||
ENTRYPOINT [ "/usr/local/bin/rp" ]
|
||||
203
docker/USAGE.md
Normal file
@@ -0,0 +1,203 @@
|
||||
# Rosenpass in Docker
|
||||
|
||||
Rosenpass provides post-quantum-secure key exchange for VPNs. It generates symmetric keys used by [WireGuard](https://www.wireguard.com/papers/wireguard.pdf) or other applications. The protocol enhances "Post-Quantum WireGuard" ([PQWG](https://eprint.iacr.org/2020/379)) with a cookie mechanism for better security against state disruption attacks.
|
||||
|
||||
Prebuilt Docker images are available for easy deployment:
|
||||
|
||||
- [`ghcr.io/rosenpass/rosenpass`](https://github.com/rosenpass/rosenpass/pkgs/container/rosenpass) – the core key exchange tool
|
||||
- [`ghcr.io/rosenpass/rp`](https://github.com/rosenpass/rosenpass/pkgs/container/rp) – a frontend for setting up WireGuard VPNs
|
||||
|
||||
The entrypoint of the `rosenpass` image is the `rosenpass` executable, whose documentation can be found [here](https://rosenpass.eu/docs/rosenpass-tool/manuals/rp_manual/).
|
||||
Similarly, the entrypoint of the `rp` image is the `rp` executable, with its documentation available [here](https://rosenpass.eu/docs/rosenpass-tool/manuals/rp1/).
|
||||
|
||||
## Usage - Standalone Key Exchange
|
||||
|
||||
The `ghcr.io/rosenpass/rosenpass` image can be used in a server-client setup to exchange quantum-secure shared keys.
|
||||
This setup uses rosenpass as a standalone application, without using any other component such as wireguard.
|
||||
What follows is a simple setup for illustrative purposes.
|
||||
|
||||
Create a docker network that is used to connect the containers:
|
||||
|
||||
```bash
|
||||
docker network create -d bridge rp
|
||||
export NET=rp
|
||||
```
|
||||
|
||||
Generate the server and client key pairs:
|
||||
|
||||
```bash
|
||||
mkdir ./workdir-client ./workdir-server
|
||||
docker run -it --rm -v ./workdir-server:/workdir ghcr.io/rosenpass/rosenpass \
|
||||
gen-keys --public-key=workdir/server-public --secret-key=workdir/server-secret
|
||||
docker run -it --rm -v ./workdir-client:/workdir ghcr.io/rosenpass/rosenpass \
|
||||
gen-keys --public-key=workdir/client-public --secret-key=workdir/client-secret
|
||||
# share the public keys between client and server
|
||||
cp workdir-client/client-public workdir-server/client-public
|
||||
cp workdir-server/server-public workdir-client/server-public
|
||||
```
|
||||
|
||||
Start the server container:
|
||||
|
||||
```bash
|
||||
docker run --name "rpserver" --network ${NET} \
|
||||
-it --rm -v ./workdir-server:/workdir ghcr.io/rosenpass/rosenpass \
|
||||
exchange \
|
||||
private-key workdir/server-secret \
|
||||
public-key workdir/server-public \
|
||||
listen 0.0.0.0:9999 \
|
||||
peer public-key workdir/client-public \
|
||||
outfile workdir/server-sharedkey
|
||||
```
|
||||
|
||||
Find out the ip address of the server container:
|
||||
|
||||
```bash
|
||||
EP="rpserver"
|
||||
EP=$(docker inspect --format '{{ .NetworkSettings.Networks.rp.IPAddress }}' $EP)
|
||||
```
|
||||
|
||||
Run the client container and perform the key exchange:
|
||||
|
||||
```bash
|
||||
docker run --name "rpclient" --network ${NET} \
|
||||
-it --rm -v ./workdir-client:/workdir ghcr.io/rosenpass/rosenpass \
|
||||
exchange \
|
||||
private-key workdir/client-secret \
|
||||
public-key workdir/client-public \
|
||||
peer public-key workdir/server-public endpoint ${EP}:9999 \
|
||||
outfile workdir/client-sharedkey
|
||||
```
|
||||
|
||||
Now the containers will exchange shared keys and each put them into their respective outfile.
|
||||
|
||||
Comparing the outfiles shows that the two shared keys are equal:
|
||||
|
||||
```bash
|
||||
cmp workdir-server/server-sharedkey workdir-client/client-sharedkey
|
||||
```
|
||||
|
||||
It is now possible to add these keys as pre-shared keys to a wireguard interface.
|
||||
For example as the server,
|
||||
|
||||
```bash
|
||||
PREKEY=$(cat workdir-server/server-sharedkey)
|
||||
wg set <server-interface> peer <client-peer-public-key> preshared-key <(echo "$PREKEY")
|
||||
```
|
||||
|
||||
## Usage - Combined with wireguard
|
||||
|
||||
The `ghcr.io/rosenpass/rp` image can be used to build a VPN with WireGuard and Rosenpass.
|
||||
In this example, we run two containers on the same system and connect them with a bridge network within the docker overlay network.
|
||||
|
||||
Create a named docker network that is used to connect the containers:
|
||||
|
||||
```bash
|
||||
docker network create -d bridge rp
|
||||
export NET=rp
|
||||
```
|
||||
|
||||
Generate the server and client secret keys and extract public keys.
|
||||
|
||||
```bash
|
||||
mkdir -p ./workdir-server ./workdir-client
|
||||
|
||||
# server
|
||||
docker run -it --rm -v ./workdir-server:/workdir ghcr.io/rosenpass/rp \
|
||||
genkey workdir/server.rosenpass-secret
|
||||
docker run -it --rm -v ./workdir-server:/workdir ghcr.io/rosenpass/rp \
|
||||
pubkey workdir/server.rosenpass-secret workdir/server.rosenpass-public
|
||||
|
||||
# client
|
||||
docker run -it --rm -v ./workdir-client:/workdir ghcr.io/rosenpass/rp \
|
||||
genkey workdir/client.rosenpass-secret
|
||||
docker run -it --rm -v ./workdir-client:/workdir ghcr.io/rosenpass/rp \
|
||||
pubkey workdir/client.rosenpass-secret workdir/client.rosenpass-public
|
||||
|
||||
# share the public keys between client and server
|
||||
cp -r workdir-client/client.rosenpass-public workdir-server/client.rosenpass-public
|
||||
cp -r workdir-server/server.rosenpass-public workdir-client/server.rosenpass-public
|
||||
```
|
||||
|
||||
Start the server container.
|
||||
Note that the `NET_ADMIN` capability is necessary because the rp command will create and manage wireguard interfaces.
|
||||
Also make sure the `wireguard` kernel module is loaded by the host. (`lsmod | grep wireguard`)
|
||||
|
||||
```bash
|
||||
docker run --name "rpserver" --network ${NET} -it -d --rm -v ./workdir-server:/workdir \
|
||||
--cap-add=NET_ADMIN \
|
||||
ghcr.io/rosenpass/rp \
|
||||
exchange workdir/server.rosenpass-secret dev rosenpass0 \
|
||||
listen 0.0.0.0:9999 peer workdir/client.rosenpass-public allowed-ips 10.0.0.0/8
|
||||
```
|
||||
|
||||
Now find out the ip-address of the server container and then start the client container:
|
||||
|
||||
```bash
|
||||
EP="rpserver"
|
||||
EP=$(docker inspect --format '{{ .NetworkSettings.Networks.rp.IPAddress }}' $EP)
|
||||
docker run --name "rpclient" --network ${NET} -it -d --rm -v ./workdir-client:/workdir \
|
||||
--cap-add=NET_ADMIN \
|
||||
ghcr.io/rosenpass/rp \
|
||||
exchange workdir/client.rosenpass-secret dev rosenpass1 \
|
||||
peer workdir/server.rosenpass-public endpoint ${EP}:9999 allowed-ips 10.0.0.1
|
||||
```
|
||||
|
||||
Inside the docker containers assign the IP addresses:
|
||||
|
||||
```bash
|
||||
# server
|
||||
docker exec -it rpserver ip a add 10.0.0.1/24 dev rosenpass0
|
||||
|
||||
# client
|
||||
docker exec -it rpclient ip a add 10.0.0.2/24 dev rosenpass1
|
||||
```
|
||||
|
||||
Done! The two containers should now be connected through a wireguard VPN (Port 1000) with pre-shared keys exchanged by rosenpass (Port 9999).
|
||||
|
||||
Now, test the connection by starting a shell inside the client container, and ping the server through the VPN:
|
||||
|
||||
```bash
|
||||
# client
|
||||
docker exec -it rpclient bash
|
||||
apt update; apt install iputils-ping
|
||||
ping 10.0.0.1
|
||||
```
|
||||
|
||||
The ping command should continuously show ping-logs:
|
||||
|
||||
```
|
||||
PING 10.0.0.1 (10.0.0.1) 56(84) bytes of data.
|
||||
64 bytes from 10.0.0.1: icmp_seq=1 ttl=64 time=0.119 ms
|
||||
64 bytes from 10.0.0.1: icmp_seq=2 ttl=64 time=0.132 ms
|
||||
64 bytes from 10.0.0.1: icmp_seq=3 ttl=64 time=0.394 ms
|
||||
...
|
||||
```
|
||||
|
||||
While the ping is running, you may stop the server container, and verify that the ping-log halts. In another terminal do:
|
||||
|
||||
```
|
||||
docker stop -t 1 rpserver
|
||||
```
|
||||
|
||||
## Building the Docker Images Locally
|
||||
|
||||
Clone the Rosenpass repository:
|
||||
|
||||
```
|
||||
git clone https://github.com/rosenpass/rosenpass
|
||||
cd rosenpass
|
||||
```
|
||||
|
||||
Build the rp image from the root of the repository as follows:
|
||||
|
||||
```
|
||||
docker build -f docker/Dockerfile -t ghcr.io/rosenpass/rp --target rp .
|
||||
```
|
||||
|
||||
Build the rosenpass image from the root of the repository with the following command:
|
||||
|
||||
```
|
||||
docker build -f docker/Dockerfile -t ghcr.io/rosenpass/rosenpass --target rosenpass .
|
||||
```
|
||||
52
flake.lock
generated
@@ -2,15 +2,17 @@
|
||||
"nodes": {
|
||||
"fenix": {
|
||||
"inputs": {
|
||||
"nixpkgs": ["nixpkgs"],
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
"rust-analyzer-src": "rust-analyzer-src"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1712298178,
|
||||
"narHash": "sha256-590fpCPXYAkaAeBz/V91GX4/KGzPObdYtqsTWzT6AhI=",
|
||||
"lastModified": 1728282832,
|
||||
"narHash": "sha256-I7AbcwGggf+CHqpyd/9PiAjpIBGTGx5woYHqtwxaV7I=",
|
||||
"owner": "nix-community",
|
||||
"repo": "fenix",
|
||||
"rev": "569b5b5781395da08e7064e825953c548c26af76",
|
||||
"rev": "1ec71be1f4b8f3105c5d38da339cb061fefc43f4",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -24,11 +26,11 @@
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1710146030,
|
||||
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
|
||||
"lastModified": 1726560853,
|
||||
"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
|
||||
"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -37,36 +39,38 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"naersk": {
|
||||
"nix-vm-test": {
|
||||
"inputs": {
|
||||
"nixpkgs": ["nixpkgs"]
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1698420672,
|
||||
"narHash": "sha256-/TdeHMPRjjdJub7p7+w55vyABrsJlt5QkznPYy55vKA=",
|
||||
"owner": "nix-community",
|
||||
"repo": "naersk",
|
||||
"rev": "aeb58d5e8faead8980a807c840232697982d47b9",
|
||||
"lastModified": 1734355073,
|
||||
"narHash": "sha256-FfdPOGy1zElTwKzjgIMp5K2D3gfPn6VWjVa4MJ9L1Tc=",
|
||||
"owner": "numtide",
|
||||
"repo": "nix-vm-test",
|
||||
"rev": "5948de39a616f2261dbbf4b6f25cbe1cbefd788c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "naersk",
|
||||
"owner": "numtide",
|
||||
"repo": "nix-vm-test",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1712168706,
|
||||
"narHash": "sha256-XP24tOobf6GGElMd0ux90FEBalUtw6NkBSVh/RlA6ik=",
|
||||
"lastModified": 1728193676,
|
||||
"narHash": "sha256-PbDWAIjKJdlVg+qQRhzdSor04bAPApDqIv2DofTyynk=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "1487bdea619e4a7a53a4590c475deabb5a9d1bfb",
|
||||
"rev": "ecbc1ca8ffd6aea8372ad16be9ebbb39889e55b6",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-23.11",
|
||||
"ref": "nixos-24.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
@@ -75,18 +79,18 @@
|
||||
"inputs": {
|
||||
"fenix": "fenix",
|
||||
"flake-utils": "flake-utils",
|
||||
"naersk": "naersk",
|
||||
"nix-vm-test": "nix-vm-test",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
},
|
||||
"rust-analyzer-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1712156296,
|
||||
"narHash": "sha256-St7ZQrkrr5lmQX9wC1ZJAFxL8W7alswnyZk9d1se3Us=",
|
||||
"lastModified": 1728249780,
|
||||
"narHash": "sha256-J269DvCI5dzBmPrXhAAtj566qt0b22TJtF3TIK+tMsI=",
|
||||
"owner": "rust-lang",
|
||||
"repo": "rust-analyzer",
|
||||
"rev": "8e581ac348e223488622f4d3003cb2bd412bf27e",
|
||||
"rev": "2b750da1a1a2c1d2c70896108d7096089842d877",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
||||
450
flake.nix
@@ -1,20 +1,29 @@
|
||||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11";
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
|
||||
# for quicker rust builds
|
||||
naersk.url = "github:nix-community/naersk";
|
||||
naersk.inputs.nixpkgs.follows = "nixpkgs";
|
||||
|
||||
# for rust nightly with llvm-tools-preview
|
||||
fenix.url = "github:nix-community/fenix";
|
||||
fenix.inputs.nixpkgs.follows = "nixpkgs";
|
||||
|
||||
nix-vm-test.url = "github:numtide/nix-vm-test";
|
||||
nix-vm-test.inputs.nixpkgs.follows = "nixpkgs";
|
||||
nix-vm-test.inputs.flake-utils.follows = "flake-utils";
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, flake-utils, ... }@inputs:
|
||||
outputs = { self, nixpkgs, flake-utils, nix-vm-test, ... }@inputs:
|
||||
nixpkgs.lib.foldl (a: b: nixpkgs.lib.recursiveUpdate a b) { } [
|
||||
|
||||
|
||||
#
|
||||
### Export the overlay.nix from this flake ###
|
||||
#
|
||||
{
|
||||
overlays.default = import ./overlay.nix;
|
||||
}
|
||||
|
||||
|
||||
#
|
||||
### Actual Rosenpass Package and Docker Container Images ###
|
||||
#
|
||||
@@ -30,310 +39,39 @@
|
||||
]
|
||||
(system:
|
||||
let
|
||||
scoped = (scope: scope.result);
|
||||
lib = nixpkgs.lib;
|
||||
|
||||
# normal nixpkgs
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
};
|
||||
|
||||
# parsed Cargo.toml
|
||||
cargoToml = builtins.fromTOML (builtins.readFile ./rosenpass/Cargo.toml);
|
||||
|
||||
# source files relevant for rust
|
||||
src = scoped rec {
|
||||
# File suffices to include
|
||||
extensions = [
|
||||
"lock"
|
||||
"rs"
|
||||
"toml"
|
||||
];
|
||||
# Files to explicitly include
|
||||
files = [
|
||||
"to/README.md"
|
||||
];
|
||||
|
||||
src = ./.;
|
||||
filter = (path: type: scoped rec {
|
||||
inherit (lib) any id removePrefix hasSuffix;
|
||||
anyof = (any id);
|
||||
|
||||
basename = baseNameOf (toString path);
|
||||
relative = removePrefix (toString src + "/") (toString path);
|
||||
|
||||
result = anyof [
|
||||
(type == "directory")
|
||||
(any (ext: hasSuffix ".${ext}" basename) extensions)
|
||||
(any (file: file == relative) files)
|
||||
];
|
||||
});
|
||||
|
||||
result = pkgs.lib.sources.cleanSourceWith { inherit src filter; };
|
||||
};
|
||||
|
||||
# a function to generate a nix derivation for rosenpass against any
|
||||
# given set of nixpkgs
|
||||
rosenpassDerivation = p:
|
||||
let
|
||||
# whether we want to build a statically linked binary
|
||||
isStatic = p.targetPlatform.isStatic;
|
||||
|
||||
# the rust target of `p`
|
||||
target = p.rust.toRustTargetSpec p.targetPlatform;
|
||||
|
||||
# convert a string to shout case
|
||||
shout = string: builtins.replaceStrings [ "-" ] [ "_" ] (pkgs.lib.toUpper string);
|
||||
|
||||
# suitable Rust toolchain
|
||||
toolchain = with inputs.fenix.packages.${system}; combine [
|
||||
stable.cargo
|
||||
stable.rustc
|
||||
targets.${target}.stable.rust-std
|
||||
];
|
||||
|
||||
# naersk with a custom toolchain
|
||||
naersk = pkgs.callPackage inputs.naersk {
|
||||
cargo = toolchain;
|
||||
rustc = toolchain;
|
||||
};
|
||||
|
||||
# used to trick the build.rs into believing that CMake was run **again**
|
||||
fakecmake = pkgs.writeScriptBin "cmake" ''
|
||||
#! ${pkgs.stdenv.shell} -e
|
||||
true
|
||||
'';
|
||||
in
|
||||
naersk.buildPackage
|
||||
{
|
||||
# metadata and source
|
||||
name = cargoToml.package.name;
|
||||
version = cargoToml.package.version;
|
||||
inherit src;
|
||||
|
||||
cargoBuildOptions = x: x ++ [ "-p" "rosenpass" ];
|
||||
cargoTestOptions = x: x ++ [ "-p" "rosenpass" ];
|
||||
|
||||
doCheck = true;
|
||||
|
||||
nativeBuildInputs = with pkgs; [
|
||||
p.stdenv.cc
|
||||
cmake # for oqs build in the oqs-sys crate
|
||||
mandoc # for the built-in manual
|
||||
removeReferencesTo
|
||||
rustPlatform.bindgenHook # for C-bindings in the crypto libs
|
||||
];
|
||||
buildInputs = with p; [ bash ];
|
||||
|
||||
override = x: {
|
||||
preBuild =
|
||||
# nix defaults to building for aarch64 _without_ the armv8-a crypto
|
||||
# extensions, but liboqs depends on these
|
||||
(lib.optionalString (system == "aarch64-linux") ''
|
||||
NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -march=armv8-a+crypto"
|
||||
''
|
||||
);
|
||||
|
||||
# fortify is only compatible with dynamic linking
|
||||
hardeningDisable = lib.optional isStatic "fortify";
|
||||
};
|
||||
|
||||
overrideMain = x: {
|
||||
# CMake detects that it was served a _foreign_ target dir, and CMake
|
||||
# would be executed again upon the second build step of naersk.
|
||||
# By adding our specially optimized CMake version, we reduce the cost
|
||||
# of recompilation by 99 %, while avoiding any CMake errors.
|
||||
nativeBuildInputs = [ (lib.hiPrio fakecmake) ] ++ x.nativeBuildInputs;
|
||||
|
||||
# make sure that libc is linked, under musl this is not the case per
|
||||
# default
|
||||
preBuild = (lib.optionalString isStatic ''
|
||||
NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -lc"
|
||||
'');
|
||||
};
|
||||
|
||||
# We want to build for a specific target...
|
||||
CARGO_BUILD_TARGET = target;
|
||||
|
||||
# ... which might require a non-default linker:
|
||||
"CARGO_TARGET_${shout target}_LINKER" =
|
||||
let
|
||||
inherit (p.stdenv) cc;
|
||||
in
|
||||
"${cc}/bin/${cc.targetPrefix}cc";
|
||||
|
||||
meta = with pkgs.lib;
|
||||
{
|
||||
inherit (cargoToml.package) description homepage;
|
||||
license = with licenses; [ mit asl20 ];
|
||||
maintainers = [ maintainers.wucke13 ];
|
||||
platforms = platforms.all;
|
||||
};
|
||||
} // (lib.mkIf isStatic {
|
||||
# otherwise pkg-config tries to link non-existent dynamic libs
|
||||
# documented here: https://docs.rs/pkg-config/latest/pkg_config/
|
||||
PKG_CONFIG_ALL_STATIC = true;
|
||||
|
||||
# tell rust to build everything statically linked
|
||||
CARGO_BUILD_RUSTFLAGS = "-C target-feature=+crt-static";
|
||||
});
|
||||
# a function to generate a nix derivation for the rp helper against any
|
||||
# given set of nixpkgs
|
||||
rpDerivation = p:
|
||||
let
|
||||
# whether we want to build a statically linked binary
|
||||
isStatic = p.targetPlatform.isStatic;
|
||||
|
||||
# the rust target of `p`
|
||||
target = p.rust.toRustTargetSpec p.targetPlatform;
|
||||
|
||||
# convert a string to shout case
|
||||
shout = string: builtins.replaceStrings [ "-" ] [ "_" ] (pkgs.lib.toUpper string);
|
||||
|
||||
# suitable Rust toolchain
|
||||
toolchain = with inputs.fenix.packages.${system}; combine [
|
||||
stable.cargo
|
||||
stable.rustc
|
||||
targets.${target}.stable.rust-std
|
||||
];
|
||||
|
||||
# naersk with a custom toolchain
|
||||
naersk = pkgs.callPackage inputs.naersk {
|
||||
cargo = toolchain;
|
||||
rustc = toolchain;
|
||||
};
|
||||
|
||||
# used to trick the build.rs into believing that CMake was run **again**
|
||||
fakecmake = pkgs.writeScriptBin "cmake" ''
|
||||
#! ${pkgs.stdenv.shell} -e
|
||||
true
|
||||
'';
|
||||
in
|
||||
naersk.buildPackage
|
||||
{
|
||||
# metadata and source
|
||||
name = cargoToml.package.name;
|
||||
version = cargoToml.package.version;
|
||||
inherit src;
|
||||
|
||||
cargoBuildOptions = x: x ++ [ "-p" "rp" ];
|
||||
cargoTestOptions = x: x ++ [ "-p" "rp" ];
|
||||
|
||||
doCheck = true;
|
||||
|
||||
nativeBuildInputs = with pkgs; [
|
||||
p.stdenv.cc
|
||||
cmake # for oqs build in the oqs-sys crate
|
||||
mandoc # for the built-in manual
|
||||
removeReferencesTo
|
||||
rustPlatform.bindgenHook # for C-bindings in the crypto libs
|
||||
];
|
||||
buildInputs = with p; [ bash ];
|
||||
|
||||
override = x: {
|
||||
preBuild =
|
||||
# nix defaults to building for aarch64 _without_ the armv8-a crypto
|
||||
# extensions, but liboqs depends on these
|
||||
(lib.optionalString (system == "aarch64-linux") ''
|
||||
NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -march=armv8-a+crypto"
|
||||
''
|
||||
);
|
||||
|
||||
# fortify is only compatible with dynamic linking
|
||||
hardeningDisable = lib.optional isStatic "fortify";
|
||||
};
|
||||
|
||||
overrideMain = x: {
|
||||
# CMake detects that it was served a _foreign_ target dir, and CMake
|
||||
# would be executed again upon the second build step of naersk.
|
||||
# By adding our specially optimized CMake version, we reduce the cost
|
||||
# of recompilation by 99 %, while avoiding any CMake errors.
|
||||
nativeBuildInputs = [ (lib.hiPrio fakecmake) ] ++ x.nativeBuildInputs;
|
||||
|
||||
# make sure that libc is linked, under musl this is not the case per
|
||||
# default
|
||||
preBuild = (lib.optionalString isStatic ''
|
||||
NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -lc"
|
||||
'');
|
||||
};
|
||||
|
||||
# We want to build for a specific target...
|
||||
CARGO_BUILD_TARGET = target;
|
||||
|
||||
# ... which might require a non-default linker:
|
||||
"CARGO_TARGET_${shout target}_LINKER" =
|
||||
let
|
||||
inherit (p.stdenv) cc;
|
||||
in
|
||||
"${cc}/bin/${cc.targetPrefix}cc";
|
||||
|
||||
meta = with pkgs.lib;
|
||||
{
|
||||
inherit (cargoToml.package) description homepage;
|
||||
license = with licenses; [ mit asl20 ];
|
||||
maintainers = [ maintainers.wucke13 ];
|
||||
platforms = platforms.all;
|
||||
};
|
||||
} // (lib.mkIf isStatic {
|
||||
# otherwise pkg-config tries to link non-existent dynamic libs
|
||||
# documented here: https://docs.rs/pkg-config/latest/pkg_config/
|
||||
PKG_CONFIG_ALL_STATIC = true;
|
||||
|
||||
# tell rust to build everything statically linked
|
||||
CARGO_BUILD_RUSTFLAGS = "-C target-feature=+crt-static";
|
||||
});
|
||||
# a function to generate a docker image based of rosenpass
|
||||
rosenpassOCI = name: pkgs.dockerTools.buildImage rec {
|
||||
inherit name;
|
||||
copyToRoot = pkgs.buildEnv {
|
||||
name = "image-root";
|
||||
paths = [ self.packages.${system}.${name} ];
|
||||
pathsToLink = [ "/bin" ];
|
||||
};
|
||||
config.Cmd = [ "/bin/rosenpass" ];
|
||||
# apply our own overlay, overriding/inserting our packages as defined in ./pkgs
|
||||
overlays = [ self.overlays.default ];
|
||||
};
|
||||
in
|
||||
rec {
|
||||
packages = rec {
|
||||
default = rosenpass;
|
||||
rosenpass = rosenpassDerivation pkgs;
|
||||
rp = rpDerivation pkgs;
|
||||
rosenpass-oci-image = rosenpassOCI "rosenpass";
|
||||
{
|
||||
packages = {
|
||||
default = pkgs.rosenpass;
|
||||
rosenpass = pkgs.rosenpass;
|
||||
rosenpass-oci-image = pkgs.rosenpass-oci-image;
|
||||
rp = pkgs.rp;
|
||||
|
||||
# derivation for the release
|
||||
release-package =
|
||||
let
|
||||
version = cargoToml.package.version;
|
||||
package =
|
||||
if pkgs.hostPlatform.isLinux then
|
||||
packages.rosenpass-static
|
||||
else packages.rosenpass;
|
||||
rp =
|
||||
if pkgs.hostPlatform.isLinux then
|
||||
packages.rp-static
|
||||
else packages.rp;
|
||||
oci-image =
|
||||
if pkgs.hostPlatform.isLinux then
|
||||
packages.rosenpass-static-oci-image
|
||||
else packages.rosenpass-oci-image;
|
||||
in
|
||||
pkgs.runCommandNoCC "lace-result" { }
|
||||
''
|
||||
mkdir {bin,$out}
|
||||
tar -cvf $out/rosenpass-${system}-${version}.tar \
|
||||
-C ${package} bin/rosenpass \
|
||||
-C ${rp} bin/rp
|
||||
cp ${oci-image} \
|
||||
$out/rosenpass-oci-image-${system}-${version}.tar.gz
|
||||
'';
|
||||
} // (if pkgs.stdenv.isLinux then rec {
|
||||
rosenpass-static = rosenpassDerivation pkgs.pkgsStatic;
|
||||
rp-static = rpDerivation pkgs.pkgsStatic;
|
||||
rosenpass-static-oci-image = rosenpassOCI "rosenpass-static";
|
||||
} else { });
|
||||
release-package = pkgs.release-package;
|
||||
|
||||
# for good measure, we also offer to cross compile to Linux on Arm
|
||||
aarch64-linux-rosenpass-static =
|
||||
pkgs.pkgsCross.aarch64-multiplatform.pkgsStatic.rosenpass;
|
||||
aarch64-linux-rp-static = pkgs.pkgsCross.aarch64-multiplatform.pkgsStatic.rp;
|
||||
}
|
||||
//
|
||||
# We only offer static builds for linux, as this is not supported on OS X
|
||||
(nixpkgs.lib.attrsets.optionalAttrs pkgs.stdenv.isLinux {
|
||||
rosenpass-static = pkgs.pkgsStatic.rosenpass;
|
||||
rosenpass-static-oci-image = pkgs.pkgsStatic.rosenpass-oci-image;
|
||||
rp-static = pkgs.pkgsStatic.rp;
|
||||
});
|
||||
}
|
||||
))
|
||||
|
||||
|
||||
#
|
||||
### Linux specifics ###
|
||||
#
|
||||
@@ -341,92 +79,80 @@
|
||||
let
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
|
||||
# apply our own overlay, overriding/inserting our packages as defined in ./pkgs
|
||||
overlays = [
|
||||
self.overlays.default
|
||||
nix-vm-test.overlays.default
|
||||
];
|
||||
};
|
||||
packages = self.packages.${system};
|
||||
in
|
||||
{
|
||||
#
|
||||
### Whitepaper ###
|
||||
#
|
||||
packages.whitepaper =
|
||||
let
|
||||
tlsetup = (pkgs.texlive.combine {
|
||||
inherit (pkgs.texlive) scheme-basic acmart amsfonts ccicons
|
||||
csquotes csvsimple doclicense fancyvrb fontspec gobble
|
||||
koma-script ifmtarg latexmk lm markdown mathtools minted noto
|
||||
nunito pgf soul unicode-math lualatex-math paralist
|
||||
gitinfo2 eso-pic biblatex biblatex-trad biblatex-software
|
||||
xkeyval xurl xifthen biber;
|
||||
});
|
||||
in
|
||||
pkgs.stdenvNoCC.mkDerivation {
|
||||
name = "whitepaper";
|
||||
src = ./papers;
|
||||
nativeBuildInputs = with pkgs; [
|
||||
ncurses # tput
|
||||
python3Packages.pygments
|
||||
tlsetup # custom tex live scheme
|
||||
which
|
||||
];
|
||||
buildPhase = ''
|
||||
export HOME=$(mktemp -d)
|
||||
latexmk -r tex/CI.rc
|
||||
'';
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
mv *.pdf readme.md $out/
|
||||
'';
|
||||
};
|
||||
packages.package-deb = pkgs.callPackage ./pkgs/package-deb.nix {
|
||||
rosenpass = pkgs.pkgsStatic.rosenpass;
|
||||
};
|
||||
packages.package-rpm = pkgs.callPackage ./pkgs/package-rpm.nix {
|
||||
rosenpass = pkgs.pkgsStatic.rosenpass;
|
||||
};
|
||||
|
||||
#
|
||||
### Reading materials ###
|
||||
#
|
||||
packages.whitepaper = pkgs.whitepaper;
|
||||
|
||||
#
|
||||
### Proof and Proof Tools ###
|
||||
#
|
||||
packages.proverif-patched = pkgs.proverif.overrideAttrs (old: {
|
||||
postInstall = ''
|
||||
install -D -t $out/lib cryptoverif.pvl
|
||||
'';
|
||||
});
|
||||
packages.proof-proverif = pkgs.stdenv.mkDerivation {
|
||||
name = "rosenpass-proverif-proof";
|
||||
version = "unstable";
|
||||
src = pkgs.lib.sources.sourceByRegex ./. [
|
||||
"analyze.sh"
|
||||
"marzipan(/marzipan.awk)?"
|
||||
"analysis(/.*)?"
|
||||
];
|
||||
nativeBuildInputs = [ pkgs.proverif pkgs.graphviz ];
|
||||
CRYPTOVERIF_LIB = packages.proverif-patched + "/lib/cryptoverif.pvl";
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
bash analyze.sh -color -html $out
|
||||
'';
|
||||
};
|
||||
packages.proverif-patched = pkgs.proverif-patched;
|
||||
packages.proof-proverif = pkgs.proof-proverif;
|
||||
|
||||
|
||||
#
|
||||
### Devshells ###
|
||||
#
|
||||
devShells.default = pkgs.mkShell {
|
||||
inherit (packages.proof-proverif) CRYPTOVERIF_LIB;
|
||||
inputsFrom = [ packages.default ];
|
||||
inherit (pkgs.proof-proverif) CRYPTOVERIF_LIB;
|
||||
inputsFrom = [ pkgs.rosenpass ];
|
||||
nativeBuildInputs = with pkgs; [
|
||||
cmake # override the fakecmake from the main step above
|
||||
cargo-release
|
||||
clippy
|
||||
rustfmt
|
||||
nodePackages.prettier
|
||||
nushell # for the .ci/gen-workflow-files.nu script
|
||||
proverif-patched
|
||||
];
|
||||
};
|
||||
# TODO: Write this as a patched version of the default environment
|
||||
devShells.fullEnv = pkgs.mkShell {
|
||||
inherit (pkgs.proof-proverif) CRYPTOVERIF_LIB;
|
||||
inputsFrom = [ pkgs.rosenpass ];
|
||||
nativeBuildInputs = with pkgs; [
|
||||
cargo-audit
|
||||
cargo-release
|
||||
cargo-msrv
|
||||
rustfmt
|
||||
packages.proverif-patched
|
||||
nodePackages.prettier
|
||||
nushell # for the .ci/gen-workflow-files.nu script
|
||||
proverif-patched
|
||||
inputs.fenix.packages.${system}.complete.toolchain
|
||||
pkgs.cargo-llvm-cov
|
||||
pkgs.grcov
|
||||
];
|
||||
};
|
||||
devShells.coverage = pkgs.mkShell {
|
||||
inputsFrom = [ packages.default ];
|
||||
nativeBuildInputs = with pkgs; [ inputs.fenix.packages.${system}.complete.toolchain cargo-llvm-cov ];
|
||||
inputsFrom = [ pkgs.rosenpass ];
|
||||
nativeBuildInputs = [
|
||||
inputs.fenix.packages.${system}.complete.toolchain
|
||||
pkgs.cargo-llvm-cov
|
||||
pkgs.grcov
|
||||
];
|
||||
};
|
||||
|
||||
|
||||
checks = {
|
||||
systemd-rosenpass = pkgs.testers.runNixOSTest ./tests/systemd/rosenpass.nix;
|
||||
systemd-rp = pkgs.testers.runNixOSTest ./tests/systemd/rp.nix;
|
||||
|
||||
cargo-fmt = pkgs.runCommand "check-cargo-fmt"
|
||||
{ inherit (self.devShells.${system}.default) nativeBuildInputs buildInputs; } ''
|
||||
cargo fmt --manifest-path=${./.}/Cargo.toml --check --all && touch $out
|
||||
@@ -439,7 +165,11 @@
|
||||
{ nativeBuildInputs = [ pkgs.nodePackages.prettier ]; } ''
|
||||
cd ${./.} && prettier --check . && touch $out
|
||||
'';
|
||||
};
|
||||
} // pkgs.lib.optionalAttrs (system == "x86_64-linux") (import ./tests/legacy-distro-packaging.nix {
|
||||
inherit pkgs;
|
||||
rosenpass-deb = self.packages.${system}.package-deb;
|
||||
rosenpass-rpm = self.packages.${system}.package-rpm;
|
||||
});
|
||||
|
||||
formatter = pkgs.nixpkgs-fmt;
|
||||
}))
|
||||
|
||||
@@ -3,6 +3,10 @@ name = "rosenpass-fuzzing"
|
||||
version = "0.0.1"
|
||||
publish = false
|
||||
edition = "2021"
|
||||
rust-version = "1.77"
|
||||
|
||||
[features]
|
||||
experiment_libcrux = ["rosenpass-ciphers/experiment_libcrux_all"]
|
||||
|
||||
[package.metadata]
|
||||
cargo-fuzz = true
|
||||
@@ -81,4 +85,4 @@ doc = false
|
||||
name = "fuzz_vec_secret_alloc_memfdsec_mallocfb"
|
||||
path = "fuzz_targets/vec_secret_alloc_memfdsec_mallocfb.rs"
|
||||
test = false
|
||||
doc = false
|
||||
doc = false
|
||||
|
||||
@@ -4,7 +4,8 @@ extern crate rosenpass;
|
||||
|
||||
use libfuzzer_sys::fuzz_target;
|
||||
|
||||
use rosenpass_ciphers::aead;
|
||||
use rosenpass_cipher_traits::primitives::Aead as _;
|
||||
use rosenpass_ciphers::Aead;
|
||||
|
||||
#[derive(arbitrary::Arbitrary, Debug)]
|
||||
pub struct Input {
|
||||
@@ -15,10 +16,9 @@ pub struct Input {
|
||||
}
|
||||
|
||||
fuzz_target!(|input: Input| {
|
||||
let mut ciphertext: Vec<u8> = Vec::with_capacity(input.plaintext.len() + 16);
|
||||
ciphertext.resize(input.plaintext.len() + 16, 0);
|
||||
let mut ciphertext = vec![0u8; input.plaintext.len() + 16];
|
||||
|
||||
aead::encrypt(
|
||||
Aead.encrypt(
|
||||
ciphertext.as_mut_slice(),
|
||||
&input.key,
|
||||
&input.nonce,
|
||||
|
||||
@@ -4,6 +4,7 @@ extern crate rosenpass;
|
||||
|
||||
use libfuzzer_sys::fuzz_target;
|
||||
|
||||
use rosenpass_cipher_traits::primitives::KeyedHashTo;
|
||||
use rosenpass_ciphers::subtle::blake2b;
|
||||
use rosenpass_to::To;
|
||||
|
||||
@@ -16,5 +17,7 @@ pub struct Blake2b {
|
||||
fuzz_target!(|input: Blake2b| {
|
||||
let mut out = [0u8; 32];
|
||||
|
||||
blake2b::hash(&input.key, &input.data).to(&mut out).unwrap();
|
||||
blake2b::Blake2b::keyed_hash_to(&input.key, &input.data)
|
||||
.to(&mut out)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
@@ -4,17 +4,17 @@ extern crate rosenpass;
|
||||
use libfuzzer_sys::fuzz_target;
|
||||
|
||||
use rosenpass::protocol::CryptoServer;
|
||||
use rosenpass_cipher_traits::Kem;
|
||||
use rosenpass_ciphers::kem::StaticKem;
|
||||
use rosenpass_cipher_traits::primitives::Kem;
|
||||
use rosenpass_ciphers::StaticKem;
|
||||
use rosenpass_secret_memory::policy::*;
|
||||
use rosenpass_secret_memory::Secret;
|
||||
use rosenpass_secret_memory::{PublicBox, Secret};
|
||||
use std::sync::Once;
|
||||
|
||||
static ONCE: Once = Once::new();
|
||||
fuzz_target!(|rx_buf: &[u8]| {
|
||||
ONCE.call_once(secret_policy_use_only_malloc_secrets);
|
||||
let sk = Secret::from_slice(&[0; StaticKem::SK_LEN]);
|
||||
let pk = Secret::from_slice(&[0; StaticKem::PK_LEN]);
|
||||
let pk = PublicBox::from_slice(&[0; StaticKem::PK_LEN]);
|
||||
|
||||
let mut cs = CryptoServer::new(sk, pk);
|
||||
let mut tx_buf = [0; 10240];
|
||||
|
||||
@@ -4,8 +4,8 @@ extern crate rosenpass;
|
||||
|
||||
use libfuzzer_sys::fuzz_target;
|
||||
|
||||
use rosenpass_cipher_traits::Kem;
|
||||
use rosenpass_ciphers::kem::EphemeralKem;
|
||||
use rosenpass_cipher_traits::primitives::Kem;
|
||||
use rosenpass_ciphers::EphemeralKem;
|
||||
|
||||
#[derive(arbitrary::Arbitrary, Debug)]
|
||||
pub struct Input {
|
||||
@@ -14,7 +14,9 @@ pub struct Input {
|
||||
|
||||
fuzz_target!(|input: Input| {
|
||||
let mut ciphertext = [0u8; EphemeralKem::CT_LEN];
|
||||
let mut shared_secret = [0u8; EphemeralKem::SK_LEN];
|
||||
let mut shared_secret = [0u8; EphemeralKem::SHK_LEN];
|
||||
|
||||
EphemeralKem::encaps(&mut shared_secret, &mut ciphertext, &input.pk).unwrap();
|
||||
EphemeralKem
|
||||
.encaps(&mut shared_secret, &mut ciphertext, &input.pk)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
@@ -3,13 +3,13 @@ extern crate rosenpass;
|
||||
|
||||
use libfuzzer_sys::fuzz_target;
|
||||
|
||||
use rosenpass_cipher_traits::Kem;
|
||||
use rosenpass_ciphers::kem::StaticKem;
|
||||
use rosenpass_cipher_traits::primitives::Kem;
|
||||
use rosenpass_ciphers::StaticKem;
|
||||
|
||||
fuzz_target!(|input: [u8; StaticKem::PK_LEN]| {
|
||||
let mut ciphertext = [0u8; StaticKem::CT_LEN];
|
||||
let mut shared_secret = [0u8; StaticKem::SHK_LEN];
|
||||
|
||||
// We expect errors while fuzzing therefore we do not check the result.
|
||||
let _ = StaticKem::encaps(&mut shared_secret, &mut ciphertext, &input);
|
||||
let _ = StaticKem.encaps(&mut shared_secret, &mut ciphertext, &input);
|
||||
});
|
||||
|
||||
13
manual_tests/psk_broker/peer_a.rp.config
Normal file
@@ -0,0 +1,13 @@
|
||||
secret_key = "peer_a.rp.sk"
|
||||
public_key = "peer_a.rp.pk"
|
||||
listen = ["[::1]:46127"]
|
||||
verbosity = "Verbose"
|
||||
|
||||
[api]
|
||||
listen_path = []
|
||||
listen_fd = []
|
||||
stream_fd = []
|
||||
|
||||
[[peers]]
|
||||
public_key = "peer_b.rp.pk"
|
||||
device = "rpPskBrkTestA"
|
||||
14
manual_tests/psk_broker/peer_b.rp.config
Normal file
@@ -0,0 +1,14 @@
|
||||
secret_key = "peer_b.rp.sk"
|
||||
public_key = "peer_b.rp.pk"
|
||||
listen = []
|
||||
verbosity = "Verbose"
|
||||
|
||||
[api]
|
||||
listen_path = []
|
||||
listen_fd = []
|
||||
stream_fd = []
|
||||
|
||||
[[peers]]
|
||||
public_key = "peer_a.rp.pk"
|
||||
endpoint = "[::1]:46127"
|
||||
device = "rpPskBrkTestB"
|
||||
215
manual_tests/psk_broker/run_test.sh
Executable file
@@ -0,0 +1,215 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e -o pipefail
|
||||
|
||||
enquote() {
|
||||
while (( "$#" > 1)); do
|
||||
printf "%q " "$1"
|
||||
shift
|
||||
done
|
||||
if (("$#" > 0)); then
|
||||
printf "%q" "$1"
|
||||
fi
|
||||
}
|
||||
|
||||
CLEANUP_HOOKS=()
|
||||
hook_cleanup() {
|
||||
local hook
|
||||
set +e +o pipefail
|
||||
for hook in "${CLEANUP_HOOKS[@]}"; do
|
||||
eval "${hook}"
|
||||
done
|
||||
}
|
||||
|
||||
cleanup() {
|
||||
CLEANUP_HOOKS=("$(enquote exc_with_ctx cleanup "$@")" "${CLEANUP_HOOKS[@]}")
|
||||
}
|
||||
|
||||
cleanup_eval() {
|
||||
cleanup eval "$*"
|
||||
}
|
||||
|
||||
stderr() {
|
||||
echo >&2 "$@"
|
||||
}
|
||||
|
||||
log() {
|
||||
local level; level="$1"; shift || fatal "USAGE: log LVL MESSAGE.."
|
||||
stderr "[${level}]" "$@"
|
||||
}
|
||||
|
||||
info() {
|
||||
log "INFO" "$@"
|
||||
}
|
||||
|
||||
debug() {
|
||||
log "DEBUG" "$@"
|
||||
}
|
||||
|
||||
fatal() {
|
||||
log "FATAL" "$@"
|
||||
exit 1
|
||||
}
|
||||
|
||||
assert() {
|
||||
local msg; msg="$1"; shift || fatal "USAGE: assert_cmd MESSAGE COMMAND.."
|
||||
"$@" || fatal "${msg}"
|
||||
}
|
||||
|
||||
abs_dir() {
|
||||
local dir; dir="$1"; shift || fatal "USAGE: abs_dir DIR"
|
||||
(
|
||||
cd "${dir}"
|
||||
pwd -P
|
||||
)
|
||||
}
|
||||
|
||||
exc_with_ctx() {
|
||||
local ctx; ctx="$1"; shift || fatal "USAGE: exc_with_ctx CONTEXT COMMAND.."
|
||||
if [[ -z "${ctx}" ]]; then
|
||||
info '$' "$@"
|
||||
else
|
||||
info "${ctx}\$" "$@"
|
||||
fi
|
||||
|
||||
"$@"
|
||||
}
|
||||
|
||||
exc() {
|
||||
exc_with_ctx "" "$@"
|
||||
}
|
||||
|
||||
exc_eval() {
|
||||
exc eval "$*"
|
||||
}
|
||||
|
||||
exc_eval_with_ctx() {
|
||||
local ctx; ctx="$1"; shift || fatal "USAGE: exc_eval_with_ctx CONTEXT EVAL_COMMAND.."
|
||||
exc_with_ctx "eval:${ctx}" "$*"
|
||||
}
|
||||
|
||||
exc_as_user() {
|
||||
exc sudo -u "${SUDO_USER}" "$@"
|
||||
}
|
||||
|
||||
exc_eval_as_user() {
|
||||
exc_as_user bash -c "$*"
|
||||
}
|
||||
|
||||
fork_eval_as_user() {
|
||||
exc sudo -u "${SUDO_USER}" bash -c "$*" &
|
||||
local pid; pid="$!"
|
||||
cleanup wait "${pid}"
|
||||
cleanup pkill -2 -P "${pid}" # Reverse ordering
|
||||
}
|
||||
|
||||
info_success() {
|
||||
stderr
|
||||
stderr
|
||||
if [[ "${SUCCESS}" = 1 ]]; then
|
||||
stderr " Test was a success!"
|
||||
else
|
||||
stderr " !!! TEST WAS A FAILURE!!!"
|
||||
fi
|
||||
stderr
|
||||
}
|
||||
|
||||
main() {
|
||||
assert "Use as root with sudo" [ "$(id -u)" -eq 0 ]
|
||||
assert "Use as root with sudo" [ -n "${SUDO_UID}" ]
|
||||
assert "SUDO_UID is 0; refusing to build as root" [ "${SUDO_UID}" -ne 0 ]
|
||||
|
||||
cleanup info_success
|
||||
|
||||
trap hook_cleanup EXIT
|
||||
|
||||
SCRIPT="$0"
|
||||
CFG_TEMPLATE_DIR="$(abs_dir "$(dirname "${SCRIPT}")")"
|
||||
REPO="$(abs_dir "${CFG_TEMPLATE_DIR}/../..")"
|
||||
BINS="${REPO}/target/debug"
|
||||
|
||||
# Create temp dir
|
||||
TMP_DIR="/tmp/rosenpass-psk-broker-test-$(date +%s)-$(uuidgen)"
|
||||
cleanup rm -rf "${TMP_DIR}"
|
||||
exc_as_user mkdir -p "${TMP_DIR}"
|
||||
|
||||
# Copy config
|
||||
CFG_DIR="${TMP_DIR}/cfg"
|
||||
exc_as_user cp -R "${CFG_TEMPLATE_DIR}" "${CFG_DIR}"
|
||||
|
||||
exc umask 077
|
||||
|
||||
exc cd "${REPO}"
|
||||
local build_cmd; build_cmd=(cargo build --workspace --color=always --all-features --bins --profile dev)
|
||||
if test -e "${BINS}/rosenpass-wireguard-broker-privileged" -a -e "${BINS}/rosenpass"; then
|
||||
info "Found the binaries rosenpass-wireguard-broker-privileged and rosenpass." \
|
||||
"Run following commands as a regular user to recompile the binaries with the right options" \
|
||||
"in case of an error:" '$' "${build_cmd[@]}"
|
||||
else
|
||||
exc_as_user "${build_cmd[@]}"
|
||||
fi
|
||||
exc sudo setcap CAP_NET_ADMIN=+eip "${BINS}/rosenpass-wireguard-broker-privileged"
|
||||
|
||||
exc cd "${CFG_DIR}"
|
||||
exc_eval_as_user "wg genkey > peer_a.wg.sk"
|
||||
exc_eval_as_user "wg pubkey < peer_a.wg.sk > peer_a.wg.pk"
|
||||
exc_eval_as_user "wg genkey > peer_b.wg.sk"
|
||||
exc_eval_as_user "wg pubkey < peer_b.wg.sk > peer_b.wg.pk"
|
||||
exc_eval_as_user "wg genpsk > peer_a_invalid.psk"
|
||||
exc_eval_as_user "wg genpsk > peer_b_invalid.psk"
|
||||
exc_eval_as_user "echo $(enquote "peer = \"$(cat peer_b.wg.pk)\"") >> peer_a.rp.config"
|
||||
exc_eval_as_user "echo $(enquote "peer = \"$(cat peer_a.wg.pk)\"") >> peer_b.rp.config"
|
||||
exc_as_user "${BINS}"/rosenpass gen-keys peer_a.rp.config
|
||||
exc_as_user "${BINS}"/rosenpass gen-keys peer_b.rp.config
|
||||
|
||||
cleanup ip l del dev rpPskBrkTestA
|
||||
cleanup ip l del dev rpPskBrkTestB
|
||||
exc ip l add dev rpPskBrkTestA type wireguard
|
||||
exc ip l add dev rpPskBrkTestB type wireguard
|
||||
|
||||
exc wg set rpPskBrkTestA \
|
||||
listen-port 46125 \
|
||||
private-key peer_a.wg.sk \
|
||||
peer "$(cat peer_b.wg.pk)" \
|
||||
endpoint 'localhost:46126' \
|
||||
preshared-key peer_a_invalid.psk \
|
||||
allowed-ips fe80::2/64
|
||||
exc wg set rpPskBrkTestB \
|
||||
listen-port 46126 \
|
||||
private-key peer_b.wg.sk \
|
||||
peer "$(cat peer_a.wg.pk)" \
|
||||
endpoint 'localhost:46125' \
|
||||
preshared-key peer_b_invalid.psk \
|
||||
allowed-ips fe80::1/64
|
||||
|
||||
exc ip l set rpPskBrkTestA up
|
||||
exc ip l set rpPskBrkTestB up
|
||||
|
||||
exc ip a add fe80::1/64 dev rpPskBrkTestA
|
||||
exc ip a add fe80::2/64 dev rpPskBrkTestB
|
||||
|
||||
fork_eval_as_user "\
|
||||
RUST_LOG='info' \
|
||||
PATH=$(enquote "${REPO}/target/debug:${PATH}") \
|
||||
$(enquote "${BINS}/rosenpass") --psk-broker-spawn \
|
||||
exchange-config peer_a.rp.config"
|
||||
fork_eval_as_user "\
|
||||
RUST_LOG='info' \
|
||||
PATH=$(enquote "${REPO}/target/debug:${PATH}") \
|
||||
$(enquote "${BINS}/rosenpass-wireguard-broker-socket-handler") \
|
||||
--listen-path broker.sock"
|
||||
fork_eval_as_user "\
|
||||
RUST_LOG='info' \
|
||||
PATH=$(enquote "$PWD/target/debug:${PATH}") \
|
||||
$(enquote "${BINS}/rosenpass") --psk-broker-path broker.sock \
|
||||
exchange-config peer_b.rp.config"
|
||||
|
||||
exc_as_user ping -c 2 -w 10 fe80::1%rpPskBrkTestA
|
||||
exc_as_user ping -c 2 -w 10 fe80::2%rpPskBrkTestB
|
||||
exc_as_user ping -c 2 -w 10 fe80::2%rpPskBrkTestA
|
||||
exc_as_user ping -c 2 -w 10 fe80::1%rpPskBrkTestB
|
||||
|
||||
SUCCESS=1
|
||||
}
|
||||
|
||||
main "$@"
|
||||
@@ -1,14 +1,14 @@
|
||||
from pathlib import Path
|
||||
from subprocess import run
|
||||
|
||||
import os
|
||||
|
||||
config = dict(
|
||||
peer_counts=[1, 5, 10, 50, 100, 500],
|
||||
peer_count_max=100,
|
||||
ate_ip="192.168.2.1",
|
||||
dut_ip="192.168.2.4",
|
||||
ate_ip="127.0.0.1",
|
||||
dut_ip="127.0.0.1",
|
||||
dut_port=9999,
|
||||
path_to_rosenpass_bin="/Users/user/src/rosenppass/rosenpass/target/debug/rosenpass",
|
||||
path_to_rosenpass_bin=os.getcwd() + "/target/release/rosenpass",
|
||||
)
|
||||
|
||||
print(config)
|
||||
|
||||
@@ -8,9 +8,14 @@ description = "Rosenpass internal bindings to liboqs"
|
||||
homepage = "https://rosenpass.eu/"
|
||||
repository = "https://github.com/rosenpass/rosenpass"
|
||||
readme = "readme.md"
|
||||
rust-version = "1.77"
|
||||
|
||||
[dependencies]
|
||||
rosenpass-cipher-traits = { workspace = true }
|
||||
rosenpass-util = { workspace = true }
|
||||
oqs-sys = { workspace = true }
|
||||
paste = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
rosenpass-secret-memory = { workspace = true }
|
||||
rosenpass-constant-time = { workspace = true }
|
||||
|
||||
@@ -1,10 +1,47 @@
|
||||
macro_rules! oqs_kem {
|
||||
($name:ident) => { ::paste::paste!{
|
||||
mod [< $name:snake >] {
|
||||
use rosenpass_cipher_traits::Kem;
|
||||
use rosenpass_util::result::Guaranteed;
|
||||
//! Generic helpers for declaring bindings to liboqs kems
|
||||
|
||||
pub enum [< $name:camel >] {}
|
||||
/// Generate bindings to a liboqs-provided KEM
|
||||
macro_rules! oqs_kem {
|
||||
($name:ident, $algo_trait:path) => { ::paste::paste!{
|
||||
#[doc = "Bindings for ::oqs_sys::kem::" [<"OQS_KEM" _ $name:snake>] "_*"]
|
||||
mod [< $name:snake >] {
|
||||
use rosenpass_cipher_traits::primitives::{Kem, KemError};
|
||||
|
||||
#[doc = "Bindings for ::oqs_sys::kem::" [<"OQS_KEM" _ $name:snake>] "_*"]
|
||||
#[doc = ""]
|
||||
#[doc = "# Examples"]
|
||||
#[doc = ""]
|
||||
#[doc = "```rust"]
|
||||
#[doc = "use std::borrow::{Borrow, BorrowMut};"]
|
||||
#[doc = "use rosenpass_cipher_traits::primitives::Kem;"]
|
||||
#[doc = "use rosenpass_oqs::" $name:camel " as MyKem;"]
|
||||
#[doc = "use rosenpass_secret_memory::{Secret, Public};"]
|
||||
#[doc = ""]
|
||||
#[doc = "rosenpass_secret_memory::secret_policy_try_use_memfd_secrets();"]
|
||||
#[doc = ""]
|
||||
#[doc = "// Recipient generates secret key, transfers pk to sender"]
|
||||
#[doc = "let mut sk = Secret::<{ MyKem::SK_LEN }>::zero();"]
|
||||
#[doc = "let mut pk = Public::<{ MyKem::PK_LEN }>::zero();"]
|
||||
#[doc = "MyKem.keygen(sk.secret_mut(), &mut pk);"]
|
||||
#[doc = ""]
|
||||
#[doc = "// Sender generates ciphertext and local shared key, sends ciphertext to recipient"]
|
||||
#[doc = "let mut shk_enc = Secret::<{ MyKem::SHK_LEN }>::zero();"]
|
||||
#[doc = "let mut ct = Public::<{ MyKem::CT_LEN }>::zero();"]
|
||||
#[doc = "MyKem.encaps(shk_enc.secret_mut(), &mut ct, &pk);"]
|
||||
#[doc = ""]
|
||||
#[doc = "// Recipient decapsulates ciphertext"]
|
||||
#[doc = "let mut shk_dec = Secret::<{ MyKem::SHK_LEN }>::zero();"]
|
||||
#[doc = "MyKem.decaps(shk_dec.secret_mut(), sk.secret_mut(), &ct);"]
|
||||
#[doc = ""]
|
||||
#[doc = "// Both parties end up with the same shared key"]
|
||||
#[doc = "assert!(rosenpass_constant_time::compare(shk_enc.secret(), shk_dec.secret()) == 0);"]
|
||||
#[doc = "```"]
|
||||
pub struct [< $name:camel >];
|
||||
|
||||
pub const SK_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_secret_key >] as usize;
|
||||
pub const PK_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_public_key >] as usize;
|
||||
pub const CT_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_ciphertext >] as usize;
|
||||
pub const SHK_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_shared_secret >] as usize;
|
||||
|
||||
/// # Panic & Safety
|
||||
///
|
||||
@@ -18,17 +55,8 @@ macro_rules! oqs_kem {
|
||||
/// to only check that the buffers are big enough, allowing them to be even
|
||||
/// bigger. However, from a correctness point of view it does not make sense to
|
||||
/// allow bigger buffers.
|
||||
impl Kem for [< $name:camel >] {
|
||||
type Error = ::std::convert::Infallible;
|
||||
|
||||
const SK_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_secret_key >] as usize;
|
||||
const PK_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_public_key >] as usize;
|
||||
const CT_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_ciphertext >] as usize;
|
||||
const SHK_LEN: usize = ::oqs_sys::kem::[<OQS_KEM _ $name:snake _ length_shared_secret >] as usize;
|
||||
|
||||
fn keygen(sk: &mut [u8], pk: &mut [u8]) -> Guaranteed<()> {
|
||||
assert_eq!(sk.len(), Self::SK_LEN);
|
||||
assert_eq!(pk.len(), Self::PK_LEN);
|
||||
impl Kem<SK_LEN, PK_LEN, CT_LEN, SHK_LEN> for [< $name:camel >] {
|
||||
fn keygen(&self, sk: &mut [u8; SK_LEN], pk: &mut [u8; PK_LEN]) -> Result<(), KemError> {
|
||||
unsafe {
|
||||
oqs_call!(
|
||||
::oqs_sys::kem::[< OQS_KEM _ $name:snake _ keypair >],
|
||||
@@ -40,10 +68,7 @@ macro_rules! oqs_kem {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn encaps(shk: &mut [u8], ct: &mut [u8], pk: &[u8]) -> Guaranteed<()> {
|
||||
assert_eq!(shk.len(), Self::SHK_LEN);
|
||||
assert_eq!(ct.len(), Self::CT_LEN);
|
||||
assert_eq!(pk.len(), Self::PK_LEN);
|
||||
fn encaps(&self, shk: &mut [u8; SHK_LEN], ct: &mut [u8; CT_LEN], pk: &[u8; PK_LEN]) -> Result<(), KemError> {
|
||||
unsafe {
|
||||
oqs_call!(
|
||||
::oqs_sys::kem::[< OQS_KEM _ $name:snake _ encaps >],
|
||||
@@ -56,10 +81,7 @@ macro_rules! oqs_kem {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn decaps(shk: &mut [u8], sk: &[u8], ct: &[u8]) -> Guaranteed<()> {
|
||||
assert_eq!(shk.len(), Self::SHK_LEN);
|
||||
assert_eq!(sk.len(), Self::SK_LEN);
|
||||
assert_eq!(ct.len(), Self::CT_LEN);
|
||||
fn decaps(&self, shk: &mut [u8; SHK_LEN], sk: &[u8; SK_LEN], ct: &[u8; CT_LEN]) -> Result<(), KemError> {
|
||||
unsafe {
|
||||
oqs_call!(
|
||||
::oqs_sys::kem::[< OQS_KEM _ $name:snake _ decaps >],
|
||||
@@ -72,9 +94,16 @@ macro_rules! oqs_kem {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Default for [< $name:camel >] {
|
||||
fn default() -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
impl $algo_trait for [< $name:camel >] {}
|
||||
|
||||
pub use [< $name:snake >] :: [< $name:camel >];
|
||||
}}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
#![warn(missing_docs)]
|
||||
#![warn(clippy::missing_docs_in_private_items)]
|
||||
//! Bindings for liboqs used in Rosenpass
|
||||
|
||||
/// Call into a libOQS function
|
||||
macro_rules! oqs_call {
|
||||
($name:path, $($args:expr),*) => {{
|
||||
use oqs_sys::common::OQS_STATUS::*;
|
||||
@@ -17,5 +22,8 @@ macro_rules! oqs_call {
|
||||
|
||||
#[macro_use]
|
||||
mod kem_macro;
|
||||
oqs_kem!(kyber_512);
|
||||
oqs_kem!(classic_mceliece_460896);
|
||||
oqs_kem!(kyber_512, rosenpass_cipher_traits::algorithms::KemKyber512);
|
||||
oqs_kem!(
|
||||
classic_mceliece_460896,
|
||||
rosenpass_cipher_traits::algorithms::KemClassicMceliece460896
|
||||
);
|
||||
|
||||
39
overlay.nix
Normal file
@@ -0,0 +1,39 @@
|
||||
final: prev: {
|
||||
|
||||
|
||||
#
|
||||
### Actual rosenpass software ###
|
||||
#
|
||||
rosenpass = final.callPackage ./pkgs/rosenpass.nix { };
|
||||
rosenpass-oci-image = final.callPackage ./pkgs/rosenpass-oci-image.nix { };
|
||||
rp = final.callPackage ./pkgs/rosenpass.nix { package = "rp"; };
|
||||
|
||||
release-package = final.callPackage ./pkgs/release-package.nix { };
|
||||
|
||||
#
|
||||
### Appendix ###
|
||||
#
|
||||
proverif-patched = prev.proverif.overrideAttrs (old: {
|
||||
postInstall = ''
|
||||
install -D -t $out/lib cryptoverif.pvl
|
||||
'';
|
||||
});
|
||||
|
||||
proof-proverif = final.stdenv.mkDerivation {
|
||||
name = "rosenpass-proverif-proof";
|
||||
version = "unstable";
|
||||
src = final.lib.sources.sourceByRegex ./. [
|
||||
"analyze.sh"
|
||||
"marzipan(/marzipan.awk)?"
|
||||
"analysis(/.*)?"
|
||||
];
|
||||
nativeBuildInputs = [ final.proverif final.graphviz ];
|
||||
CRYPTOVERIF_LIB = final.proverif-patched + "/lib/cryptoverif.pvl";
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
bash analyze.sh -color -html $out
|
||||
'';
|
||||
};
|
||||
|
||||
whitepaper = final.callPackage ./pkgs/whitepaper.nix { };
|
||||
}
|
||||
@@ -2,8 +2,8 @@
|
||||
template: rosenpass
|
||||
title: Rosenpass
|
||||
author:
|
||||
- Karolin Varner = Independent Researcher
|
||||
- Benjamin Lipp = Max Planck Institute for Security and Privacy (MPI-SP)
|
||||
- Karolin Varner = Rosenpass e.V., Max Planck Institute for Security and Privacy (MPI-SP)
|
||||
- Benjamin Lipp = Rosenpass e.V., Max Planck Institute for Security and Privacy (MPI-SP)
|
||||
- Wanja Zaeske
|
||||
- Lisa Schmidt = {Scientific Illustrator – \\url{mullana.de}}
|
||||
- Prabhpreet Dua
|
||||
@@ -383,9 +383,18 @@ fn load_biscuit(nct) {
|
||||
"biscuit additional data",
|
||||
spkr, sidi, sidr);
|
||||
let pt : Biscuit = XAEAD::dec(k, n, ct, ad);
|
||||
|
||||
// Find the peer and apply retransmission protection
|
||||
lookup_peer(pt.peerid);
|
||||
assert(pt.biscuit_no <= peer.biscuit_used);
|
||||
|
||||
// In December 2024, the InitConf retransmission mechanism was redesigned
|
||||
// in a backwards-compatible way. See the changelog.
|
||||
//
|
||||
// -- 2024-11-30, Karolin Varner
|
||||
if (protocol_version!(< "0.3.0")) {
|
||||
// Ensure that the biscuit is used only once
|
||||
assert(pt.biscuit_no <= peer.biscuit_used);
|
||||
}
|
||||
|
||||
// Restore the chaining key
|
||||
ck ← pt.ck;
|
||||
@@ -501,7 +510,7 @@ LAST_UNDER_LOAD_WINDOW = 1 //seconds
|
||||
|
||||
The initiator deals with packet loss by storing the messages it sends to the responder and retransmitting them in randomized, exponentially increasing intervals until they get a response. Receiving RespHello terminates retransmission of InitHello. A Data or EmptyData message serves as acknowledgement of receiving InitConf and terminates its retransmission.
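As an illustration only (not part of the specification), the initiator-side retransmission schedule described above can be sketched as follows; the base interval, cap, and use of the `rand` crate are assumptions of this sketch, not normative values.

```rust
use rand::Rng;
use std::time::Duration;

/// Illustrative initiator-side retransmission timer: randomized,
/// exponentially increasing intervals until a response arrives.
struct Retransmission {
    attempt: u32,
}

impl Retransmission {
    /// Compute the delay before the next retransmission attempt.
    fn next_delay<R: Rng>(&mut self, rng: &mut R) -> Duration {
        // Assumed base interval and upper cap (illustrative values).
        const BASE_MS: u64 = 500;
        const CAP_MS: u64 = 15_000;
        let exp = BASE_MS.saturating_mul(1 << self.attempt.min(10)).min(CAP_MS);
        self.attempt += 1;
        // Randomize within [exp/2, exp) so peers do not retransmit in lockstep.
        Duration::from_millis(rng.gen_range(exp / 2..exp))
    }
}
```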
|
||||
|
||||
The responder does not need to do anything special to handle RespHello retransmission – if the RespHello package is lost, the initiator retransmits InitHello and the responder can generate another RespHello package from that. InitConf retransmission needs to be handled specifically in the responder code because accepting an InitConf retransmission would reset the live session including the nonce counter, which would cause nonce reuse. Implementations must detect the case that `biscuit_no = biscuit_used` in ICR5, skip execution of ICR6 and ICR7, and just transmit another EmptyData package to confirm that the initiator can stop transmitting InitConf.
|
||||
The responder uses a less complex form of the same mechanism: the responder never retransmits RespHello; instead, it generates a new RespHello message if InitHello is retransmitted. Confirmation messages for completed handshakes (EmptyData) are retransmitted by storing the most recent InitConf messages (or their hashes) and caching the associated EmptyData messages. Through this cache, InitConf retransmission is detected and the associated EmptyData message is retransmitted.
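A minimal sketch of the cache described above, assuming a 32-byte message hash and an in-memory map; the hash function, map type, and eviction policy are left open by the specification and are assumptions of this sketch.

```rust
use std::collections::HashMap;

/// Illustrative responder-side cache: maps the hash of a received InitConf
/// message to the EmptyData confirmation that was sent for it, so that a
/// retransmitted InitConf is answered from the cache instead of re-running
/// ICR6/ICR7 (which would reset the live session and its nonce counter).
struct InitConfCache {
    entries: HashMap<[u8; 32], Vec<u8>>,
}

impl InitConfCache {
    /// Return the cached EmptyData for `init_conf_hash`, creating and caching
    /// it with `make_empty_data` on first sight. The boolean reports whether
    /// this was a retransmission (cache hit).
    fn confirm(
        &mut self,
        init_conf_hash: [u8; 32],
        make_empty_data: impl FnOnce() -> Vec<u8>,
    ) -> (&[u8], bool) {
        let mut retransmission = true;
        let empty_data = self.entries.entry(init_conf_hash).or_insert_with(|| {
            retransmission = false;
            make_empty_data()
        });
        // A real implementation would bound the cache (e.g. keep only the most
        // recent entries) instead of letting it grow without limit.
        (empty_data.as_slice(), retransmission)
    }
}
```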
|
||||
|
||||
### Interaction with cookie reply system
|
||||
|
||||
@@ -515,6 +524,76 @@ When the responder is under load and it recieves an InitConf message, the messag
|
||||
|
||||
# Changelog
|
||||
|
||||
### 0.3.x
|
||||
|
||||
#### 2024-10-30 – InitConf retransmission updates
|
||||
|
||||
\vspace{0.5em}
|
||||
|
||||
Author: Karolin Varner
|
||||
Issue: [#331](https://github.com/rosenpass/rosenpass/issues/331)
|
||||
PR: [#513](https://github.com/rosenpass/rosenpass/pull/513)
|
||||
|
||||
\vspace{0.5em}
|
||||
|
||||
We redesign the InitConf retransmission mechanism to use a hash table. This avoids the need for the InitConf handling code to account for InitConf retransmission specifically and moves the retransmission logic into less-sensitive code.
|
||||
|
||||
Previously, we would specifically account for InitConf retransmission in the InitConf handling code by checking the biscuit number: If the biscuit number was higher than any previously seen biscuit number, then this must be a new key-exchange being completed; if the biscuit number was exactly the highest seen biscuit number, then the InitConf message is interpreted as an InitConf retransmission; in this case, an entirely new EmptyData (responder confirmation) message was generated as confirmation that InitConf has been received and that the initiator can now cease opportunistic retransmission of InitConf.
|
||||
|
||||
This mechanism was a bit brittle; even leading to a very minor but still relevant security issue, necessitating the release of Rosenpass maintenance version 0.2.2 with a [fix for the problem](https://github.com/rosenpass/rosenpass/pull/329). We had processed the InitConf message, correctly identifying that InitConf was a retransmission, but we failed to pass this information on to the rest of the code base, leading to double emission of the same "hey, we have a new cryptographic session key" even if the `outfile` option was used to integrate Rosenpass into some external application. If this event was used anywhere to reset a nonce, then this could have led to a nonce-misuse, although for the use with WireGuard this is not an issue.
|
||||
|
||||
By removing all retransmission handling code from the cryptographic protocol, we are taking structural measures to exclude the possibilities of similar issues.
|
||||
|
||||
- In section "Dealing With Package Loss" we replace
|
||||
|
||||
\begin{quote}
|
||||
The responder does not need to do anything special to handle RespHello retransmission – if the RespHello package is lost, the initiator retransmits InitHello and the responder can generate another RespHello package from that. InitConf retransmission needs to be handled specifically in the responder code because accepting an InitConf retransmission would reset the live session including the nonce counter, which would cause nonce reuse. Implementations must detect the case that `biscuit_no = biscuit_used` in ICR5, skip execution of ICR6 and ICR7, and just transmit another EmptyData package to confirm that the initiator can stop transmitting InitConf.
|
||||
\end{quote}
|
||||
|
||||
by
|
||||
|
||||
\begin{quote}
|
||||
The responder uses a less complex form of the same mechanism: the responder never retransmits RespHello; instead, it generates a new RespHello message if InitHello is retransmitted. Confirmation messages for completed handshakes (EmptyData) are retransmitted by storing the most recent InitConf messages (or their hashes) and caching the associated EmptyData messages. Through this cache, InitConf retransmission is detected and the associated EmptyData message is retransmitted.
|
||||
\end{quote}
|
||||
|
||||
- In function `load_biscuit` we replace
|
||||
|
||||
``` {=tex}
|
||||
\begin{quote}
|
||||
\begin{minted}{pseudorust}
|
||||
assert(pt.biscuit_no <= peer.biscuit_used);
|
||||
\end{minted}
|
||||
\end{quote}
|
||||
```
|
||||
|
||||
by
|
||||
|
||||
``` {=tex}
|
||||
\begin{quote}
|
||||
\begin{minted}{pseudorust}
|
||||
// In December 2024, the InitConf retransmission mechanism was redesigned
|
||||
// in a backwards-compatible way. See the changelog.
|
||||
//
|
||||
// -- 2024-11-30, Karolin Varner
|
||||
if (protocol_version!(< "0.3.0")) {
|
||||
// Ensure that the biscuit is used only once
|
||||
assert(pt.biscuit_no <= peer.biscuit_used);
|
||||
}
|
||||
\end{minted}
|
||||
\end{quote}
|
||||
```
|
||||
|
||||
#### 2024-04-16 – Denial of Service Mitigation
|
||||
|
||||
\vspace{0.5em}
|
||||
|
||||
Author: Prabhpreet Dua
|
||||
Issue: [#137](https://github.com/rosenpass/rosenpass/issues/137)
|
||||
PR: [#142](https://github.com/rosenpass/rosenpass/pull/142)
|
||||
|
||||
\vspace{0.5em}
|
||||
|
||||
- Added denial of service mitigation using the WireGuard cookie mechanism
|
||||
- Added section "Denial of Service Mitigation and Cookies", and modify "Dealing with Packet Loss" for DoS cookie mechanism
|
||||
|
||||
\printbibliography
|
||||
|
||||
9
pkgs/example.toml
Normal file
@@ -0,0 +1,9 @@
|
||||
dev = "rp-example"
|
||||
ip = "fc00::1/64"
|
||||
listen = "[::]:51821"
|
||||
private_keys_dir = "/run/credentials/rp@example.service"
|
||||
verbose = true
|
||||
|
||||
[[peers]]
|
||||
public_keys_dir = "/etc/rosenpass/example/peers/client"
|
||||
allowed_ips = "fc00::2"
|
||||
30
pkgs/package-deb.nix
Normal file
@@ -0,0 +1,30 @@
|
||||
{ runCommand, dpkg, rosenpass }:
|
||||
|
||||
let
|
||||
inherit (rosenpass) version;
|
||||
in
|
||||
|
||||
runCommand "rosenpass-${version}.deb" { } ''
|
||||
mkdir -p packageroot/DEBIAN
|
||||
|
||||
cat << EOF > packageroot/DEBIAN/control
|
||||
Package: rosenpass
|
||||
Version: ${version}
|
||||
Architecture: all
|
||||
Maintainer: Jacek Galowicz <jacek@galowicz.de>
|
||||
Depends:
|
||||
Description: Post-quantum-secure VPN tool Rosenpass
|
||||
Rosenpass is a post-quantum-secure VPN
|
||||
that uses WireGuard to transport the actual data.
|
||||
EOF
|
||||
|
||||
|
||||
mkdir -p packageroot/usr/bin
|
||||
install -m755 -t packageroot/usr/bin ${rosenpass}/bin/*
|
||||
|
||||
mkdir -p packageroot/etc/rosenpass
|
||||
cp -r ${rosenpass}/lib/systemd packageroot/etc/
|
||||
cp ${./example.toml} packageroot/etc/rosenpass/example.toml
|
||||
|
||||
${dpkg}/bin/dpkg --build packageroot $out
|
||||
''
|
||||
57
pkgs/package-rpm.nix
Normal file
@@ -0,0 +1,57 @@
|
||||
{ lib, system, runCommand, rosenpass, rpm }:
|
||||
|
||||
let
|
||||
splitVersion = lib.strings.splitString "-" rosenpass.version;
|
||||
version = builtins.head splitVersion;
|
||||
release =
|
||||
if builtins.length splitVersion != 2
|
||||
then "release"
|
||||
else builtins.elemAt splitVersion 1;
|
||||
arch = builtins.head (builtins.split "-" system);
|
||||
in
|
||||
|
||||
runCommand "rosenpass-${version}.rpm" { } ''
|
||||
mkdir -p rpmbuild/SPECS
|
||||
|
||||
cat << EOF > rpmbuild/SPECS/rosenpass.spec
|
||||
Name: rosenpass
|
||||
Release: ${release}
|
||||
Version: ${version}
|
||||
Summary: Post-quantum-secure VPN key exchange
|
||||
License: Apache-2.0
|
||||
|
||||
%description
|
||||
Post-quantum-secure VPN tool Rosenpass
|
||||
Rosenpass is a post-quantum-secure VPN
|
||||
that uses WireGuard to transport the actual data.
|
||||
|
||||
%files
|
||||
/usr/bin/rosenpass
|
||||
/usr/bin/rp
|
||||
/etc/systemd/system/rosenpass.target
|
||||
/etc/systemd/system/rosenpass@.service
|
||||
/etc/systemd/system/rp@.service
|
||||
/etc/rosenpass/example.toml
|
||||
EOF
|
||||
|
||||
buildroot=rpmbuild/BUILDROOT/rosenpass-${version}-${release}.${arch}
|
||||
mkdir -p $buildroot/usr/bin
|
||||
install -m755 -t $buildroot/usr/bin ${rosenpass}/bin/*
|
||||
|
||||
mkdir -p $buildroot/etc/rosenpass
|
||||
cp -r ${rosenpass}/lib/systemd $buildroot/etc/
|
||||
chmod -R 744 $buildroot/etc/systemd
|
||||
cp ${./example.toml} $buildroot/etc/rosenpass/example.toml
|
||||
|
||||
export HOME=/build
|
||||
mkdir -p /build/tmp
|
||||
ls -R rpmbuild
|
||||
|
||||
${rpm}/bin/rpmbuild \
|
||||
-bb \
|
||||
--dbpath=$HOME \
|
||||
--define "_tmppath /build/tmp" \
|
||||
rpmbuild/SPECS/rosenpass.spec
|
||||
|
||||
cp rpmbuild/RPMS/${arch}/rosenpass*.rpm $out
|
||||
''
|
||||
27
pkgs/release-package.nix
Normal file
@@ -0,0 +1,27 @@
|
||||
{ lib, stdenvNoCC, runCommandNoCC, pkgsStatic, rosenpass, rosenpass-oci-image, rp } @ args:
|
||||
|
||||
let
|
||||
version = rosenpass.version;
|
||||
|
||||
# select static packages on Linux, default packages otherwise
|
||||
package =
|
||||
if stdenvNoCC.hostPlatform.isLinux then
|
||||
pkgsStatic.rosenpass
|
||||
else args.rosenpass;
|
||||
rp =
|
||||
if stdenvNoCC.hostPlatform.isLinux then
|
||||
pkgsStatic.rp
|
||||
else args.rp;
|
||||
oci-image =
|
||||
if stdenvNoCC.hostPlatform.isLinux then
|
||||
pkgsStatic.rosenpass-oci-image
|
||||
else args.rosenpass-oci-image;
|
||||
in
|
||||
runCommandNoCC "lace-result" { } ''
|
||||
mkdir {bin,$out}
|
||||
tar -cvf $out/rosenpass-${stdenvNoCC.hostPlatform.system}-${version}.tar \
|
||||
-C ${package} bin/rosenpass lib/systemd \
|
||||
-C ${rp} bin/rp
|
||||
cp ${oci-image} \
|
||||
$out/rosenpass-oci-image-${stdenvNoCC.hostPlatform.system}-${version}.tar.gz
|
||||
''
|
||||
11
pkgs/rosenpass-oci-image.nix
Normal file
@@ -0,0 +1,11 @@
|
||||
{ dockerTools, buildEnv, rosenpass }:
|
||||
|
||||
dockerTools.buildImage {
|
||||
name = rosenpass.name + "-oci";
|
||||
copyToRoot = buildEnv {
|
||||
name = "image-root";
|
||||
paths = [ rosenpass ];
|
||||
pathsToLink = [ "/bin" ];
|
||||
};
|
||||
config.Cmd = [ "/bin/rosenpass" ];
|
||||
}
|
||||
88
pkgs/rosenpass.nix
Normal file
@@ -0,0 +1,88 @@
|
||||
{ lib, stdenv, rustPlatform, cmake, mandoc, removeReferencesTo, bash, package ? "rosenpass" }:
|
||||
|
||||
let
|
||||
# whether we want to build a statically linked binary
|
||||
isStatic = stdenv.targetPlatform.isStatic;
|
||||
|
||||
scoped = (scope: scope.result);
|
||||
|
||||
# source files relevant for rust
|
||||
src = scoped rec {
|
||||
# File suffixes to include
|
||||
extensions = [
|
||||
"lock"
|
||||
"rs"
|
||||
"service"
|
||||
"target"
|
||||
"toml"
|
||||
];
|
||||
# Files to explicitly include
|
||||
files = [
|
||||
"to/README.md"
|
||||
];
|
||||
|
||||
src = ../.;
|
||||
filter = (path: type: scoped rec {
|
||||
inherit (lib) any id removePrefix hasSuffix;
|
||||
anyof = (any id);
|
||||
|
||||
basename = baseNameOf (toString path);
|
||||
relative = removePrefix (toString src + "/") (toString path);
|
||||
|
||||
result = anyof [
|
||||
(type == "directory")
|
||||
(any (ext: hasSuffix ".${ext}" basename) extensions)
|
||||
(any (file: file == relative) files)
|
||||
];
|
||||
});
|
||||
|
||||
result = lib.sources.cleanSourceWith { inherit src filter; };
|
||||
};
|
||||
|
||||
# parsed Cargo.toml
|
||||
cargoToml = builtins.fromTOML (builtins.readFile (src + "/rosenpass/Cargo.toml"));
|
||||
in
|
||||
rustPlatform.buildRustPackage {
|
||||
name = cargoToml.package.name;
|
||||
version = cargoToml.package.version;
|
||||
inherit src;
|
||||
|
||||
cargoBuildOptions = [ "--package" package ];
|
||||
cargoTestOptions = [ "--package" package ];
|
||||
|
||||
doCheck = true;
|
||||
|
||||
cargoLock = {
|
||||
lockFile = src + "/Cargo.lock";
|
||||
outputHashes = {
|
||||
"memsec-0.6.3" = "sha256-4ri+IEqLd77cLcul3lZrmpDKj4cwuYJ8oPRAiQNGeLw=";
|
||||
"uds-0.4.2" = "sha256-qlxr/iJt2AV4WryePIvqm/8/MK/iqtzegztNliR93W8=";
|
||||
"libcrux-blake2-0.0.3-pre" = "sha256-0CLjuzwJqGooiODOHf5D8Hc8ClcG/XcGvVGyOVnLmJY=";
|
||||
};
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
stdenv.cc
|
||||
cmake # for oqs build in the oqs-sys crate
|
||||
mandoc # for the built-in manual
|
||||
removeReferencesTo
|
||||
rustPlatform.bindgenHook # for C-bindings in the crypto libs
|
||||
];
|
||||
buildInputs = [ bash ];
|
||||
|
||||
hardeningDisable = lib.optional isStatic "fortify";
|
||||
|
||||
postInstall = ''
|
||||
mkdir -p $out/lib/systemd/system
|
||||
install systemd/rosenpass@.service $out/lib/systemd/system
|
||||
install systemd/rp@.service $out/lib/systemd/system
|
||||
install systemd/rosenpass.target $out/lib/systemd/system
|
||||
'';
|
||||
|
||||
meta = {
|
||||
inherit (cargoToml.package) description homepage;
|
||||
license = with lib.licenses; [ mit asl20 ];
|
||||
maintainers = [ lib.maintainers.wucke13 ];
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
}
|
||||
29
pkgs/whitepaper.nix
Normal file
@@ -0,0 +1,29 @@
|
||||
{ stdenvNoCC, texlive, ncurses, python3Packages, which }:
|
||||
|
||||
let
|
||||
customTexLiveSetup = (texlive.combine {
|
||||
inherit (texlive) acmart amsfonts biber biblatex biblatex-software
|
||||
biblatex-trad ccicons csquotes csvsimple doclicense eso-pic fancyvrb
|
||||
fontspec gitinfo2 gobble ifmtarg koma-script latexmk lm lualatex-math
|
||||
markdown mathtools minted noto nunito paralist pgf scheme-basic soul
|
||||
unicode-math upquote xifthen xkeyval xurl;
|
||||
});
|
||||
in
|
||||
stdenvNoCC.mkDerivation {
|
||||
name = "whitepaper";
|
||||
src = ../papers;
|
||||
nativeBuildInputs = [
|
||||
ncurses # tput
|
||||
python3Packages.pygments
|
||||
customTexLiveSetup # custom tex live scheme
|
||||
which
|
||||
];
|
||||
buildPhase = ''
|
||||
export HOME=$(mktemp -d)
|
||||
latexmk -r tex/CI.rc
|
||||
'';
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
mv *.pdf readme.md $out/
|
||||
'';
|
||||
}
|
||||
17
readme.md
@@ -23,6 +23,12 @@ rosenpass help
|
||||
|
||||
Follow [quick start instructions](https://rosenpass.eu/#start) to get a VPN up and running.
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are generally welcome. Join our [Matrix Chat](https://matrix.to/#/#rosenpass:matrix.org) if you are looking for guidance on how to contribute or for people to collaborate with.
|
||||
|
||||
We also have a – as of now, very minimal – [contributors guide](CONTRIBUTING.md).
|
||||
|
||||
## Software architecture
|
||||
|
||||
The [rosenpass tool](./src/) is written in Rust and uses liboqs[^liboqs]. The tool establishes a symmetric key and provides it to WireGuard. Since it supplies WireGuard with a key through the PSK feature, using Rosenpass+WireGuard is cryptographically no less secure than using WireGuard on its own ("hybrid security"). Rosenpass refreshes the symmetric key every two minutes.
|
||||
@@ -66,10 +72,21 @@ A wrapper script provides instant feedback about which queries execute as expect
|
||||
|
||||
# Getting Rosenpass
|
||||
|
||||
Documentation and installation guides can be found at the [Rosenpass website](https://rosenpass.eu/docs).
|
||||
|
||||
Rosenpass is packaged for more and more distributions, maybe also for the distribution of your choice?
|
||||
|
||||
[](https://repology.org/project/rosenpass/versions)
|
||||
|
||||
## Docker Images
|
||||
|
||||
Rosenpass is also available as prebuilt Docker images:
|
||||
|
||||
- [`ghcr.io/rosenpass/rosenpass`](https://github.com/rosenpass/rosenpass/pkgs/container/rosenpass)
|
||||
- [`ghcr.io/rosenpass/rp`](https://github.com/rosenpass/rosenpass/pkgs/container/rp)
|
||||
|
||||
For details on how to use these images, refer to the [Docker usage guide](docker/USAGE.md).
|
||||
|
||||
# Mirrors
|
||||
|
||||
Don't want to use GitHub or only have an IPv6 connection? Rosenpass has set up two mirrors for this:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "rosenpass"
|
||||
version = "0.2.1"
|
||||
version = "0.3.0-dev"
|
||||
authors = ["Karolin Varner <karo@cupdev.net>", "wucke13 <wucke13@gmail.com>"]
|
||||
edition = "2021"
|
||||
license = "MIT OR Apache-2.0"
|
||||
@@ -8,11 +8,33 @@ description = "Build post-quantum-secure VPNs with WireGuard!"
|
||||
homepage = "https://rosenpass.eu/"
|
||||
repository = "https://github.com/rosenpass/rosenpass"
|
||||
readme = "readme.md"
|
||||
rust-version = "1.77"
|
||||
|
||||
[[bin]]
|
||||
name = "rosenpass"
|
||||
path = "src/main.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "rosenpass-gen-ipc-msg-types"
|
||||
path = "src/bin/gen-ipc-msg-types.rs"
|
||||
required-features = ["experiment_api", "internal_bin_gen_ipc_msg_types"]
|
||||
|
||||
[[test]]
|
||||
name = "api-integration-tests"
|
||||
required-features = ["experiment_api", "internal_testing"]
|
||||
|
||||
[[test]]
|
||||
name = "api-integration-tests-api-setup"
|
||||
required-features = ["experiment_api", "internal_testing"]
|
||||
|
||||
[[test]]
|
||||
name = "gen-ipc-msg-types"
|
||||
required-features = [
|
||||
"experiment_api",
|
||||
"internal_testing",
|
||||
"internal_bin_gen_ipc_msg_types",
|
||||
]
|
||||
|
||||
[[bench]]
|
||||
name = "handshake"
|
||||
harness = false
|
||||
@@ -34,12 +56,22 @@ env_logger = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
clap_complete = { workspace = true }
|
||||
clap_mangen = { workspace = true }
|
||||
mio = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
zerocopy = { workspace = true }
|
||||
home = { workspace = true }
|
||||
derive_builder = {workspace = true}
|
||||
rosenpass-wireguard-broker = {workspace = true}
|
||||
derive_builder = { workspace = true }
|
||||
rosenpass-wireguard-broker = { workspace = true }
|
||||
zeroize = { workspace = true }
|
||||
hex-literal = { workspace = true, optional = true }
|
||||
hex = { workspace = true, optional = true }
|
||||
heck = { workspace = true, optional = true }
|
||||
command-fds = { workspace = true, optional = true }
|
||||
rustix = { workspace = true, optional = true }
|
||||
uds = { workspace = true, optional = true, features = ["mio_1xx"] }
|
||||
signal-hook = { workspace = true, optional = true }
|
||||
|
||||
[build-dependencies]
|
||||
anyhow = { workspace = true }
|
||||
@@ -48,9 +80,32 @@ anyhow = { workspace = true }
|
||||
criterion = { workspace = true }
|
||||
test_bin = { workspace = true }
|
||||
stacker = { workspace = true }
|
||||
serial_test = {workspace = true}
|
||||
procspawn = {workspace = true}
|
||||
serial_test = { workspace = true }
|
||||
procspawn = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
rustix = { workspace = true }
|
||||
|
||||
[features]
|
||||
enable_broker_api = ["rosenpass-wireguard-broker/enable_broker_api"]
|
||||
enable_memfd_alloc = []
|
||||
#default = ["experiment_libcrux_all"]
|
||||
experiment_cookie_dos_mitigation = []
|
||||
experiment_memfd_secret = ["rosenpass-wireguard-broker/experiment_memfd_secret"]
|
||||
experiment_libcrux_all = ["rosenpass-ciphers/experiment_libcrux_all"]
|
||||
experiment_libcrux_blake2 = ["rosenpass-ciphers/experiment_libcrux_blake2"]
|
||||
experiment_libcrux_chachapoly = [
|
||||
"rosenpass-ciphers/experiment_libcrux_chachapoly",
|
||||
]
|
||||
experiment_libcrux_kyber = ["rosenpass-ciphers/experiment_libcrux_kyber"]
|
||||
experiment_api = [
|
||||
"hex-literal",
|
||||
"uds",
|
||||
"command-fds",
|
||||
"rustix",
|
||||
"rosenpass-util/experiment_file_descriptor_passing",
|
||||
"rosenpass-wireguard-broker/experiment_api",
|
||||
]
|
||||
internal_signal_handling_for_coverage_reports = ["signal-hook"]
|
||||
internal_testing = []
|
||||
internal_bin_gen_ipc_msg_types = ["hex", "heck"]
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "allow", check-cfg = ['cfg(coverage)'] }
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
use anyhow::Result;
|
||||
use rosenpass::protocol::{CryptoServer, HandleMsgResult, MsgBuf, PeerPtr, SPk, SSk, SymKey};
|
||||
use rosenpass::protocol::{
|
||||
CryptoServer, HandleMsgResult, MsgBuf, PeerPtr, ProtocolVersion, SPk, SSk, SymKey,
|
||||
};
|
||||
use std::ops::DerefMut;
|
||||
|
||||
use rosenpass_cipher_traits::Kem;
|
||||
use rosenpass_ciphers::kem::StaticKem;
|
||||
use rosenpass_cipher_traits::primitives::Kem;
|
||||
use rosenpass_ciphers::StaticKem;
|
||||
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
use rosenpass_secret_memory::secret_policy_try_use_memfd_secrets;
|
||||
@@ -40,25 +43,33 @@ fn hs(ini: &mut CryptoServer, res: &mut CryptoServer) -> Result<()> {
|
||||
|
||||
fn keygen() -> Result<(SSk, SPk)> {
|
||||
let (mut sk, mut pk) = (SSk::zero(), SPk::zero());
|
||||
StaticKem::keygen(sk.secret_mut(), pk.secret_mut())?;
|
||||
StaticKem.keygen(sk.secret_mut(), pk.deref_mut())?;
|
||||
Ok((sk, pk))
|
||||
}
|
||||
|
||||
fn make_server_pair() -> Result<(CryptoServer, CryptoServer)> {
|
||||
fn make_server_pair(protocol_version: ProtocolVersion) -> Result<(CryptoServer, CryptoServer)> {
|
||||
let psk = SymKey::random();
|
||||
let ((ska, pka), (skb, pkb)) = (keygen()?, keygen()?);
|
||||
let (mut a, mut b) = (
|
||||
CryptoServer::new(ska, pka.clone()),
|
||||
CryptoServer::new(skb, pkb.clone()),
|
||||
);
|
||||
a.add_peer(Some(psk.clone()), pkb)?;
|
||||
b.add_peer(Some(psk), pka)?;
|
||||
a.add_peer(Some(psk.clone()), pkb, protocol_version.clone())?;
|
||||
b.add_peer(Some(psk), pka, protocol_version)?;
|
||||
Ok((a, b))
|
||||
}
|
||||
|
||||
fn criterion_benchmark(c: &mut Criterion) {
|
||||
fn criterion_benchmark_v02(c: &mut Criterion) {
|
||||
criterion_benchmark(c, ProtocolVersion::V02)
|
||||
}
|
||||
|
||||
fn criterion_benchmark_v03(c: &mut Criterion) {
|
||||
criterion_benchmark(c, ProtocolVersion::V03)
|
||||
}
|
||||
|
||||
fn criterion_benchmark(c: &mut Criterion, protocol_version: ProtocolVersion) {
|
||||
secret_policy_try_use_memfd_secrets();
|
||||
let (mut a, mut b) = make_server_pair().unwrap();
|
||||
let (mut a, mut b) = make_server_pair(protocol_version).unwrap();
|
||||
c.bench_function("cca_secret_alloc", |bench| {
|
||||
bench.iter(|| {
|
||||
SSk::zero();
|
||||
@@ -81,5 +92,6 @@ fn criterion_benchmark(c: &mut Criterion) {
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group!(benches, criterion_benchmark);
|
||||
criterion_main!(benches);
|
||||
criterion_group!(benches_v02, criterion_benchmark_v02);
|
||||
criterion_group!(benches_v03, criterion_benchmark_v03);
|
||||
criterion_main!(benches_v02, benches_v03);
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
use anyhow::bail;
|
||||
use anyhow::Result;
|
||||
use std::env;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
/// Invokes a troff compiler to compile a manual page
|
||||
fn render_man(compiler: &str, man: &str) -> Result<String> {
|
||||
let out = Command::new(compiler).args(["-Tascii", man]).output()?;
|
||||
if !out.status.success() {
|
||||
bail!("{} returned an error", compiler);
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(out.stdout)?)
|
||||
}
|
||||
|
||||
/// Generates the manual page
|
||||
fn generate_man() -> String {
|
||||
// This function is purposely stupid and redundant
|
||||
|
||||
let man = render_man("mandoc", "./doc/rosenpass.1");
|
||||
if let Ok(man) = man {
|
||||
return man;
|
||||
}
|
||||
|
||||
let man = render_man("groff", "./doc/rosenpass.1");
|
||||
if let Ok(man) = man {
|
||||
return man;
|
||||
}
|
||||
|
||||
"Cannot render manual page. Please visit https://rosenpass.eu/docs/manuals/\n".into()
|
||||
}
|
||||
|
||||
fn man() {
|
||||
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
|
||||
let man = generate_man();
|
||||
let path = out_dir.join("rosenpass.1.ascii");
|
||||
|
||||
let mut file = File::create(&path).unwrap();
|
||||
file.write_all(man.as_bytes()).unwrap();
|
||||
|
||||
println!("cargo:rustc-env=ROSENPASS_MAN={}", path.display());
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// For now, rerun the build script on every time, as the build script
|
||||
// is not very expensive right now.
|
||||
println!("cargo:rerun-if-changed=./");
|
||||
man();
|
||||
}
|
||||
341
rosenpass/src/api/api_handler.rs
Normal file
@@ -0,0 +1,341 @@
|
||||
// Note: This is business logic; tested through the integration tests in
|
||||
// rosenpass/tests/
|
||||
|
||||
use std::{borrow::BorrowMut, collections::VecDeque, os::fd::OwnedFd};
|
||||
|
||||
use anyhow::Context;
|
||||
use rosenpass_to::{ops::copy_slice, To};
|
||||
use rosenpass_util::{
|
||||
fd::FdIo,
|
||||
functional::{run, ApplyExt},
|
||||
io::ReadExt,
|
||||
mem::DiscardResultExt,
|
||||
mio::UnixStreamExt,
|
||||
result::OkExt,
|
||||
};
|
||||
use rosenpass_wireguard_broker::brokers::mio_client::MioBrokerClient;
|
||||
|
||||
use crate::{
|
||||
api::{add_listen_socket_response_status, add_psk_broker_response_status},
|
||||
app_server::AppServer,
|
||||
protocol::BuildCryptoServer,
|
||||
};
|
||||
|
||||
use super::{supply_keypair_response_status, Server as ApiServer};
|
||||
|
||||
/// Stores the state of the API handler.
|
||||
///
|
||||
/// This is used in the context [ApiHandlerContext]; [ApiHandlerContext] exposes both
|
||||
/// the [AppServer] and the API handler state.
|
||||
///
|
||||
/// [ApiHandlerContext] is what actually contains the API handler functions.
|
||||
#[derive(Debug)]
|
||||
pub struct ApiHandler {
|
||||
_dummy: (),
|
||||
}
|
||||
|
||||
impl ApiHandler {
|
||||
/// Construct an [Self]
|
||||
#[allow(clippy::new_without_default)]
|
||||
pub fn new() -> Self {
|
||||
Self { _dummy: () }
|
||||
}
|
||||
}
|
||||
|
||||
/// The implementation of the API requires both access to its own state [ApiHandler] and to the
|
||||
/// [AppServer] the API is supposed to operate on.
|
||||
///
|
||||
/// This trait provides both; it implements a pattern to allow for multiple - **potentially
|
||||
/// overlapping** mutable references to be passed to the API handler functions.
|
||||
///
|
||||
/// This relatively complex scheme is chosen to appease the borrow checker: We want flexibility
|
||||
/// with regard to where the [ApiHandler] is stored and we need a mutable reference to
|
||||
/// [ApiHandler]. We also need a mutable reference to [AppServer]. Achieving this by using the
|
||||
/// direct method would be impossible because the [ApiHandler] is actually stored somewhere inside
|
||||
/// [AppServer]. The borrow checker does not allow this.
|
||||
///
|
||||
/// What we have instead is – in practice – a reference to [AppServer] and a function (as part of
|
||||
/// the trait) that extracts an [ApiHandler] reference from [AppServer], which is allowed by the
|
||||
/// borrow checker. A benefit of the use of a trait here is that we could, if desired, also store
|
||||
/// the [ApiHandler] outside [AppServer]. It really depends on the trait.
|
||||
pub trait ApiHandlerContext {
|
||||
/// Retrieve the [ApiHandler]
|
||||
fn api_handler(&self) -> &ApiHandler;
|
||||
/// Retrieve the [AppServer]
|
||||
fn app_server(&self) -> &AppServer;
|
||||
/// Retrieve the [ApiHandler]
|
||||
fn api_handler_mut(&mut self) -> &mut ApiHandler;
|
||||
/// Retrieve the [AppServer]
|
||||
fn app_server_mut(&mut self) -> &mut AppServer;
|
||||
}
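// Illustration (hypothetical, not part of the Rosenpass code base): a type
// that owns both pieces could satisfy the trait as sketched below. `MyServer`
// and its fields are invented names used only to show the accessor pattern
// described above.
//
//     struct MyServer {
//         api_handler: ApiHandler,
//         app_server: AppServer,
//     }
//
//     impl ApiHandlerContext for MyServer {
//         fn api_handler(&self) -> &ApiHandler { &self.api_handler }
//         fn app_server(&self) -> &AppServer { &self.app_server }
//         fn api_handler_mut(&mut self) -> &mut ApiHandler { &mut self.api_handler }
//         fn app_server_mut(&mut self) -> &mut AppServer { &mut self.app_server }
//     }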
|
||||
|
||||
/// This is the Error raised by [ApiServer::supply_keypair]; it contains both
|
||||
/// the underlying error message as well as the status value
|
||||
/// returned by the API.
|
||||
///
|
||||
/// [ApiServer::supply_keypair] generally constructs a [Self] by using one of the
|
||||
/// utility functions [SupplyKeypairErrorExt].
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
#[error("Error in SupplyKeypair")]
|
||||
struct SupplyKeypairError {
|
||||
/// The status code communicated via the Rosenpass API
|
||||
status: u128,
|
||||
/// The underlying error that caused the Rosenpass API level Error
|
||||
#[source]
|
||||
cause: anyhow::Error,
|
||||
}
|
||||
|
||||
trait SupplyKeypairErrorExt<T> {
|
||||
/// Imbue any Error (that can be represented as [anyhow::Error]) with
|
||||
/// an arbitrary error code
|
||||
fn e_custom(self, status: u128) -> Result<T, SupplyKeypairError>;
|
||||
/// Imbue any Error (that can be represented as [anyhow::Error]) with
|
||||
/// the [supply_keypair_response_status::INTERNAL_ERROR] error code
|
||||
fn einternal(self) -> Result<T, SupplyKeypairError>;
|
||||
/// Imbue any Error (that can be represented as [anyhow::Error]) with
|
||||
/// the [supply_keypair_response_status::KEYPAIR_ALREADY_SUPPLIED] error code
|
||||
fn ealready_supplied(self) -> Result<T, SupplyKeypairError>;
|
||||
/// Imbue any Error (that can be represented as [anyhow::Error]) with
|
||||
/// the [supply_keypair_response_status::INVALID_REQUEST] error code
|
||||
fn einvalid_req(self) -> Result<T, SupplyKeypairError>;
|
||||
}
|
||||
|
||||
impl<T, E: Into<anyhow::Error>> SupplyKeypairErrorExt<T> for Result<T, E> {
|
||||
fn e_custom(self, status: u128) -> Result<T, SupplyKeypairError> {
|
||||
self.map_err(|e| SupplyKeypairError {
|
||||
status,
|
||||
cause: e.into(),
|
||||
})
|
||||
}
|
||||
|
||||
fn einternal(self) -> Result<T, SupplyKeypairError> {
|
||||
self.e_custom(supply_keypair_response_status::INTERNAL_ERROR)
|
||||
}
|
||||
|
||||
fn ealready_supplied(self) -> Result<T, SupplyKeypairError> {
|
||||
self.e_custom(supply_keypair_response_status::KEYPAIR_ALREADY_SUPPLIED)
|
||||
}
|
||||
|
||||
fn einvalid_req(self) -> Result<T, SupplyKeypairError> {
|
||||
self.e_custom(supply_keypair_response_status::INVALID_REQUEST)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ApiServer for T
|
||||
where
|
||||
T: ?Sized + ApiHandlerContext,
|
||||
{
|
||||
fn ping(
|
||||
&mut self,
|
||||
req: &super::PingRequest,
|
||||
_req_fds: &mut VecDeque<OwnedFd>,
|
||||
res: &mut super::PingResponse,
|
||||
) -> anyhow::Result<()> {
|
||||
let (req, res) = (&req.payload, &mut res.payload);
|
||||
copy_slice(&req.echo).to(&mut res.echo);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn supply_keypair(
|
||||
&mut self,
|
||||
req: &super::SupplyKeypairRequest,
|
||||
req_fds: &mut VecDeque<OwnedFd>,
|
||||
res: &mut super::SupplyKeypairResponse,
|
||||
) -> anyhow::Result<()> {
|
||||
let outcome: Result<(), SupplyKeypairError> = run(|| {
|
||||
// Acquire the file descriptors
|
||||
let mut sk_io = FdIo(
|
||||
req_fds
|
||||
.front()
|
||||
.context("First file descriptor, secret key, missing.")
|
||||
.einvalid_req()?,
|
||||
);
|
||||
let mut pk_io = FdIo(
|
||||
req_fds
|
||||
.get(1)
|
||||
.context("Second file descriptor, public key, missing.")
|
||||
.einvalid_req()?,
|
||||
);
|
||||
|
||||
// Actually read the secrets
|
||||
let mut sk = crate::protocol::SSk::zero();
|
||||
sk_io.read_exact_til_end(sk.secret_mut()).einvalid_req()?;
|
||||
|
||||
let mut pk = crate::protocol::SPk::zero();
|
||||
pk_io.read_exact_til_end(pk.borrow_mut()).einvalid_req()?;
|
||||
|
||||
// Retrieve the construction site
|
||||
let construction_site = self.app_server_mut().crypto_site.borrow_mut();
|
||||
|
||||
// Retrieve the builder
|
||||
use rosenpass_util::build::ConstructionSite as C;
|
||||
let maybe_builder = match construction_site {
|
||||
C::Builder(builder) => Some(builder),
|
||||
C::Product(_) => None,
|
||||
C::Void => {
|
||||
return Err(anyhow::Error::msg("CryptoServer construction side is void"))
|
||||
.einternal();
|
||||
}
|
||||
};
|
||||
|
||||
// Retrieve a reference to the keypair
|
||||
let Some(BuildCryptoServer {
|
||||
ref mut keypair, ..
|
||||
}) = maybe_builder
|
||||
else {
|
||||
return Err(anyhow::Error::msg("CryptoServer already built")).ealready_supplied();
|
||||
};
|
||||
|
||||
// Supply the keypair to the CryptoServer
|
||||
keypair
|
||||
.insert(crate::protocol::Keypair { sk, pk })
|
||||
.discard_result();
|
||||
|
||||
// Actually construct the CryptoServer
|
||||
construction_site
|
||||
.erect()
|
||||
.map_err(|e| anyhow::Error::msg(format!("Error erecting the CryptoServer {e:?}")))
|
||||
.einternal()?;
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
// Handle errors
|
||||
use supply_keypair_response_status as status;
|
||||
let status = match outcome {
|
||||
Ok(()) => status::OK,
|
||||
Err(e) => {
|
||||
let lvl = match e.status {
|
||||
status::INTERNAL_ERROR => log::Level::Warn,
|
||||
_ => log::Level::Debug,
|
||||
};
|
||||
|
||||
log::log!(
|
||||
lvl,
|
||||
"Error while processing API Request.\n Request: {:?}\n Error: {:?}",
|
||||
req,
|
||||
e.cause
|
||||
);
|
||||
|
||||
if e.status == status::INTERNAL_ERROR {
|
||||
return Err(e.cause);
|
||||
}
|
||||
|
||||
e.status
|
||||
}
|
||||
};
|
||||
|
||||
res.payload.status = status;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn add_listen_socket(
|
||||
&mut self,
|
||||
_req: &super::boilerplate::AddListenSocketRequest,
|
||||
req_fds: &mut VecDeque<OwnedFd>,
|
||||
res: &mut super::boilerplate::AddListenSocketResponse,
|
||||
) -> anyhow::Result<()> {
|
||||
// Retrieve file descriptor
|
||||
let sock_res = run(|| -> anyhow::Result<mio::net::UdpSocket> {
|
||||
let sock = req_fds
|
||||
.pop_front()
|
||||
.context("Invalid request – socket missing.")?;
|
||||
// TODO: We need to have this outside linux
|
||||
#[cfg(target_os = "linux")]
|
||||
rosenpass_util::fd::GetSocketProtocol::demand_udp_socket(&sock)?;
|
||||
let sock = std::net::UdpSocket::from(sock);
|
||||
sock.set_nonblocking(true)?;
|
||||
mio::net::UdpSocket::from_std(sock).ok()
|
||||
});
|
||||
|
||||
let sock = match sock_res {
|
||||
Ok(sock) => sock,
|
||||
Err(e) => {
|
||||
log::debug!("Error processing AddListenSocket API request: {e:?}");
|
||||
res.payload.status = add_listen_socket_response_status::INVALID_REQUEST;
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
|
||||
// Register socket
|
||||
let reg_result = self.app_server_mut().register_listen_socket(sock);
|
||||
|
||||
if let Err(internal_error) = reg_result {
|
||||
log::warn!("Internal error processing AddListenSocket API request: {internal_error:?}");
|
||||
res.payload.status = add_listen_socket_response_status::INTERNAL_ERROR;
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
res.payload.status = add_listen_socket_response_status::OK;
|
||||
Ok(())
|
||||
}
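A hedged sketch of the caller side: the file descriptor this handler pops from the request is an ordinary UDP socket, which could be prepared roughly like this before being attached to the AddListenSocket request (names are illustrative, not part of the API).

```rust
use std::os::fd::OwnedFd;

/// Hypothetical helper: bind a UDP socket and hand its fd to the API request.
/// The server side takes care of switching it to non-blocking mode and
/// registering it with mio.
fn prepare_listen_socket() -> anyhow::Result<OwnedFd> {
    let sock = std::net::UdpSocket::bind("0.0.0.0:0")?;
    Ok(OwnedFd::from(sock))
}
```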
|
||||
|
||||
fn add_psk_broker(
|
||||
&mut self,
|
||||
_req: &super::boilerplate::AddPskBrokerRequest,
|
||||
req_fds: &mut VecDeque<OwnedFd>,
|
||||
res: &mut super::boilerplate::AddPskBrokerResponse,
|
||||
) -> anyhow::Result<()> {
|
||||
// Retrieve file descriptor
|
||||
let sock_res = run(|| {
|
||||
let sock = req_fds
|
||||
.pop_front()
|
||||
.context("Invalid request – socket missing.")?;
|
||||
mio::net::UnixStream::from_fd(sock)
|
||||
});
|
||||
|
||||
// Handle errors
|
||||
let sock = match sock_res {
|
||||
Ok(sock) => sock,
|
||||
Err(e) => {
|
||||
log::debug!(
|
||||
"Request found to be invalid while processing AddPskBroker API request: {e:?}"
|
||||
);
|
||||
res.payload.status = add_psk_broker_response_status::INVALID_REQUEST;
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
|
||||
// Register Socket
|
||||
let client = Box::new(MioBrokerClient::new(sock));
|
||||
|
||||
// Workaround: The broker code is currently impressively overcomplicated. Brokers are
|
||||
// stored in a hash map but the hash map key used is just a counter so a vector could
|
||||
// have been used. Broker configuration is abstracted, different peers can have different
|
||||
// brokers but there is no facility to add multiple brokers in practice. The broker index
|
||||
// uses a `Public` wrapper without actually holding any cryptographic data. Even the broker
|
||||
// configuration uses a trait abstraction for no discernible reason and a lot of the code
|
||||
// introduces pointless, single-field wrapper structs.
|
||||
// We should use an implement-what-is-actually-needed strategy next time.
|
||||
// The Broker code needs to be slimmed down, the right direction to go is probably to
|
||||
// just add event and capability support to the API and use the API to deliver OSK events.
|
||||
//
|
||||
// For now, we just replace the latest broker.
|
||||
let erase_ptr = {
|
||||
use crate::app_server::BrokerStorePtr;
|
||||
//
|
||||
use rosenpass_secret_memory::Public;
|
||||
use zerocopy::AsBytes;
|
||||
(self.app_server().brokers.store.len() - 1)
|
||||
.apply(|x| x as u64)
|
||||
.apply(|x| Public::from_slice(x.as_bytes()))
|
||||
.apply(BrokerStorePtr)
|
||||
};
|
||||
|
||||
let register_result = run(|| {
|
||||
let srv = self.app_server_mut();
|
||||
srv.unregister_broker(erase_ptr)?;
|
||||
srv.register_broker(client)
|
||||
});
|
||||
|
||||
if let Err(e) = register_result {
|
||||
log::warn!("Internal error while processing AddPskBroker API request: {e:?}");
|
||||
res.payload.status = add_psk_broker_response_status::INTERNAL_ERROR;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
res.payload.status = add_psk_broker_response_status::OK;
|
||||
Ok(())
|
||||
}
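A hedged sketch of how a caller might create the socket this handler expects: one end of a Unix socket pair stays with the PSK broker process, the other end is sent to rosenpass as the request's file descriptor. The helper name is illustrative only.

```rust
use std::os::fd::OwnedFd;
use std::os::unix::net::UnixStream;

/// Hypothetical helper: create the socket pair for an AddPskBroker request.
fn prepare_psk_broker_socket() -> anyhow::Result<(UnixStream, OwnedFd)> {
    // `broker_end` would be handed to the PSK broker implementation;
    // the OwnedFd would be attached to the AddPskBroker API request.
    let (broker_end, rosenpass_end) = UnixStream::pair()?;
    Ok((broker_end, OwnedFd::from(rosenpass_end)))
}
```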
|
||||
}
|
||||
rosenpass/src/api/boilerplate/byte_slice_ext.rs (new file, 262 lines)
@@ -0,0 +1,262 @@
|
||||
use zerocopy::{ByteSlice, Ref};
|
||||
|
||||
use rosenpass_util::zerocopy::{RefMaker, ZerocopySliceExt};
|
||||
|
||||
use super::{
|
||||
PingRequest, PingResponse, RawMsgType, RefMakerRawMsgTypeExt, RequestMsgType, RequestRef,
|
||||
ResponseMsgType, ResponseRef, SupplyKeypairRequest, SupplyKeypairResponse,
|
||||
};
|
||||
|
||||
pub trait ByteSliceRefExt: ByteSlice {
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker].
|
||||
fn msg_type_maker(self) -> RefMaker<Self, RawMsgType> {
|
||||
self.zk_ref_maker()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker] and
|
||||
/// [RefMakerRawMsgTypeExt::parse_request_msg_type]
|
||||
fn request_msg_type(self) -> anyhow::Result<RequestMsgType> {
|
||||
self.msg_type_maker().parse_request_msg_type()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker],
|
||||
/// [RefMaker::from_prefix], and
|
||||
/// [RefMakerRawMsgTypeExt::parse_request_msg_type].
|
||||
fn request_msg_type_from_prefix(self) -> anyhow::Result<RequestMsgType> {
|
||||
self.msg_type_maker()
|
||||
.from_prefix()?
|
||||
.parse_request_msg_type()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker],
|
||||
/// [RefMaker::from_suffix], and
|
||||
/// [RefMakerRawMsgTypeExt::parse_request_msg_type].
|
||||
fn request_msg_type_from_suffix(self) -> anyhow::Result<RequestMsgType> {
|
||||
self.msg_type_maker()
|
||||
.from_suffix()?
|
||||
.parse_request_msg_type()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker],
|
||||
/// and [RefMakerRawMsgTypeExt::parse_response_msg_type].
|
||||
fn response_msg_type(self) -> anyhow::Result<ResponseMsgType> {
|
||||
self.msg_type_maker().parse_response_msg_type()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker],
|
||||
/// [RefMaker::from_prefix], and
|
||||
/// [RefMakerRawMsgTypeExt::parse_response_msg_type].
|
||||
fn response_msg_type_from_prefix(self) -> anyhow::Result<ResponseMsgType> {
|
||||
self.msg_type_maker()
|
||||
.from_prefix()?
|
||||
.parse_response_msg_type()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker],
|
||||
/// [RefMaker::from_suffix], and
|
||||
/// [RefMakerRawMsgTypeExt::parse_response_msg_type].
|
||||
fn response_msg_type_from_suffix(self) -> anyhow::Result<ResponseMsgType> {
|
||||
self.msg_type_maker()
|
||||
.from_suffix()?
|
||||
.parse_response_msg_type()
|
||||
}
|
||||
|
||||
/// Shorthand for the use of [RequestRef::parse] in chaining.
|
||||
fn parse_request(self) -> anyhow::Result<RequestRef<Self>> {
|
||||
RequestRef::parse(self)
|
||||
}
|
||||
|
||||
/// Shorthand for the use of [RequestRef::parse_from_prefix] in chaining.
|
||||
fn parse_request_from_prefix(self) -> anyhow::Result<RequestRef<Self>> {
|
||||
RequestRef::parse_from_prefix(self)
|
||||
}
|
||||
|
||||
/// Shorthand for the use of [RequestRef::parse_from_suffix] in chaining.
|
||||
fn parse_request_from_suffix(self) -> anyhow::Result<RequestRef<Self>> {
|
||||
RequestRef::parse_from_suffix(self)
|
||||
}
|
||||
|
||||
/// Shorthand for the use of [ResponseRef::parse] in chaining.
|
||||
fn parse_response(self) -> anyhow::Result<ResponseRef<Self>> {
|
||||
ResponseRef::parse(self)
|
||||
}
|
||||
|
||||
/// Shorthand for the use of [ResponseRef::parse_from_prefix] in chaining.
|
||||
fn parse_response_from_prefix(self) -> anyhow::Result<ResponseRef<Self>> {
|
||||
ResponseRef::parse_from_prefix(self)
|
||||
}
|
||||
|
||||
/// Shorthand for the use of [ResponseRef::parse_from_suffix] in chaining.
|
||||
fn parse_response_from_suffix(self) -> anyhow::Result<ResponseRef<Self>> {
|
||||
ResponseRef::parse_from_suffix(self)
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker].
|
||||
fn ping_request_maker(self) -> RefMaker<Self, PingRequest> {
|
||||
self.zk_ref_maker()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn ping_request(self) -> anyhow::Result<Ref<Self, PingRequest>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn ping_request_from_prefix(self) -> anyhow::Result<Ref<Self, PingRequest>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn ping_request_from_suffix(self) -> anyhow::Result<Ref<Self, PingRequest>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker].
|
||||
fn ping_response_maker(self) -> RefMaker<Self, PingResponse> {
|
||||
self.zk_ref_maker()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn ping_response(self) -> anyhow::Result<Ref<Self, PingResponse>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn ping_response_from_prefix(self) -> anyhow::Result<Ref<Self, PingResponse>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn ping_response_from_suffix(self) -> anyhow::Result<Ref<Self, PingResponse>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn supply_keypair_request(self) -> anyhow::Result<Ref<Self, SupplyKeypairRequest>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn supply_keypair_request_from_prefix(self) -> anyhow::Result<Ref<Self, SupplyKeypairRequest>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn supply_keypair_request_from_suffix(self) -> anyhow::Result<Ref<Self, SupplyKeypairRequest>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker].
|
||||
fn supply_keypair_response_maker(self) -> RefMaker<Self, SupplyKeypairResponse> {
|
||||
self.zk_ref_maker()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn supply_keypair_response(self) -> anyhow::Result<Ref<Self, SupplyKeypairResponse>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn supply_keypair_response_from_prefix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, SupplyKeypairResponse>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn supply_keypair_response_from_suffix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, SupplyKeypairResponse>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn add_listen_socket_request(self) -> anyhow::Result<Ref<Self, super::AddListenSocketRequest>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn add_listen_socket_request_from_prefix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddListenSocketRequest>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn add_listen_socket_request_from_suffix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddListenSocketRequest>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker].
|
||||
fn add_listen_socket_response_maker(self) -> RefMaker<Self, super::AddListenSocketResponse> {
|
||||
self.zk_ref_maker()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn add_listen_socket_response(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddListenSocketResponse>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn add_listen_socket_response_from_prefix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddListenSocketResponse>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn add_listen_socket_response_from_suffix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddListenSocketResponse>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn add_psk_broker_request(self) -> anyhow::Result<Ref<Self, super::AddPskBrokerRequest>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn add_psk_broker_request_from_prefix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddPskBrokerRequest>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn add_psk_broker_request_from_suffix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddPskBrokerRequest>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_ref_maker].
|
||||
fn add_psk_broker_response_maker(self) -> RefMaker<Self, super::AddPskBrokerResponse> {
|
||||
self.zk_ref_maker()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse].
|
||||
fn add_psk_broker_response(self) -> anyhow::Result<Ref<Self, super::AddPskBrokerResponse>> {
|
||||
self.zk_parse()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_prefix].
|
||||
fn add_psk_broker_response_from_prefix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddPskBrokerResponse>> {
|
||||
self.zk_parse_prefix()
|
||||
}
|
||||
|
||||
/// Shorthand for the typed use of [ZerocopySliceExt::zk_parse_suffix].
|
||||
fn add_psk_broker_response_from_suffix(
|
||||
self,
|
||||
) -> anyhow::Result<Ref<Self, super::AddPskBrokerResponse>> {
|
||||
self.zk_parse_suffix()
|
||||
}
|
||||
}
|
||||
|
||||
impl<B: ByteSlice> ByteSliceRefExt for B {}
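A hedged sketch of the chaining style these shorthands enable, using the same `rosenpass::api` paths as the surrounding doctests:

```rust
use zerocopy::AsBytes;

use rosenpass::api::{ByteSliceRefExt, PingRequest, RequestMsgType, PING_REQUEST};

fn demo() -> anyhow::Result<()> {
    // Serialize a PingRequest and read it back straight off the byte slice.
    let msg = PingRequest::new([0u8; 256]);
    let buf = msg.as_bytes();

    assert_eq!(buf.request_msg_type_from_prefix()?, RequestMsgType::Ping);

    let parsed = buf.ping_request()?;
    let t = parsed.msg_type; // copy out of the packed struct before asserting
    assert_eq!(t, PING_REQUEST);
    Ok(())
}
```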
|
||||
rosenpass/src/api/boilerplate/message_trait.rs (new file, 75 lines)
@@ -0,0 +1,75 @@
|
||||
use zerocopy::{ByteSliceMut, Ref};
|
||||
|
||||
use rosenpass_util::zerocopy::RefMaker;
|
||||
|
||||
use super::RawMsgType;
|
||||
|
||||
/// Trait implemented by all the Rosenpass API message types.
|
||||
///
|
||||
/// Implemented by the full message type including the message envelope; e.g.
|
||||
/// [crate::api::PingRequest] but not by [crate::api::PingRequestPayload].
|
||||
pub trait Message {
|
||||
/// The payload this API message contains. E.g. this is [crate::api::PingRequestPayload] for [crate::api::PingRequest].
|
||||
type Payload;
|
||||
/// Either [crate::api::RequestMsgType] or [crate::api::ResponseMsgType]
|
||||
type MessageClass: Into<RawMsgType>;
|
||||
/// The specific message type in the [Self::MessageClass].
|
||||
/// E.g. this is [crate::api::RequestMsgType::Ping] for [crate::api::PingRequest]
|
||||
const MESSAGE_TYPE: Self::MessageClass;
|
||||
|
||||
/// Wraps the payload into the envelope
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// See [crate::api::PingRequest::from_payload]
|
||||
fn from_payload(payload: Self::Payload) -> Self;
|
||||
/// Initialize the message;
|
||||
/// just sets the message type [crate::api::Envelope::msg_type].
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// See [crate::api::PingRequest::init]
|
||||
fn init(&mut self);
|
||||
/// Initialize the message from a raw buffer: Zeroize the buffer and then call [Self::init].
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// See [crate::api::PingRequest::setup]
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>>;
|
||||
}
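A hedged sketch of the trait surface, using `PingRequest` (declared later in this changeset) as the implementor; paths assume the usual `rosenpass::api` re-exports:

```rust
use rosenpass::api::{Message, PingRequest, PingRequestPayload, RequestMsgType, PING_REQUEST};

fn demo() {
    // Wrap a payload into the envelope; from_payload also sets msg_type.
    let req = PingRequest::from_payload(PingRequestPayload { echo: [0u8; 256] });

    let t = req.msg_type; // copy out of the packed struct before comparing
    assert_eq!(t, PING_REQUEST);
    assert_eq!(PingRequest::MESSAGE_TYPE, RequestMsgType::Ping);
}
```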
|
||||
|
||||
/// Additional convenience functions for working with [rosenpass_util::zerocopy::RefMaker]
|
||||
pub trait ZerocopyResponseMakerSetupMessageExt<B, T> {
|
||||
fn setup_msg(self) -> anyhow::Result<Ref<B, T>>;
|
||||
}
|
||||
|
||||
impl<B, T> ZerocopyResponseMakerSetupMessageExt<B, T> for RefMaker<B, T>
|
||||
where
|
||||
B: ByteSliceMut,
|
||||
T: Message,
|
||||
{
|
||||
/// Initialize the message using [Message::setup].
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use rosenpass::api::{
|
||||
/// PingRequest, ZerocopyResponseMakerSetupMessageExt, PING_REQUEST,
|
||||
/// };
|
||||
/// use rosenpass_util::zerocopy::RefMaker;
|
||||
/// use std::mem::size_of;
|
||||
///
|
||||
/// let mut buf = [0u8; { size_of::<PingRequest>() }];
|
||||
///
|
||||
/// let rm = RefMaker::<&mut [u8], PingRequest>::new(&mut buf);
|
||||
/// let msg: zerocopy::Ref<_, PingRequest> = rm.setup_msg()?;
|
||||
///
|
||||
/// let t = msg.msg_type; // Deal with unaligned read
|
||||
/// assert_eq!(t, PING_REQUEST);
|
||||
///
|
||||
/// Ok::<(), anyhow::Error>(())
|
||||
/// ```
|
||||
fn setup_msg(self) -> anyhow::Result<Ref<B, T>> {
|
||||
T::setup(self.into_buf())
|
||||
}
|
||||
}
|
||||
rosenpass/src/api/boilerplate/message_type.rs (new file, 180 lines)
@@ -0,0 +1,180 @@
|
||||
use hex_literal::hex;
|
||||
use rosenpass_util::zerocopy::RefMaker;
|
||||
use zerocopy::ByteSlice;
|
||||
|
||||
use crate::RosenpassError::{self, InvalidApiMessageType};
|
||||
|
||||
pub type RawMsgType = u128;
|
||||
|
||||
// constants generated by gen-ipc-msg-types:
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Ping Request
|
||||
pub const PING_REQUEST: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("2397 3ecc c441 704d 0b02 ea31 45d3 4999"));
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Ping Response
|
||||
pub const PING_RESPONSE: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("4ec7 f6f0 2bbc ba64 48f1 da14 c7cf 0260"));
|
||||
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Supply Keypair Request
|
||||
const SUPPLY_KEYPAIR_REQUEST: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("ac91 a5a6 4f4b 21d0 ac7f 9b55 74f7 3529"));
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Supply Keypair Response
|
||||
const SUPPLY_KEYPAIR_RESPONSE: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("f2dc 49bd e261 5f10 40b7 3c16 ec61 edb9"));
|
||||
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Add Listen Socket Request
|
||||
const ADD_LISTEN_SOCKET_REQUEST: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("3f21 434f 87cc a08c 02c4 61e4 0816 c7da"));
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Add Listen Socket Response
|
||||
const ADD_LISTEN_SOCKET_RESPONSE: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("45d5 0f0d 93f0 6105 98f2 9469 5dfd 5f36"));
|
||||
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Add Psk Broker Request
|
||||
const ADD_PSK_BROKER_REQUEST: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("d798 b8dc bd61 5cab 8df1 c63d e4eb a2d1"));
|
||||
// hash domain hash of: Rosenpass IPC API -> Rosenpass Protocol Server -> Add Psk Broker Response
|
||||
const ADD_PSK_BROKER_RESPONSE: RawMsgType =
|
||||
RawMsgType::from_le_bytes(hex!("bd25 e418 ffb0 6930 248b 217e 2fae e353"));
|
||||
|
||||
/// Message properties global to the message type
|
||||
pub trait MessageAttributes {
|
||||
/// Get the size of the message
|
||||
///
|
||||
/// # Examples
|
||||
fn message_size(&self) -> usize;
|
||||
}
|
||||
|
||||
/// API request message types as an enum
|
||||
#[derive(Hash, PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Copy)]
|
||||
pub enum RequestMsgType {
|
||||
Ping,
|
||||
SupplyKeypair,
|
||||
AddListenSocket,
|
||||
AddPskBroker,
|
||||
}
|
||||
|
||||
/// API response message types as an enum
|
||||
#[derive(Hash, PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Copy)]
|
||||
pub enum ResponseMsgType {
|
||||
Ping,
|
||||
SupplyKeypair,
|
||||
AddListenSocket,
|
||||
AddPskBroker,
|
||||
}
|
||||
|
||||
impl MessageAttributes for RequestMsgType {
|
||||
fn message_size(&self) -> usize {
|
||||
match self {
|
||||
Self::Ping => std::mem::size_of::<super::PingRequest>(),
|
||||
Self::SupplyKeypair => std::mem::size_of::<super::SupplyKeypairRequest>(),
|
||||
Self::AddListenSocket => std::mem::size_of::<super::AddListenSocketRequest>(),
|
||||
Self::AddPskBroker => std::mem::size_of::<super::AddPskBrokerRequest>(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MessageAttributes for ResponseMsgType {
|
||||
fn message_size(&self) -> usize {
|
||||
match self {
|
||||
Self::Ping => std::mem::size_of::<super::PingResponse>(),
|
||||
Self::SupplyKeypair => std::mem::size_of::<super::SupplyKeypairResponse>(),
|
||||
Self::AddListenSocket => std::mem::size_of::<super::AddListenSocketResponse>(),
|
||||
Self::AddPskBroker => std::mem::size_of::<super::AddPskBrokerResponse>(),
|
||||
}
|
||||
}
|
||||
}
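In other words, `message_size` simply reports the size of the corresponding wire struct, so it can be used to size receive buffers per message type; a hedged sketch:

```rust
use rosenpass::api::{MessageAttributes, PingRequest, PingResponse, RequestMsgType, ResponseMsgType};

fn demo() {
    assert_eq!(
        RequestMsgType::Ping.message_size(),
        std::mem::size_of::<PingRequest>()
    );
    assert_eq!(
        ResponseMsgType::Ping.message_size(),
        std::mem::size_of::<PingResponse>()
    );
}
```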
|
||||
|
||||
impl TryFrom<RawMsgType> for RequestMsgType {
|
||||
type Error = RosenpassError;
|
||||
|
||||
fn try_from(value: RawMsgType) -> Result<Self, Self::Error> {
|
||||
use RequestMsgType as E;
|
||||
Ok(match value {
|
||||
self::PING_REQUEST => E::Ping,
|
||||
self::SUPPLY_KEYPAIR_REQUEST => E::SupplyKeypair,
|
||||
self::ADD_LISTEN_SOCKET_REQUEST => E::AddListenSocket,
|
||||
self::ADD_PSK_BROKER_REQUEST => E::AddPskBroker,
|
||||
_ => return Err(InvalidApiMessageType(value)),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<RequestMsgType> for RawMsgType {
|
||||
fn from(val: RequestMsgType) -> Self {
|
||||
use RequestMsgType as E;
|
||||
match val {
|
||||
E::Ping => self::PING_REQUEST,
|
||||
E::SupplyKeypair => self::SUPPLY_KEYPAIR_REQUEST,
|
||||
E::AddListenSocket => self::ADD_LISTEN_SOCKET_REQUEST,
|
||||
E::AddPskBroker => self::ADD_PSK_BROKER_REQUEST,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<RawMsgType> for ResponseMsgType {
|
||||
type Error = RosenpassError;
|
||||
|
||||
fn try_from(value: RawMsgType) -> Result<Self, Self::Error> {
|
||||
use ResponseMsgType as E;
|
||||
Ok(match value {
|
||||
self::PING_RESPONSE => E::Ping,
|
||||
self::SUPPLY_KEYPAIR_RESPONSE => E::SupplyKeypair,
|
||||
self::ADD_LISTEN_SOCKET_RESPONSE => E::AddListenSocket,
|
||||
self::ADD_PSK_BROKER_RESPONSE => E::AddPskBroker,
|
||||
_ => return Err(InvalidApiMessageType(value)),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ResponseMsgType> for RawMsgType {
|
||||
fn from(val: ResponseMsgType) -> Self {
|
||||
use ResponseMsgType as E;
|
||||
match val {
|
||||
E::Ping => self::PING_RESPONSE,
|
||||
E::SupplyKeypair => self::SUPPLY_KEYPAIR_RESPONSE,
|
||||
E::AddListenSocket => self::ADD_LISTEN_SOCKET_RESPONSE,
|
||||
E::AddPskBroker => self::ADD_PSK_BROKER_RESPONSE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension trait for [RawMsgType].
|
||||
///
|
||||
/// We are using an extension trait rather than just using methods
|
||||
/// because [RawMsgType] is a type alias, so we cannot define inherent methods
|
||||
/// on it.
|
||||
pub trait RawMsgTypeExt {
|
||||
/// Try to convert this to a [RequestMsgType]; alias for the appropriate [TryFrom]
|
||||
/// implementation
|
||||
fn into_request_msg_type(self) -> Result<RequestMsgType, RosenpassError>;
|
||||
/// Try to convert this to a [ResponseMsgType]; alias for the appropriate [TryFrom]
|
||||
/// implementation
|
||||
fn into_response_msg_type(self) -> Result<ResponseMsgType, RosenpassError>;
|
||||
}
|
||||
|
||||
impl RawMsgTypeExt for RawMsgType {
|
||||
fn into_request_msg_type(self) -> Result<RequestMsgType, RosenpassError> {
|
||||
self.try_into()
|
||||
}
|
||||
|
||||
fn into_response_msg_type(self) -> Result<ResponseMsgType, RosenpassError> {
|
||||
self.try_into()
|
||||
}
|
||||
}
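A hedged sketch of the round trip between the raw u128 wire constants and the typed enums, using the From/TryFrom impls and the extension trait above:

```rust
use rosenpass::api::{RawMsgType, RawMsgTypeExt, RequestMsgType, PING_REQUEST};

fn demo() {
    let raw: RawMsgType = RequestMsgType::Ping.into();
    assert_eq!(raw, PING_REQUEST);
    assert!(matches!(raw.into_request_msg_type(), Ok(RequestMsgType::Ping)));
}
```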
|
||||
|
||||
/// Extension trait for [rosenpass_util::zerocopy::RefMaker].
|
||||
pub trait RefMakerRawMsgTypeExt {
|
||||
/// Parse a request message type from bytes
|
||||
fn parse_request_msg_type(self) -> anyhow::Result<RequestMsgType>;
|
||||
/// Parse a response message type from bytes
|
||||
fn parse_response_msg_type(self) -> anyhow::Result<ResponseMsgType>;
|
||||
}
|
||||
|
||||
impl<B: ByteSlice> RefMakerRawMsgTypeExt for RefMaker<B, RawMsgType> {
|
||||
fn parse_request_msg_type(self) -> anyhow::Result<RequestMsgType> {
|
||||
Ok(self.parse()?.read().try_into()?)
|
||||
}
|
||||
|
||||
fn parse_response_msg_type(self) -> anyhow::Result<ResponseMsgType> {
|
||||
Ok(self.parse()?.read().try_into()?)
|
||||
}
|
||||
}
|
||||
rosenpass/src/api/boilerplate/mod.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
//! Boring, repetitive code related to message parsing for the API.
//!
//! Most of this should be automatically generated through some derive macro at some point.

mod byte_slice_ext;
mod message_trait;
mod message_type;
mod payload;
mod request_ref;
mod request_response;
mod response_ref;
mod server;

pub use byte_slice_ext::*;
pub use message_trait::*;
pub use message_type::*;
pub use payload::*;
pub use request_ref::*;
pub use request_response::*;
pub use response_ref::*;
pub use server::*;
rosenpass/src/api/boilerplate/payload.rs (new file, 398 lines)
@@ -0,0 +1,398 @@
|
||||
use rosenpass_util::zerocopy::ZerocopyMutSliceExt;
|
||||
use zerocopy::{AsBytes, ByteSliceMut, FromBytes, FromZeroes, Ref};
|
||||
|
||||
use super::{Message, RawMsgType, RequestMsgType, ResponseMsgType};
|
||||
|
||||
/// Size required to fit any request message in binary form
|
||||
pub const MAX_REQUEST_LEN: usize = 2500; // TODO fix this
|
||||
/// Size required to fit any response message in binary form
|
||||
pub const MAX_RESPONSE_LEN: usize = 2500; // TODO fix this
|
||||
/// Maximum number of file descriptors that can be sent in a request.
|
||||
pub const MAX_REQUEST_FDS: usize = 2;
|
||||
|
||||
/// Message envelope for API messages
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct Envelope<M: AsBytes + FromBytes> {
|
||||
/// Which message this is
|
||||
pub msg_type: RawMsgType,
|
||||
/// The actual payload
|
||||
pub payload: M,
|
||||
}
|
||||
|
||||
/// Message envelope for API requests
|
||||
pub type RequestEnvelope<M> = Envelope<M>;
|
||||
/// Message envelope for API responses
|
||||
pub type ResponseEnvelope<M> = Envelope<M>;
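Because the envelope is `#[repr(packed)]`, an API message on the wire is exactly the 16-byte message type tag followed by the raw payload bytes; a hedged sketch of that size relation (assuming the usual `rosenpass::api` re-exports):

```rust
use std::mem::size_of;

use rosenpass::api::{PingRequest, PingRequestPayload, RawMsgType};

fn demo() {
    // No padding anywhere: tag plus payload.
    assert_eq!(
        size_of::<PingRequest>(),
        size_of::<RawMsgType>() + size_of::<PingRequestPayload>()
    );
}
```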
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct PingRequestPayload {
|
||||
/// Randomly generated connection id
|
||||
pub echo: [u8; 256],
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type PingRequest = RequestEnvelope<PingRequestPayload>;
|
||||
|
||||
impl PingRequest {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new(echo: [u8; 256]) -> Self {
|
||||
Self::from_payload(PingRequestPayload { echo })
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for PingRequest {
|
||||
type Payload = PingRequestPayload;
|
||||
type MessageClass = RequestMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = RequestMsgType::Ping;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct PingResponsePayload {
|
||||
/// Randomly generated connection id
|
||||
pub echo: [u8; 256],
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type PingResponse = ResponseEnvelope<PingResponsePayload>;
|
||||
|
||||
impl PingResponse {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new(echo: [u8; 256]) -> Self {
|
||||
Self::from_payload(PingResponsePayload { echo })
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for PingResponse {
|
||||
type Payload = PingResponsePayload;
|
||||
type MessageClass = ResponseMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = ResponseMsgType::Ping;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct SupplyKeypairRequestPayload {}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type SupplyKeypairRequest = RequestEnvelope<SupplyKeypairRequestPayload>;
|
||||
|
||||
impl Default for SupplyKeypairRequest {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl SupplyKeypairRequest {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new() -> Self {
|
||||
Self::from_payload(SupplyKeypairRequestPayload {})
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for SupplyKeypairRequest {
|
||||
type Payload = SupplyKeypairRequestPayload;
|
||||
type MessageClass = RequestMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = RequestMsgType::SupplyKeypair;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub mod supply_keypair_response_status {
|
||||
#[allow(missing_docs)]
|
||||
pub const OK: u128 = 0;
|
||||
#[allow(missing_docs)]
|
||||
pub const KEYPAIR_ALREADY_SUPPLIED: u128 = 1;
|
||||
/// TODO: This is not actually part of the API. Remove.
|
||||
#[allow(missing_docs)]
|
||||
pub const INTERNAL_ERROR: u128 = 2;
|
||||
#[allow(missing_docs)]
|
||||
pub const INVALID_REQUEST: u128 = 3;
|
||||
/// TODO: Deprecated, remove
|
||||
#[allow(missing_docs)]
|
||||
pub const IO_ERROR: u128 = 4;
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct SupplyKeypairResponsePayload {
|
||||
#[allow(missing_docs)]
|
||||
pub status: u128,
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type SupplyKeypairResponse = ResponseEnvelope<SupplyKeypairResponsePayload>;
|
||||
|
||||
impl SupplyKeypairResponse {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new(status: u128) -> Self {
|
||||
Self::from_payload(SupplyKeypairResponsePayload { status })
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for SupplyKeypairResponse {
|
||||
type Payload = SupplyKeypairResponsePayload;
|
||||
type MessageClass = ResponseMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = ResponseMsgType::SupplyKeypair;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct AddListenSocketRequestPayload {}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type AddListenSocketRequest = RequestEnvelope<AddListenSocketRequestPayload>;
|
||||
|
||||
impl Default for AddListenSocketRequest {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl AddListenSocketRequest {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new() -> Self {
|
||||
Self::from_payload(AddListenSocketRequestPayload {})
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for AddListenSocketRequest {
|
||||
type Payload = AddListenSocketRequestPayload;
|
||||
type MessageClass = RequestMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = RequestMsgType::AddListenSocket;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub mod add_listen_socket_response_status {
|
||||
#[allow(missing_docs)]
|
||||
pub const OK: u128 = 0;
|
||||
#[allow(missing_docs)]
|
||||
pub const INVALID_REQUEST: u128 = 1;
|
||||
#[allow(missing_docs)]
|
||||
pub const INTERNAL_ERROR: u128 = 2;
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct AddListenSocketResponsePayload {
|
||||
pub status: u128,
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type AddListenSocketResponse = ResponseEnvelope<AddListenSocketResponsePayload>;
|
||||
|
||||
impl AddListenSocketResponse {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new(status: u128) -> Self {
|
||||
Self::from_payload(AddListenSocketResponsePayload { status })
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for AddListenSocketResponse {
|
||||
type Payload = AddListenSocketResponsePayload;
|
||||
type MessageClass = ResponseMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = ResponseMsgType::AddListenSocket;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct AddPskBrokerRequestPayload {}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type AddPskBrokerRequest = RequestEnvelope<AddPskBrokerRequestPayload>;
|
||||
|
||||
impl Default for AddPskBrokerRequest {
|
||||
#[allow(missing_docs)]
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl AddPskBrokerRequest {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new() -> Self {
|
||||
Self::from_payload(AddPskBrokerRequestPayload {})
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for AddPskBrokerRequest {
|
||||
type Payload = AddPskBrokerRequestPayload;
|
||||
type MessageClass = RequestMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = RequestMsgType::AddPskBroker;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub mod add_psk_broker_response_status {
|
||||
#[allow(missing_docs)]
|
||||
pub const OK: u128 = 0;
|
||||
#[allow(missing_docs)]
|
||||
pub const INVALID_REQUEST: u128 = 1;
|
||||
#[allow(missing_docs)]
|
||||
pub const INTERNAL_ERROR: u128 = 2;
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[repr(packed)]
|
||||
#[derive(Debug, Copy, Clone, Hash, AsBytes, FromBytes, FromZeroes, PartialEq, Eq)]
|
||||
pub struct AddPskBrokerResponsePayload {
|
||||
pub status: u128,
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub type AddPskBrokerResponse = ResponseEnvelope<AddPskBrokerResponsePayload>;
|
||||
|
||||
impl AddPskBrokerResponse {
|
||||
#[allow(missing_docs)]
|
||||
pub fn new(status: u128) -> Self {
|
||||
Self::from_payload(AddPskBrokerResponsePayload { status })
|
||||
}
|
||||
}
|
||||
|
||||
impl Message for AddPskBrokerResponse {
|
||||
type Payload = AddPskBrokerResponsePayload;
|
||||
type MessageClass = ResponseMsgType;
|
||||
const MESSAGE_TYPE: Self::MessageClass = ResponseMsgType::AddPskBroker;
|
||||
|
||||
fn from_payload(payload: Self::Payload) -> Self {
|
||||
Self {
|
||||
msg_type: Self::MESSAGE_TYPE.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self>> {
|
||||
let mut r: Ref<B, Self> = buf.zk_zeroized()?;
|
||||
r.init();
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn init(&mut self) {
|
||||
self.msg_type = Self::MESSAGE_TYPE.into();
|
||||
}
|
||||
}
|
||||
rosenpass/src/api/boilerplate/request_ref.rs (new file, 192 lines)
@@ -0,0 +1,192 @@
|
||||
use anyhow::ensure;
|
||||
|
||||
use zerocopy::{ByteSlice, ByteSliceMut, Ref};
|
||||
|
||||
use super::{ByteSliceRefExt, MessageAttributes, PingRequest, RequestMsgType};
|
||||
|
||||
/// Helper for producing API message request references, [RequestRef].
|
||||
///
|
||||
/// This is to [RequestRef] as [rosenpass_util::zerocopy::RefMaker] is to
|
||||
/// [zerocopy::Ref].
|
||||
struct RequestRefMaker<B> {
|
||||
buf: B,
|
||||
msg_type: RequestMsgType,
|
||||
}
|
||||
|
||||
impl<B: ByteSlice> RequestRef<B> {
|
||||
/// Produce a [RequestRef] from a raw message buffer,
|
||||
/// reading the type from the buffer
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use zerocopy::AsBytes;
|
||||
///
|
||||
/// use rosenpass::api::{PingRequest, RequestRef, RequestMsgType};
|
||||
///
|
||||
/// let msg = PingRequest::new([0u8; 256]);
|
||||
///
|
||||
/// // TODO: HEISENBUG: This is necessary for some reason to make the rest of the example work
|
||||
/// let typ = msg.msg_type;
|
||||
/// assert_eq!(typ, rosenpass::api::PING_REQUEST);
|
||||
///
|
||||
/// let buf = msg.as_bytes();
|
||||
/// let msg_ref = RequestRef::parse(buf)?;
|
||||
/// assert!(matches!(msg_ref, RequestRef::Ping(_)));
|
||||
///
|
||||
/// assert_eq!(msg_ref.message_type(), RequestMsgType::Ping);
|
||||
///
|
||||
/// assert!(std::ptr::eq(buf, msg_ref.bytes()));
|
||||
///
|
||||
/// Ok::<(), anyhow::Error>(())
|
||||
/// ```
|
||||
pub fn parse(buf: B) -> anyhow::Result<Self> {
|
||||
RequestRefMaker::new(buf)?.parse()
|
||||
}
|
||||
|
||||
/// Produce a [RequestRef] from the prefix of a raw message buffer,
|
||||
/// reading the type from the buffer.
|
||||
pub fn parse_from_prefix(buf: B) -> anyhow::Result<Self> {
|
||||
RequestRefMaker::new(buf)?.from_prefix()?.parse()
|
||||
}
|
||||
|
||||
/// Produce a [RequestRef] from the suffix of a raw message buffer,
|
||||
/// reading the type from the buffer.
|
||||
pub fn parse_from_suffix(buf: B) -> anyhow::Result<Self> {
|
||||
RequestRefMaker::new(buf)?.from_suffix()?.parse()
|
||||
}
|
||||
|
||||
/// Get the message type [Self] contains
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// See [Self::parse]
|
||||
pub fn message_type(&self) -> RequestMsgType {
|
||||
match self {
|
||||
Self::Ping(_) => RequestMsgType::Ping,
|
||||
Self::SupplyKeypair(_) => RequestMsgType::SupplyKeypair,
|
||||
Self::AddListenSocket(_) => RequestMsgType::AddListenSocket,
|
||||
Self::AddPskBroker(_) => RequestMsgType::AddPskBroker,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, PingRequest>> for RequestRef<B> {
|
||||
fn from(v: Ref<B, PingRequest>) -> Self {
|
||||
Self::Ping(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, super::SupplyKeypairRequest>> for RequestRef<B> {
|
||||
fn from(v: Ref<B, super::SupplyKeypairRequest>) -> Self {
|
||||
Self::SupplyKeypair(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, super::AddListenSocketRequest>> for RequestRef<B> {
|
||||
fn from(v: Ref<B, super::AddListenSocketRequest>) -> Self {
|
||||
Self::AddListenSocket(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, super::AddPskBrokerRequest>> for RequestRef<B> {
|
||||
fn from(v: Ref<B, super::AddPskBrokerRequest>) -> Self {
|
||||
Self::AddPskBroker(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B: ByteSlice> RequestRefMaker<B> {
|
||||
fn new(buf: B) -> anyhow::Result<Self> {
|
||||
let msg_type = buf.deref().request_msg_type_from_prefix()?;
|
||||
Ok(Self { buf, msg_type })
|
||||
}
|
||||
|
||||
fn target_size(&self) -> usize {
|
||||
self.msg_type.message_size()
|
||||
}
|
||||
|
||||
fn parse(self) -> anyhow::Result<RequestRef<B>> {
|
||||
Ok(match self.msg_type {
|
||||
RequestMsgType::Ping => RequestRef::Ping(self.buf.ping_request()?),
|
||||
RequestMsgType::SupplyKeypair => {
|
||||
RequestRef::SupplyKeypair(self.buf.supply_keypair_request()?)
|
||||
}
|
||||
RequestMsgType::AddListenSocket => {
|
||||
RequestRef::AddListenSocket(self.buf.add_listen_socket_request()?)
|
||||
}
|
||||
RequestMsgType::AddPskBroker => {
|
||||
RequestRef::AddPskBroker(self.buf.add_psk_broker_request()?)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
fn from_prefix(self) -> anyhow::Result<Self> {
|
||||
self.ensure_fit()?;
|
||||
let point = self.target_size();
|
||||
let Self { buf, msg_type } = self;
|
||||
let (buf, _) = buf.split_at(point);
|
||||
Ok(Self { buf, msg_type })
|
||||
}
|
||||
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
fn from_suffix(self) -> anyhow::Result<Self> {
|
||||
self.ensure_fit()?;
|
||||
let point = self.buf.len() - self.target_size();
|
||||
let Self { buf, msg_type } = self;
|
||||
let (buf, _) = buf.split_at(point);
|
||||
Ok(Self { buf, msg_type })
|
||||
}
|
||||
|
||||
pub fn ensure_fit(&self) -> anyhow::Result<()> {
|
||||
let have = self.buf.len();
|
||||
let need = self.target_size();
|
||||
ensure!(
|
||||
need <= have,
|
||||
"Buffer is undersized at {have} bytes (need {need} bytes)!"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Reference to an API message request, typed as an enum.
|
||||
pub enum RequestRef<B> {
|
||||
Ping(Ref<B, PingRequest>),
|
||||
SupplyKeypair(Ref<B, super::SupplyKeypairRequest>),
|
||||
AddListenSocket(Ref<B, super::AddListenSocketRequest>),
|
||||
AddPskBroker(Ref<B, super::AddPskBrokerRequest>),
|
||||
}
|
||||
|
||||
impl<B> RequestRef<B>
|
||||
where
|
||||
B: ByteSlice,
|
||||
{
|
||||
/// Access the byte data of this reference
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// See [Self::parse].
|
||||
pub fn bytes(&self) -> &[u8] {
|
||||
match self {
|
||||
Self::Ping(r) => r.bytes(),
|
||||
Self::SupplyKeypair(r) => r.bytes(),
|
||||
Self::AddListenSocket(r) => r.bytes(),
|
||||
Self::AddPskBroker(r) => r.bytes(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> RequestRef<B>
|
||||
where
|
||||
B: ByteSliceMut,
|
||||
{
|
||||
/// Access the byte data of this reference; mutably
|
||||
pub fn bytes_mut(&mut self) -> &mut [u8] {
|
||||
match self {
|
||||
Self::Ping(r) => r.bytes_mut(),
|
||||
Self::SupplyKeypair(r) => r.bytes_mut(),
|
||||
Self::AddListenSocket(r) => r.bytes_mut(),
|
||||
Self::AddPskBroker(r) => r.bytes_mut(),
|
||||
}
|
||||
}
|
||||
}
|
||||
rosenpass/src/api/boilerplate/request_response.rs (new file, 208 lines)
@@ -0,0 +1,208 @@
|
||||
use rosenpass_util::zerocopy::{
|
||||
RefMaker, ZerocopyEmancipateExt, ZerocopyEmancipateMutExt, ZerocopySliceExt,
|
||||
};
|
||||
use zerocopy::{ByteSlice, ByteSliceMut, Ref};
|
||||
|
||||
use super::{Message, PingRequest, PingResponse};
|
||||
use super::{RequestRef, ResponseRef, ZerocopyResponseMakerSetupMessageExt};
|
||||
|
||||
/// Extension trait for [Message]s that are request messages
|
||||
pub trait RequestMsg: Sized + Message {
|
||||
/// The response message belonging to this request message
|
||||
type ResponseMsg: ResponseMsg;
|
||||
|
||||
/// Construct a response maker for this particular message
|
||||
fn zk_response_maker<B: ByteSlice>(buf: B) -> RefMaker<B, Self::ResponseMsg> {
|
||||
buf.zk_ref_maker()
|
||||
}
|
||||
|
||||
/// Set up a response (through [Message::setup]) for this request message type
|
||||
fn setup_response<B: ByteSliceMut>(buf: B) -> anyhow::Result<Ref<B, Self::ResponseMsg>> {
|
||||
Self::zk_response_maker(buf).setup_msg()
|
||||
}
|
||||
|
||||
/// Set up a response in a buffer prefix (through [Message::setup]) for this request message type
|
||||
fn setup_response_from_prefix<B: ByteSliceMut>(
|
||||
buf: B,
|
||||
) -> anyhow::Result<Ref<B, Self::ResponseMsg>> {
|
||||
Self::zk_response_maker(buf).from_prefix()?.setup_msg()
|
||||
}
|
||||
|
||||
/// Set up a response in a buffer suffix (through [Message::setup]) for this request message type
|
||||
fn setup_response_from_suffix<B: ByteSliceMut>(
|
||||
buf: B,
|
||||
) -> anyhow::Result<Ref<B, Self::ResponseMsg>> {
|
||||
Self::zk_response_maker(buf).from_suffix()?.setup_msg()
|
||||
}
|
||||
}
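A hedged sketch of `setup_response` in use, assuming `RequestMsg` is re-exported from `rosenpass::api` like the rest of this module:

```rust
use std::mem::size_of;

use rosenpass::api::{PingRequest, PingResponse, RequestMsg, PING_RESPONSE};

fn demo() -> anyhow::Result<()> {
    // Zeroize the buffer and stamp the PingResponse message type into it.
    let mut buf = [0u8; size_of::<PingResponse>()];
    let res = PingRequest::setup_response(&mut buf[..])?;

    let t = res.msg_type; // copy out of the packed struct before comparing
    assert_eq!(t, PING_RESPONSE);
    Ok(())
}
```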
|
||||
|
||||
/// Extension trait for [Message]s that are response messages
|
||||
pub trait ResponseMsg: Message {
|
||||
type RequestMsg: RequestMsg;
|
||||
}
|
||||
|
||||
impl RequestMsg for PingRequest {
|
||||
type ResponseMsg = PingResponse;
|
||||
}
|
||||
|
||||
impl ResponseMsg for PingResponse {
|
||||
type RequestMsg = PingRequest;
|
||||
}
|
||||
|
||||
impl RequestMsg for super::SupplyKeypairRequest {
|
||||
type ResponseMsg = super::SupplyKeypairResponse;
|
||||
}
|
||||
|
||||
impl ResponseMsg for super::SupplyKeypairResponse {
|
||||
type RequestMsg = super::SupplyKeypairRequest;
|
||||
}
|
||||
|
||||
impl RequestMsg for super::AddListenSocketRequest {
|
||||
type ResponseMsg = super::AddListenSocketResponse;
|
||||
}
|
||||
|
||||
impl ResponseMsg for super::AddListenSocketResponse {
|
||||
type RequestMsg = super::AddListenSocketRequest;
|
||||
}
|
||||
|
||||
impl RequestMsg for super::AddPskBrokerRequest {
|
||||
type ResponseMsg = super::AddPskBrokerResponse;
|
||||
}
|
||||
|
||||
impl ResponseMsg for super::AddPskBrokerResponse {
|
||||
type RequestMsg = super::AddPskBrokerRequest;
|
||||
}
|
||||
|
||||
/// Request and response for the [crate::api::RequestMsgType::Ping] message type
|
||||
pub type PingPair<B1, B2> = (Ref<B1, PingRequest>, Ref<B2, PingResponse>);
|
||||
/// Request and response for the [crate::api::RequestMsgType::SupplyKeypair] message type
|
||||
pub type SupplyKeypairPair<B1, B2> = (
|
||||
Ref<B1, super::SupplyKeypairRequest>,
|
||||
Ref<B2, super::SupplyKeypairResponse>,
|
||||
);
|
||||
/// Request and response for the [crate::api::RequestMsgType::AddListenSocket] message type
|
||||
pub type AddListenSocketPair<B1, B2> = (
|
||||
Ref<B1, super::AddListenSocketRequest>,
|
||||
Ref<B2, super::AddListenSocketResponse>,
|
||||
);
|
||||
/// Request and response for the [crate::api::RequestMsgType::AddPskBroker] message type
|
||||
pub type AddPskBrokerPair<B1, B2> = (
|
||||
Ref<B1, super::AddPskBrokerRequest>,
|
||||
Ref<B2, super::AddPskBrokerResponse>,
|
||||
);
|
||||
|
||||
/// A pair of references to messages; one request and one response.
|
||||
pub enum RequestResponsePair<B1, B2> {
|
||||
Ping(PingPair<B1, B2>),
|
||||
SupplyKeypair(SupplyKeypairPair<B1, B2>),
|
||||
AddListenSocket(AddListenSocketPair<B1, B2>),
|
||||
AddPskBroker(AddPskBrokerPair<B1, B2>),
|
||||
}
|
||||
|
||||
impl<B1, B2> From<PingPair<B1, B2>> for RequestResponsePair<B1, B2> {
|
||||
fn from(v: PingPair<B1, B2>) -> Self {
|
||||
RequestResponsePair::Ping(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B1, B2> From<SupplyKeypairPair<B1, B2>> for RequestResponsePair<B1, B2> {
|
||||
fn from(v: SupplyKeypairPair<B1, B2>) -> Self {
|
||||
RequestResponsePair::SupplyKeypair(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B1, B2> From<AddListenSocketPair<B1, B2>> for RequestResponsePair<B1, B2> {
|
||||
fn from(v: AddListenSocketPair<B1, B2>) -> Self {
|
||||
RequestResponsePair::AddListenSocket(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B1, B2> From<AddPskBrokerPair<B1, B2>> for RequestResponsePair<B1, B2> {
|
||||
fn from(v: AddPskBrokerPair<B1, B2>) -> Self {
|
||||
RequestResponsePair::AddPskBroker(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B1, B2> RequestResponsePair<B1, B2>
|
||||
where
|
||||
B1: ByteSlice,
|
||||
B2: ByteSlice,
|
||||
{
|
||||
/// Returns a tuple of references to both the request and the response message
|
||||
pub fn both(&self) -> (RequestRef<&[u8]>, ResponseRef<&[u8]>) {
|
||||
match self {
|
||||
Self::Ping((req, res)) => {
|
||||
let req = RequestRef::Ping(req.emancipate());
|
||||
let res = ResponseRef::Ping(res.emancipate());
|
||||
(req, res)
|
||||
}
|
||||
Self::SupplyKeypair((req, res)) => {
|
||||
let req = RequestRef::SupplyKeypair(req.emancipate());
|
||||
let res = ResponseRef::SupplyKeypair(res.emancipate());
|
||||
(req, res)
|
||||
}
|
||||
Self::AddListenSocket((req, res)) => {
|
||||
let req = RequestRef::AddListenSocket(req.emancipate());
|
||||
let res = ResponseRef::AddListenSocket(res.emancipate());
|
||||
(req, res)
|
||||
}
|
||||
Self::AddPskBroker((req, res)) => {
|
||||
let req = RequestRef::AddPskBroker(req.emancipate());
|
||||
let res = ResponseRef::AddPskBroker(res.emancipate());
|
||||
(req, res)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the request message
|
||||
pub fn request(&self) -> RequestRef<&[u8]> {
|
||||
self.both().0
|
||||
}
|
||||
|
||||
/// Returns the response message
|
||||
pub fn response(&self) -> ResponseRef<&[u8]> {
|
||||
self.both().1
|
||||
}
|
||||
}
|
||||
|
||||
impl<B1, B2> RequestResponsePair<B1, B2>
|
||||
where
|
||||
B1: ByteSliceMut,
|
||||
B2: ByteSliceMut,
|
||||
{
|
||||
/// Returns a tuple of mutable references to both the request and the response message
|
||||
pub fn both_mut(&mut self) -> (RequestRef<&mut [u8]>, ResponseRef<&mut [u8]>) {
|
||||
match self {
|
||||
Self::Ping((req, res)) => {
|
||||
let req = RequestRef::Ping(req.emancipate_mut());
|
||||
let res = ResponseRef::Ping(res.emancipate_mut());
|
||||
(req, res)
|
||||
}
|
||||
Self::SupplyKeypair((req, res)) => {
|
||||
let req = RequestRef::SupplyKeypair(req.emancipate_mut());
|
||||
let res = ResponseRef::SupplyKeypair(res.emancipate_mut());
|
||||
(req, res)
|
||||
}
|
||||
Self::AddListenSocket((req, res)) => {
|
||||
let req = RequestRef::AddListenSocket(req.emancipate_mut());
|
||||
let res = ResponseRef::AddListenSocket(res.emancipate_mut());
|
||||
(req, res)
|
||||
}
|
||||
Self::AddPskBroker((req, res)) => {
|
||||
let req = RequestRef::AddPskBroker(req.emancipate_mut());
|
||||
let res = ResponseRef::AddPskBroker(res.emancipate_mut());
|
||||
(req, res)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the request message, mutably
|
||||
pub fn request_mut(&mut self) -> RequestRef<&mut [u8]> {
|
||||
self.both_mut().0
|
||||
}
|
||||
|
||||
/// Returns the response message, mutably
|
||||
pub fn response_mut(&mut self) -> ResponseRef<&mut [u8]> {
|
||||
self.both_mut().1
|
||||
}
|
||||
}
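A hedged sketch of bundling a parsed request and a freshly initialized response into a `RequestResponsePair` and borrowing both sides again; the buffer handling is illustrative only:

```rust
use std::mem::size_of;

use zerocopy::AsBytes;

use rosenpass::api::{
    ByteSliceRefExt, Message, PingRequest, PingResponse, RequestRef, RequestResponsePair,
};

fn demo() -> anyhow::Result<()> {
    // A request as it would arrive off the wire...
    let req_msg = PingRequest::new([0u8; 256]);
    let req_buf = req_msg.as_bytes();
    let req = req_buf.ping_request()?;

    // ...and a response buffer initialized via Message::setup.
    let mut res_buf = [0u8; size_of::<PingResponse>()];
    let res = PingResponse::setup(&mut res_buf[..])?;

    let pair: RequestResponsePair<_, _> = (req, res).into();
    let (req_ref, _res_ref) = pair.both();
    assert!(matches!(req_ref, RequestRef::Ping(_)));
    Ok(())
}
```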
|
||||
rosenpass/src/api/boilerplate/response_ref.rs (new file, 196 lines)
@@ -0,0 +1,196 @@
|
||||
// TODO: This is copied almost verbatim from RequestRef…not pretty
|
||||
use anyhow::ensure;
|
||||
|
||||
use zerocopy::{ByteSlice, ByteSliceMut, Ref};
|
||||
|
||||
use super::{ByteSliceRefExt, MessageAttributes, PingResponse, ResponseMsgType};
|
||||
|
||||
/// Helper for producing API message response references, [ResponseRef].
|
||||
///
|
||||
/// This is to [ResponseRef] as [rosenpass_util::zerocopy::RefMaker] is to
|
||||
/// [zerocopy::Ref].
|
||||
struct ResponseRefMaker<B> {
|
||||
/// Buffer we are referencing
|
||||
buf: B,
|
||||
/// Message type we are producing
|
||||
msg_type: ResponseMsgType,
|
||||
}
|
||||
|
||||
impl<B: ByteSlice> ResponseRef<B> {
|
||||
/// Produce a [ResponseRef] from a raw message buffer,
|
||||
/// reading the type from the buffer
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use zerocopy::AsBytes;
|
||||
///
|
||||
/// use rosenpass::api::{PingResponse, ResponseRef, ResponseMsgType};
|
||||
/// // Produce the original PingResponse
|
||||
/// let msg = PingResponse::new([0u8; 256]);
|
||||
///
|
||||
/// // TODO: HEISENBUG: This is necessary for some reason to make the rest of the example work
|
||||
/// let typ = msg.msg_type;
|
||||
/// assert_eq!(typ, rosenpass::api::PING_RESPONSE);
|
||||
///
|
||||
/// // Parse as a message type
|
||||
/// let buf = msg.as_bytes();
|
||||
/// let msg_ref = ResponseRef::parse(buf)?;
|
||||
/// assert!(matches!(msg_ref, ResponseRef::Ping(_)));
|
||||
///
|
||||
/// // Buffers and message types of course match what we expect
|
||||
/// assert_eq!(msg_ref.message_type(), ResponseMsgType::Ping);
|
||||
/// assert!(std::ptr::eq(buf, msg_ref.bytes()));
|
||||
///
|
||||
/// Ok::<(), anyhow::Error>(())
|
||||
/// ```
|
||||
pub fn parse(buf: B) -> anyhow::Result<Self> {
|
||||
ResponseRefMaker::new(buf)?.parse()
|
||||
}
|
||||
|
||||
/// Produce a [ResponseRef] from the prefix of a raw message buffer,
|
||||
/// reading the type from the buffer.
|
||||
pub fn parse_from_prefix(buf: B) -> anyhow::Result<Self> {
|
||||
ResponseRefMaker::new(buf)?.from_prefix()?.parse()
|
||||
}
|
||||
|
||||
/// Produce a [ResponseRef] from the suffix of a raw message buffer,
|
||||
/// reading the type from the buffer.
|
||||
pub fn parse_from_suffix(buf: B) -> anyhow::Result<Self> {
|
||||
ResponseRefMaker::new(buf)?.from_suffix()?.parse()
|
||||
}
|
||||
|
||||
/// Get the message type [Self] contains
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// See [Self::parse]
|
||||
pub fn message_type(&self) -> ResponseMsgType {
|
||||
match self {
|
||||
Self::Ping(_) => ResponseMsgType::Ping,
|
||||
Self::SupplyKeypair(_) => ResponseMsgType::SupplyKeypair,
|
||||
Self::AddListenSocket(_) => ResponseMsgType::AddListenSocket,
|
||||
Self::AddPskBroker(_) => ResponseMsgType::AddPskBroker,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, PingResponse>> for ResponseRef<B> {
|
||||
fn from(v: Ref<B, PingResponse>) -> Self {
|
||||
Self::Ping(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, super::SupplyKeypairResponse>> for ResponseRef<B> {
|
||||
fn from(v: Ref<B, super::SupplyKeypairResponse>) -> Self {
|
||||
Self::SupplyKeypair(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, super::AddListenSocketResponse>> for ResponseRef<B> {
|
||||
fn from(v: Ref<B, super::AddListenSocketResponse>) -> Self {
|
||||
Self::AddListenSocket(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> From<Ref<B, super::AddPskBrokerResponse>> for ResponseRef<B> {
|
||||
fn from(v: Ref<B, super::AddPskBrokerResponse>) -> Self {
|
||||
Self::AddPskBroker(v)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B: ByteSlice> ResponseRefMaker<B> {
|
||||
fn new(buf: B) -> anyhow::Result<Self> {
|
||||
let msg_type = buf.deref().response_msg_type_from_prefix()?;
|
||||
Ok(Self { buf, msg_type })
|
||||
}
|
||||
|
||||
fn target_size(&self) -> usize {
|
||||
self.msg_type.message_size()
|
||||
}
|
||||
|
||||
fn parse(self) -> anyhow::Result<ResponseRef<B>> {
|
||||
Ok(match self.msg_type {
|
||||
ResponseMsgType::Ping => ResponseRef::Ping(self.buf.ping_response()?),
|
||||
ResponseMsgType::SupplyKeypair => {
|
||||
ResponseRef::SupplyKeypair(self.buf.supply_keypair_response()?)
|
||||
}
|
||||
ResponseMsgType::AddListenSocket => {
|
||||
ResponseRef::AddListenSocket(self.buf.add_listen_socket_response()?)
|
||||
}
|
||||
ResponseMsgType::AddPskBroker => {
|
||||
ResponseRef::AddPskBroker(self.buf.add_psk_broker_response()?)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
fn from_prefix(self) -> anyhow::Result<Self> {
|
||||
self.ensure_fit()?;
|
||||
let point = self.target_size();
|
||||
let Self { buf, msg_type } = self;
|
||||
let (buf, _) = buf.split_at(point);
|
||||
Ok(Self { buf, msg_type })
|
||||
}
|
||||
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
fn from_suffix(self) -> anyhow::Result<Self> {
|
||||
self.ensure_fit()?;
|
||||
let point = self.buf.len() - self.target_size();
|
||||
let Self { buf, msg_type } = self;
|
||||
let (buf, _) = buf.split_at(point);
|
||||
Ok(Self { buf, msg_type })
|
||||
}
|
||||
|
||||
pub fn ensure_fit(&self) -> anyhow::Result<()> {
|
||||
let have = self.buf.len();
|
||||
let need = self.target_size();
|
||||
ensure!(
|
||||
need <= have,
|
||||
"Buffer is undersized at {have} bytes (need {need} bytes)!"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Reference to an API message response, typed as an enum.
|
||||
pub enum ResponseRef<B> {
|
||||
Ping(Ref<B, PingResponse>),
|
||||
SupplyKeypair(Ref<B, super::SupplyKeypairResponse>),
|
||||
AddListenSocket(Ref<B, super::AddListenSocketResponse>),
|
||||
AddPskBroker(Ref<B, super::AddPskBrokerResponse>),
|
||||
}
|
||||
|
||||
impl<B> ResponseRef<B>
|
||||
where
|
||||
B: ByteSlice,
|
||||
{
|
||||
/// Access the byte data of this reference
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// See [Self::parse].
|
||||
pub fn bytes(&self) -> &[u8] {
|
||||
match self {
|
||||
Self::Ping(r) => r.bytes(),
|
||||
Self::SupplyKeypair(r) => r.bytes(),
|
||||
Self::AddListenSocket(r) => r.bytes(),
|
||||
Self::AddPskBroker(r) => r.bytes(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> ResponseRef<B>
|
||||
where
|
||||
B: ByteSliceMut,
|
||||
{
|
||||
/// Access the byte data of this reference; mutably
|
||||
pub fn bytes_mut(&mut self) -> &mut [u8] {
|
||||
match self {
|
||||
Self::Ping(r) => r.bytes_mut(),
|
||||
Self::SupplyKeypair(r) => r.bytes_mut(),
|
||||
Self::AddListenSocket(r) => r.bytes_mut(),
|
||||
Self::AddPskBroker(r) => r.bytes_mut(),
|
||||
}
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.