Mirror of https://github.com/XRPLF/rippled.git, synced 2025-11-26 22:15:52 +00:00.
Compare commits: 0.30.1-hf2...0.81.0 (1678 commits)
.gitignore (vendored): 21 changes
@@ -1,5 +1,9 @@
 # .gitignore
+bin/boostbook_catalog.xml
+bin/config.log
+bin/project-cache.jam
+
 # Ignore vim swap files.
 *.swp
 
@@ -29,9 +33,6 @@ Release/*.*
 *.gcda
 *.gcov
 
-# Ignore locally installed node_modules
-/node_modules
-
 # Ignore tmp directory.
 tmp
 
@@ -46,7 +47,6 @@ debug_log.txt
 # Ignore customized configs
 rippled.cfg
 validators.txt
-test/config.js
 
 # Doxygen generated documentation output
 HtmlDocumentation
@@ -73,10 +73,6 @@ DerivedData
 # Intel Parallel Studio 2013 XE
 My Amplifier XE Results - RippleD
 
-# KeyvaDB files
-*.key
-*.val
-
 # Compiler intermediate output
 /out.txt
 
@@ -85,3 +81,12 @@ rippled-build.log
 
 # Profiling data
 gmon.out
+
+Builds/VisualStudio2015/*.db
+Builds/VisualStudio2015/*.user
+Builds/VisualStudio2015/*.opendb
+Builds/VisualStudio2015/*.sdf
+
+# MSVC
+*.pdb
+.vs/
.gitmodules (vendored, new file): 12 changes
@@ -0,0 +1,12 @@
+[submodule "docs/docca"]
+	path = docs/docca
+	url = https://github.com/vinniefalco/docca.git
+[submodule "src/nudb/extras/beast"]
+	path = src/nudb/extras/beast
+	url = https://github.com/vinniefalco/Beast.git
+[submodule "src/nudb/extras/rocksdb"]
+	path = src/nudb/extras/rocksdb
+	url = https://github.com/facebook/rocksdb.git
+[submodule "src/nudb/doc/docca"]
+	path = src/nudb/doc/docca
+	url = https://github.com/vinniefalco/docca.git
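
These submodules are not fetched by a plain clone; a standard checkout sequence (ordinary git commands, not part of this page) looks like:

    git clone https://github.com/XRPLF/rippled.git
    cd rippled
    # pulls docs/docca and the src/nudb extras declared in .gitmodules above
    git submodule update --init --recursive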
.travis.yml: 83 changes
@@ -3,72 +3,75 @@ language: cpp
 
 env:
   global:
+    - LLVM_VERSION=3.8.0
     # Maintenance note: to move to a new version
     # of boost, update both BOOST_ROOT and BOOST_URL.
     # Note that for simplicity, BOOST_ROOT's final
     # namepart must match the folder name internal
    # to boost's .tar.gz.
-    - BOOST_ROOT=$HOME/boost_1_59_0
-    - BOOST_URL='http://downloads.sourceforge.net/project/boost/boost/1.59.0/boost_1_59_0.tar.gz?r=http%3A%2F%2Fsourceforge.net%2Fprojects%2Fboost%2Ffiles%2Fboost%2F1.59.0%2Fboost_1_59_0.tar.gz%2Fdownload&ts=1441761349&use_mirror=skylineservers'
+    - LCOV_ROOT=$HOME/lcov
+    - GDB_ROOT=$HOME/gdb
+    - BOOST_ROOT=$HOME/boost_1_60_0
+    - BOOST_URL='http://sourceforge.net/projects/boost/files/boost/1.60.0/boost_1_60_0.tar.gz'
 
+# Travis is timing out on Trusty. So, for now, use Precise. July 2017
+dist: precise
+
-packages: &gcc5_pkgs
- - gcc-5
- - g++-5
- - python-software-properties
- - protobuf-compiler
- - libprotobuf-dev
- - libssl-dev
- - libstdc++6
- - binutils-gold
- # Provides a backtrace if the unittests crash
- - gdb
-
-packages: &clang36_pkgs
- - clang-3.6
- - g++-5
- - python-software-properties
- - protobuf-compiler
- - libprotobuf-dev
- - libssl-dev
- - libstdc++6
- - binutils-gold
- # Provides a backtrace if the unittests crash
- - gdb
+addons:
+  apt:
+    sources: ['ubuntu-toolchain-r-test']
+    packages:
+      - gcc-5
+      - g++-5
+      - python-software-properties
+      - protobuf-compiler
+      - libprotobuf-dev
+      - libssl-dev
+      - libstdc++6
+      - binutils-gold
+      # Provides a backtrace if the unittests crash
+      - gdb
+      # needed to build gdb
+      - texinfo
 
 matrix:
   include:
+    # Default BUILD is "scons".
+
     - compiler: gcc
-      env: GCC_VER=5 TARGET=debug.nounity
-      addons: &ao_gcc5
-        apt:
-          sources: ['ubuntu-toolchain-r-test']
-          packages: *gcc5_pkgs
+      env: GCC_VER=5 BUILD=cmake TARGET=debug.nounity PATH=$PWD/cmake/bin:$PATH
 
     - compiler: gcc
       env: GCC_VER=5 TARGET=coverage
-      addons: *ao_gcc5
 
     - compiler: clang
-      env: GCC_VER=5 TARGET=debug CLANG_VER=3.6
-      addons: &ao_clang36
-        apt:
-          sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-precise-3.6']
-          packages: *clang36_pkgs
+      env: GCC_VER=5 TARGET=debug CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PATH
+      cache:
+        directories:
+          - $GDB_ROOT
 
     - compiler: clang
-      env: GCC_VER=5 TARGET=debug.nounity CLANG_VER=3.6
-      addons: *ao_clang36
+      env: GCC_VER=5 TARGET=debug.nounity CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PATH
+
+    # The clang cmake builds do not link.
+    # - compiler: clang
+    #   env: GCC_VER=5 BUILD=cmake TARGET=debug CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PWD/cmake/bin:$PATH
+
+    # - compiler: clang
+    #   env: GCC_VER=5 BUILD=cmake TARGET=debug.nounity CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PWD/cmake/bin:$PATH
 
 cache:
   directories:
     - $BOOST_ROOT
+    - llvm-$LLVM_VERSION
+    - cmake
+    - $GDB_ROOT
 
 before_install:
   - bin/ci/ubuntu/install-dependencies.sh
 
 script:
-  - bin/ci/ubuntu/build-and-test.sh
+  - travis_retry bin/ci/ubuntu/build-and-test.sh
 
 notifications:
   email:
@@ -76,3 +79,5 @@ notifications:
   irc:
     channels:
       - "chat.freenode.net#ripple-dev"
+
+dist: precise
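
The maintenance note above requires BOOST_ROOT's final path component to match the folder name inside the archive at BOOST_URL. A minimal sketch of why, assuming a cached-install step of the kind bin/ci/ubuntu/install-dependencies.sh performs (the real script is not shown on this page):

    # If the Travis cache is cold, fetch and build Boost into $BOOST_ROOT.
    if [ ! -d "$BOOST_ROOT/stage" ]; then
      wget -O /tmp/boost.tar.gz "$BOOST_URL"
      # The tarball extracts to boost_1_60_0/, so extracting next to
      # $BOOST_ROOT only works because $(basename "$BOOST_ROOT") matches it.
      tar -xzf /tmp/boost.tar.gz -C "$(dirname "$BOOST_ROOT")"
      (cd "$BOOST_ROOT" && ./bootstrap.sh && ./b2 stage)
    fi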
PKGBUILD
@@ -9,7 +9,6 @@ url="https://github.com/ripple/rippled"
 license=('custom:ISC')
 depends=('protobuf' 'openssl' 'boost-libs')
 makedepends=('git' 'scons' 'boost')
-checkdepends=('nodejs')
 backup=("etc/$pkgname/rippled.cfg")
 source=("git://github.com/ripple/rippled.git#branch=master")
 sha512sums=('SKIP')
@@ -26,8 +25,6 @@ build() {
 
 check() {
   cd "$srcdir/$pkgname"
-  npm install
-  npm test
   build/rippled --unittest
 }
 
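
For reference, a PKGBUILD like the one above is driven with Arch's standard makepkg tooling; a minimal usage sketch (not taken from this repository):

    # from the directory containing the PKGBUILD
    makepkg -s                    # fetch depends/makedepends and build
    makepkg --check               # run check(), i.e. build/rippled --unittest
    sudo pacman -U rippled-*.pkg.tar*   # install the built package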
Builds/CMake/CMakeFuncs.cmake (new file): 766 lines
@@ -0,0 +1,766 @@
|
|||||||
|
# This is a set of common functions and settings for rippled
|
||||||
|
# and derived products.
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
cmake_minimum_required(VERSION 3.1.0)
|
||||||
|
|
||||||
|
if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
|
||||||
|
message(WARNING "Builds are strongly discouraged in "
|
||||||
|
"${CMAKE_SOURCE_DIR}.")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
macro(parse_target)
|
||||||
|
|
||||||
|
if (NOT target OR target STREQUAL "default")
|
||||||
|
if (NOT CMAKE_BUILD_TYPE)
|
||||||
|
set(CMAKE_BUILD_TYPE Debug)
|
||||||
|
endif()
|
||||||
|
string(TOLOWER ${CMAKE_BUILD_TYPE} target)
|
||||||
|
if (APPLE)
|
||||||
|
set(target clang.${target})
|
||||||
|
elseif(WIN32)
|
||||||
|
set(target msvc)
|
||||||
|
else()
|
||||||
|
set(target gcc.${target})
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (target)
|
||||||
|
# Parse the target
|
||||||
|
set(remaining ${target})
|
||||||
|
while (remaining)
|
||||||
|
# get the component up to the next dot or end
|
||||||
|
string(REGEX REPLACE "^\\.?([^\\.]+).*$" "\\1" cur_component ${remaining})
|
||||||
|
string(REGEX REPLACE "^\\.?[^\\.]+(.*$)" "\\1" remaining ${remaining})
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL gcc)
|
||||||
|
if (DEFINED ENV{GNU_CC})
|
||||||
|
set(CMAKE_C_COMPILER $ENV{GNU_CC})
|
||||||
|
elseif ($ENV{CC} MATCHES .*gcc.*)
|
||||||
|
set(CMAKE_C_COMPILER $ENV{CC})
|
||||||
|
else()
|
||||||
|
find_program(CMAKE_C_COMPILER gcc)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (DEFINED ENV{GNU_CXX})
|
||||||
|
set(CMAKE_CXX_COMPILER $ENV{GNU_CXX})
|
||||||
|
elseif ($ENV{CXX} MATCHES .*g\\+\\+.*)
|
||||||
|
set(CMAKE_CXX_COMPILER $ENV{CXX})
|
||||||
|
else()
|
||||||
|
find_program(CMAKE_CXX_COMPILER g++)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL clang)
|
||||||
|
if (DEFINED ENV{CLANG_CC})
|
||||||
|
set(CMAKE_C_COMPILER $ENV{CLANG_CC})
|
||||||
|
elseif ($ENV{CC} MATCHES .*clang.*)
|
||||||
|
set(CMAKE_C_COMPILER $ENV{CC})
|
||||||
|
else()
|
||||||
|
find_program(CMAKE_C_COMPILER clang)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (DEFINED ENV{CLANG_CXX})
|
||||||
|
set(CMAKE_CXX_COMPILER $ENV{CLANG_CXX})
|
||||||
|
elseif ($ENV{CXX} MATCHES .*clang.*)
|
||||||
|
set(CMAKE_CXX_COMPILER $ENV{CXX})
|
||||||
|
else()
|
||||||
|
find_program(CMAKE_CXX_COMPILER clang++)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL msvc)
|
||||||
|
# TBD
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL unity)
|
||||||
|
set(unity true)
|
||||||
|
set(nonunity false)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL nounity)
|
||||||
|
set(unity false)
|
||||||
|
set(nonunity true)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL debug)
|
||||||
|
set(release false)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL release)
|
||||||
|
set(release true)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL coverage)
|
||||||
|
set(coverage true)
|
||||||
|
set(debug true)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL profile)
|
||||||
|
set(profile true)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${cur_component} STREQUAL ci)
|
||||||
|
# Workarounds that make various CI builds work, but that
|
||||||
|
# we don't want in the general case.
|
||||||
|
set(ci true)
|
||||||
|
set(openssl_min 1.0.1)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
endwhile()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(CMAKE_C_COMPILER MATCHES "-NOTFOUND$" OR
|
||||||
|
CMAKE_CXX_COMPILER MATCHES "-NOTFOUND$")
|
||||||
|
message(FATAL_ERROR "Can not find appropriate compiler for target ${target}")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# If defined, promote the compiler path values to the CACHE, then
|
||||||
|
# unset the locals to prevent shadowing. Some scenarios do not
|
||||||
|
# need or want to find a compiler, such as -GNinja under Windows.
|
||||||
|
# Setting these values in those case may prevent CMake from finding
|
||||||
|
# a valid compiler.
|
||||||
|
if (CMAKE_C_COMPILER)
|
||||||
|
set(CMAKE_C_COMPILER ${CMAKE_C_COMPILER} CACHE FILEPATH
|
||||||
|
"Path to a program" FORCE)
|
||||||
|
unset(CMAKE_C_COMPILER)
|
||||||
|
endif (CMAKE_C_COMPILER)
|
||||||
|
if (CMAKE_CXX_COMPILER)
|
||||||
|
set(CMAKE_CXX_COMPILER ${CMAKE_CXX_COMPILER} CACHE FILEPATH
|
||||||
|
"Path to a program" FORCE)
|
||||||
|
unset(CMAKE_CXX_COMPILER)
|
||||||
|
endif (CMAKE_CXX_COMPILER)
|
||||||
|
|
||||||
|
if (release)
|
||||||
|
set(CMAKE_BUILD_TYPE Release)
|
||||||
|
else()
|
||||||
|
set(CMAKE_BUILD_TYPE Debug)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# ensure that the unity flags are set and exclusive
|
||||||
|
if (NOT DEFINED unity OR unity)
|
||||||
|
# Default to unity builds
|
||||||
|
set(unity true)
|
||||||
|
set(nonunity false)
|
||||||
|
else()
|
||||||
|
set(unity false)
|
||||||
|
set(nonunity true)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (NOT unity)
|
||||||
|
set(CMAKE_BUILD_TYPE ${CMAKE_BUILD_TYPE}Classic)
|
||||||
|
endif()
|
||||||
|
# Promote this value to the CACHE, then unset the local
|
||||||
|
# to prevent shadowing.
|
||||||
|
set(CMAKE_BUILD_TYPE ${CMAKE_BUILD_TYPE} CACHE INTERNAL
|
||||||
|
"Choose the type of build, options are in CMAKE_CONFIGURATION_TYPES"
|
||||||
|
FORCE)
|
||||||
|
unset(CMAKE_BUILD_TYPE)
|
||||||
|
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
macro(setup_build_cache)
|
||||||
|
set(san "" CACHE STRING "On gcc & clang, add sanitizer
|
||||||
|
instrumentation")
|
||||||
|
set_property(CACHE san PROPERTY STRINGS ";address;thread")
|
||||||
|
set(assert false CACHE BOOL "Enables asserts, even in release builds")
|
||||||
|
set(static false CACHE BOOL
|
||||||
|
"On linux, link protobuf, openssl, libc++, and boost statically")
|
||||||
|
set(jemalloc false CACHE BOOL "Enables jemalloc for heap profiling")
|
||||||
|
set(perf false CACHE BOOL "Enables flags that assist with perf recording")
|
||||||
|
|
||||||
|
if (static AND (WIN32 OR APPLE))
|
||||||
|
message(FATAL_ERROR "Static linking is only supported on linux.")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (perf AND (WIN32 OR APPLE))
|
||||||
|
message(FATAL_ERROR "perf flags are only supported on linux.")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${CMAKE_GENERATOR} STREQUAL "Unix Makefiles" AND NOT CMAKE_BUILD_TYPE)
|
||||||
|
set(CMAKE_BUILD_TYPE Debug)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Can't exclude files from configurations, so can't support both
|
||||||
|
# unity and nonunity configurations at the same time
|
||||||
|
if (NOT DEFINED unity OR unity)
|
||||||
|
set(CMAKE_CONFIGURATION_TYPES
|
||||||
|
Debug
|
||||||
|
Release)
|
||||||
|
else()
|
||||||
|
set(CMAKE_CONFIGURATION_TYPES
|
||||||
|
DebugClassic
|
||||||
|
ReleaseClassic)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# Promote this value to the CACHE, then unset the local
|
||||||
|
# to prevent shadowing.
|
||||||
|
set(CMAKE_CONFIGURATION_TYPES
|
||||||
|
${CMAKE_CONFIGURATION_TYPES} CACHE STRING "" FORCE)
|
||||||
|
unset(CMAKE_CONFIGURATION_TYPES)
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
function(prepend var prefix)
|
||||||
|
set(listVar "")
|
||||||
|
foreach(f ${ARGN})
|
||||||
|
list(APPEND listVar "${prefix}${f}")
|
||||||
|
endforeach(f)
|
||||||
|
set(${var} "${listVar}" PARENT_SCOPE)
|
||||||
|
endfunction()
|
||||||
|
|
||||||
|
macro(append_flags name)
|
||||||
|
foreach (arg ${ARGN})
|
||||||
|
set(${name} "${${name}} ${arg}")
|
||||||
|
endforeach()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
macro(group_sources_in source_dir curdir)
|
||||||
|
file(GLOB children RELATIVE ${source_dir}/${curdir}
|
||||||
|
${source_dir}/${curdir}/*)
|
||||||
|
foreach (child ${children})
|
||||||
|
if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
|
||||||
|
group_sources_in(${source_dir} ${curdir}/${child})
|
||||||
|
else()
|
||||||
|
string(REPLACE "/" "\\" groupname ${curdir})
|
||||||
|
source_group(${groupname} FILES
|
||||||
|
${source_dir}/${curdir}/${child})
|
||||||
|
endif()
|
||||||
|
endforeach()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
macro(group_sources curdir)
|
||||||
|
group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
macro(add_with_props src_var files)
|
||||||
|
list(APPEND ${src_var} ${files})
|
||||||
|
foreach (arg ${ARGN})
|
||||||
|
set(props "${props} ${arg}")
|
||||||
|
endforeach()
|
||||||
|
set_source_files_properties(
|
||||||
|
${files}
|
||||||
|
PROPERTIES COMPILE_FLAGS
|
||||||
|
${props})
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
macro(determine_build_type)
|
||||||
|
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
|
||||||
|
set(is_clang true)
|
||||||
|
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||||
|
set(is_gcc true)
|
||||||
|
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
|
||||||
|
set(is_msvc true)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (${CMAKE_GENERATOR} STREQUAL "Xcode")
|
||||||
|
set(is_xcode true)
|
||||||
|
else()
|
||||||
|
set(is_xcode false)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (NOT is_gcc AND NOT is_clang AND NOT is_msvc)
|
||||||
|
message("Current compiler is ${CMAKE_CXX_COMPILER_ID}")
|
||||||
|
message(FATAL_ERROR "Missing compiler. Must be GNU, Clang, or MSVC")
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
macro(check_gcc4_abi)
|
||||||
|
# Check if should use gcc4's ABI
|
||||||
|
set(gcc4_abi false)
|
||||||
|
|
||||||
|
if ($ENV{RIPPLED_OLD_GCC_ABI})
|
||||||
|
set(gcc4_abi true)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (is_gcc AND NOT gcc4_abi)
|
||||||
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 5)
|
||||||
|
execute_process(COMMAND lsb_release -si OUTPUT_VARIABLE lsb)
|
||||||
|
string(STRIP "${lsb}" lsb)
|
||||||
|
if ("${lsb}" STREQUAL "Ubuntu")
|
||||||
|
execute_process(COMMAND lsb_release -sr OUTPUT_VARIABLE lsb)
|
||||||
|
string(STRIP ${lsb} lsb)
|
||||||
|
if (${lsb} VERSION_LESS 15.1)
|
||||||
|
set(gcc4_abi true)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (gcc4_abi)
|
||||||
|
add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0)
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
macro(special_build_flags)
|
||||||
|
if (coverage)
|
||||||
|
add_compile_options(-fprofile-arcs -ftest-coverage)
|
||||||
|
append_flags(CMAKE_EXE_LINKER_FLAGS -fprofile-arcs -ftest-coverage)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (profile)
|
||||||
|
add_compile_options(-p -pg)
|
||||||
|
append_flags(CMAKE_EXE_LINKER_FLAGS -p -pg)
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
|
||||||
|
# Params: Boost components to search for.
|
||||||
|
macro(use_boost)
|
||||||
|
if ((NOT DEFINED BOOST_ROOT) AND (DEFINED ENV{BOOST_ROOT}))
|
||||||
|
set(BOOST_ROOT $ENV{BOOST_ROOT})
|
||||||
|
endif()
|
||||||
|
file(TO_CMAKE_PATH "${BOOST_ROOT}" BOOST_ROOT)
|
||||||
|
if(WIN32 OR CYGWIN)
|
||||||
|
# Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders
|
||||||
|
if(DEFINED BOOST_ROOT)
|
||||||
|
if(CMAKE_SIZEOF_VOID_P EQUAL 8 AND IS_DIRECTORY ${BOOST_ROOT}/stage64/lib)
|
||||||
|
set(Boost_LIBRARY_DIR ${BOOST_ROOT}/stage64/lib)
|
||||||
|
else()
|
||||||
|
set(Boost_LIBRARY_DIR ${BOOST_ROOT}/stage/lib)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (is_clang AND DEFINED ENV{CLANG_BOOST_ROOT})
|
||||||
|
set(BOOST_ROOT $ENV{CLANG_BOOST_ROOT})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
set(Boost_USE_STATIC_LIBS on)
|
||||||
|
set(Boost_USE_MULTITHREADED on)
|
||||||
|
set(Boost_USE_STATIC_RUNTIME off)
|
||||||
|
if(MSVC)
|
||||||
|
find_package(Boost REQUIRED)
|
||||||
|
else()
|
||||||
|
find_package(Boost REQUIRED ${ARGN})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (Boost_FOUND OR
|
||||||
|
((CYGWIN OR WIN32) AND Boost_INCLUDE_DIRS AND Boost_LIBRARY_DIRS))
|
||||||
|
if(NOT Boost_FOUND)
|
||||||
|
message(WARNING "Boost directory found, but not all components. May not be able to build.")
|
||||||
|
endif()
|
||||||
|
include_directories(SYSTEM ${Boost_INCLUDE_DIRS})
|
||||||
|
link_directories(${Boost_LIBRARY_DIRS})
|
||||||
|
else()
|
||||||
|
message(FATAL_ERROR "Boost not found")
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
macro(use_pthread)
|
||||||
|
if (NOT WIN32)
|
||||||
|
set(THREADS_PREFER_PTHREAD_FLAG ON)
|
||||||
|
find_package(Threads)
|
||||||
|
add_compile_options(${CMAKE_THREAD_LIBS_INIT})
|
||||||
|
endif()
|
||||||
|
endmacro()
|
||||||
|
|
||||||
|
macro(use_openssl openssl_min)
|
||||||
|
if (APPLE AND NOT DEFINED ENV{OPENSSL_ROOT_DIR})
|
||||||
|
find_program(HOMEBREW brew)
|
||||||
|
if (NOT HOMEBREW STREQUAL "HOMEBREW-NOTFOUND")
|
||||||
|
execute_process(COMMAND brew --prefix openssl
|
||||||
|
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
|
||||||
|
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (WIN32)
|
||||||
|
if (DEFINED ENV{OPENSSL_ROOT})
|
||||||
|
include_directories($ENV{OPENSSL_ROOT}/include)
|
||||||
|
link_directories($ENV{OPENSSL_ROOT}/lib)
|
||||||
|
endif()
|
||||||
|
else()
|
||||||
|
if (static)
|
||||||
|
set(tmp CMAKE_FIND_LIBRARY_SUFFIXES)
|
||||||
|
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
find_package(OpenSSL)
|
||||||
|
# depending on how openssl is built, it might depend
|
||||||
|
# on zlib. In fact, the openssl find package should
|
||||||
|
# figure this out for us, but it does not currently...
|
||||||
|
# so let's add zlib ourselves to the lib list
|
||||||
|
find_package(ZLIB)
|
||||||
|
|
||||||
|
if (static)
|
||||||
|
set(CMAKE_FIND_LIBRARY_SUFFIXES tmp)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if (OPENSSL_FOUND)
|
||||||
|
include_directories(${OPENSSL_INCLUDE_DIR})
|
||||||
|
list(APPEND OPENSSL_LIBRARIES ${ZLIB_LIBRARIES})
|
||||||
|
else()
|
||||||
|
message(FATAL_ERROR "OpenSSL not found")
|
||||||
|
endif()
|
||||||
|
if (UNIX AND NOT APPLE AND ${OPENSSL_VERSION} VERSION_LESS ${openssl_min})
|
||||||
|
message(FATAL_ERROR
|
||||||
|
"Your openssl is Version: ${OPENSSL_VERSION}, ${openssl_min} or better is required.")
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
endmacro()

macro(use_protobuf)
  if (WIN32)
    if (DEFINED ENV{PROTOBUF_ROOT})
      include_directories($ENV{PROTOBUF_ROOT}/src)
      link_directories($ENV{PROTOBUF_ROOT}/src/.libs)
    endif()

    # Modified from FindProtobuf.cmake
    FUNCTION(PROTOBUF_GENERATE_CPP SRCS HDRS PROTOFILES)
      # argument parsing
      IF(NOT PROTOFILES)
        MESSAGE(SEND_ERROR "Error: PROTOBUF_GENERATE_CPP() called without any proto files")
        RETURN()
      ENDIF()

      SET(OUTPATH ${CMAKE_CURRENT_BINARY_DIR})
      SET(PROTOROOT ${CMAKE_CURRENT_SOURCE_DIR})
      # the real logic
      SET(${SRCS})
      SET(${HDRS})
      FOREACH(PROTOFILE ${PROTOFILES})
        # ensure that the file ends with .proto
        STRING(REGEX MATCH "\\.proto$" PROTOEND ${PROTOFILE})
        IF(NOT PROTOEND)
          MESSAGE(SEND_ERROR "Proto file '${PROTOFILE}' does not end with .proto")
        ENDIF()

        GET_FILENAME_COMPONENT(PROTO_PATH ${PROTOFILE} PATH)
        GET_FILENAME_COMPONENT(ABS_FILE ${PROTOFILE} ABSOLUTE)
        GET_FILENAME_COMPONENT(FILE_WE ${PROTOFILE} NAME_WE)

        STRING(REGEX MATCH "^${PROTOROOT}" IN_ROOT_PATH ${PROTOFILE})
        STRING(REGEX MATCH "^${PROTOROOT}" IN_ROOT_ABS_FILE ${ABS_FILE})

        IF(IN_ROOT_PATH)
          SET(MATCH_PATH ${PROTOFILE})
        ELSEIF(IN_ROOT_ABS_FILE)
          SET(MATCH_PATH ${ABS_FILE})
        ELSE()
          MESSAGE(SEND_ERROR "Proto file '${PROTOFILE}' is not in protoroot '${PROTOROOT}'")
        ENDIF()

        # build the result file name
        STRING(REGEX REPLACE "^${PROTOROOT}(/?)" "" ROOT_CLEANED_FILE ${MATCH_PATH})
        STRING(REGEX REPLACE "\\.proto$" "" EXT_CLEANED_FILE ${ROOT_CLEANED_FILE})

        SET(CPP_FILE "${OUTPATH}/${EXT_CLEANED_FILE}.pb.cc")
        SET(H_FILE "${OUTPATH}/${EXT_CLEANED_FILE}.pb.h")

        LIST(APPEND ${SRCS} "${CPP_FILE}")
        LIST(APPEND ${HDRS} "${H_FILE}")

        ADD_CUSTOM_COMMAND(
          OUTPUT "${CPP_FILE}" "${H_FILE}"
          COMMAND ${CMAKE_COMMAND} -E make_directory ${OUTPATH}
          COMMAND ${PROTOBUF_PROTOC_EXECUTABLE}
          ARGS "--cpp_out=${OUTPATH}" --proto_path "${PROTOROOT}" "${MATCH_PATH}"
          DEPENDS ${ABS_FILE}
          COMMENT "Running C++ protocol buffer compiler on ${MATCH_PATH} with root ${PROTOROOT}, generating: ${CPP_FILE}"
          VERBATIM)

      ENDFOREACH()

      SET_SOURCE_FILES_PROPERTIES(${${SRCS}} ${${HDRS}} PROPERTIES GENERATED TRUE)
      SET(${SRCS} ${${SRCS}} PARENT_SCOPE)
      SET(${HDRS} ${${HDRS}} PARENT_SCOPE)

    ENDFUNCTION()

    set(PROTOBUF_PROTOC_EXECUTABLE Protoc) # must be on the PATH
  else()
    if (static)
      # Save the current suffix list and restrict the search to static archives.
      set(tmp ${CMAKE_FIND_LIBRARY_SUFFIXES})
      set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
    endif()

    find_package(Protobuf REQUIRED)

    if (static)
      # Restore the saved suffix list.
      set(CMAKE_FIND_LIBRARY_SUFFIXES ${tmp})
    endif()

    if (is_clang AND DEFINED ENV{CLANG_PROTOBUF_ROOT})
      link_directories($ENV{CLANG_PROTOBUF_ROOT}/src/.libs)
      include_directories($ENV{CLANG_PROTOBUF_ROOT}/src)
    else()
      include_directories(${PROTOBUF_INCLUDE_DIRS})
    endif()
  endif()
  include_directories(${CMAKE_CURRENT_BINARY_DIR})

  file(GLOB ripple_proto src/ripple/proto/*.proto)
  PROTOBUF_GENERATE_CPP(PROTO_SRCS PROTO_HDRS ${ripple_proto})

  if (WIN32)
    include_directories(src/protobuf/src
      src/protobuf/vsprojects
      ${CMAKE_CURRENT_BINARY_DIR}/src/ripple/proto)
  endif()

endmacro()
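
# Illustrative only, not part of the original file: the PROTO_SRCS/PROTO_HDRS
# lists populated above are meant to be compiled into a target, e.g.:
#   use_protobuf()
#   add_executable(rippled ${PROTO_SRCS} ${PROTO_HDRS} <other sources>)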

############################################################

macro(setup_build_boilerplate)
  if (NOT WIN32 AND san)
    add_compile_options(-fsanitize=${san} -fno-omit-frame-pointer)

    append_flags(CMAKE_EXE_LINKER_FLAGS
      -fsanitize=${san})

    string(TOLOWER ${san} ci_san)
    if (${ci_san} STREQUAL address)
      set(SANITIZER_LIBRARIES asan)
      add_definitions(-DSANITIZER=ASAN)
    endif()
    if (${ci_san} STREQUAL thread)
      set(SANITIZER_LIBRARIES tsan)
      add_definitions(-DSANITIZER=TSAN)
    endif()
  endif()
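
  # Illustrative only, not part of the original file: the san/assert options
  # consumed above are the same ones exercised by Builds/Test.py below, e.g.:
  #   cmake -Dsan=address -Dassert=true <path-to-source>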

  if (perf)
    add_compile_options(-fno-omit-frame-pointer)
  endif()

  ############################################################

  add_definitions(
    -DOPENSSL_NO_SSL2
    -DDEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER
    -DHAVE_USLEEP=1
    -DSOCI_CXX_C11=1
    -D_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS
    -DBOOST_NO_AUTO_PTR
  )

  if (is_gcc)
    add_compile_options(-Wno-unused-but-set-variable -Wno-deprecated)

    # use the gold linker if available
    execute_process(
      COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
      ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
    # NOTE: The gold linker inserts -rpath as DT_RUNPATH by default
    # instead of DT_RPATH, so you might have slightly unexpected
    # runtime ld behavior if you were expecting DT_RPATH.
    # Specify --disable-new-dtags to gold if you do not want
    # the default DT_RUNPATH behavior. This rpath treatment, as well
    # as static/dynamic selection, means that gold does not currently
    # have ideal default behavior when we are using jemalloc. Thus,
    # for simplicity, we don't use it when jemalloc is requested.
    # An alternative to disabling would be to figure out all the settings
    # required to make gold play nicely with jemalloc.
    if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc))
      append_flags(CMAKE_EXE_LINKER_FLAGS -fuse-ld=gold)
    endif ()
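    # Illustrative only, not part of the original file: to keep classic
    # DT_RPATH semantics while still using gold, the flag mentioned above
    # could be routed through the compiler driver, e.g.:
    #   append_flags(CMAKE_EXE_LINKER_FLAGS -fuse-ld=gold -Wl,--disable-new-dtags)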
    unset(LD_VERSION)
  endif()

  # Generator expressions are not supported in add_definitions, use set_property instead
  set_property(
    DIRECTORY
    APPEND
    PROPERTY COMPILE_DEFINITIONS
    $<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:DEBUG _DEBUG>)

  if (NOT assert)
    set_property(
      DIRECTORY
      APPEND
      PROPERTY COMPILE_DEFINITIONS
      $<$<OR:$<BOOL:${profile}>,$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:NDEBUG>)
  else()
    # CMAKE_CXX_FLAGS_RELEASE is created by CMake for most / all generators
    # with defaults including /DNDEBUG or -DNDEBUG, and that value is stored
    # in the cache. Override that locally so that the cache value will be
    # available if "assert" is ever changed.
    STRING(REGEX REPLACE "[-/]DNDEBUG" "" CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE}")
    STRING(REGEX REPLACE "[-/]DNDEBUG" "" CMAKE_CXX_FLAGS_RELEASECLASSIC "${CMAKE_CXX_FLAGS_RELEASECLASSIC}")
    STRING(REGEX REPLACE "[-/]DNDEBUG" "" CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE}")
    STRING(REGEX REPLACE "[-/]DNDEBUG" "" CMAKE_C_FLAGS_RELEASECLASSIC "${CMAKE_C_FLAGS_RELEASECLASSIC}")
  endif()

  if (jemalloc)
    find_package(jemalloc REQUIRED)
    add_definitions(-DPROFILE_JEMALLOC)
    include_directories(SYSTEM ${JEMALLOC_INCLUDE_DIRS})
    link_libraries(${JEMALLOC_LIBRARIES})
    get_filename_component(JEMALLOC_LIB_PATH ${JEMALLOC_LIBRARIES} DIRECTORY)
    set(CMAKE_BUILD_RPATH ${CMAKE_BUILD_RPATH} ${JEMALLOC_LIB_PATH})
  endif()

  if (NOT WIN32)
    add_definitions(-D_FILE_OFFSET_BITS=64)
    append_flags(CMAKE_CXX_FLAGS -frtti -std=c++14 -Wno-invalid-offsetof
      -DBOOST_COROUTINE_NO_DEPRECATION_WARNING -DBOOST_COROUTINES_NO_DEPRECATION_WARNING)
    add_compile_options(-Wall -Wno-sign-compare -Wno-char-subscripts -Wno-format
      -Wno-unused-local-typedefs -g)
    # There seems to be an issue using generator expressions with multiple values,
    # so split the expression
    add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:-O3>)
    add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:-fno-strict-aliasing>)
    append_flags(CMAKE_EXE_LINKER_FLAGS -rdynamic -g)

    if (is_clang)
      add_compile_options(
        -Wno-redeclared-class-member -Wno-mismatched-tags -Wno-deprecated-register)
      add_definitions(-DBOOST_ASIO_HAS_STD_ARRAY)

      # use the lld linker if available
      execute_process(
        COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
        ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
      if ("${LD_VERSION}" MATCHES "LLD")
        append_flags(CMAKE_EXE_LINKER_FLAGS -fuse-ld=lld)
      endif ()
      unset(LD_VERSION)
    endif()

    if (APPLE)
      add_definitions(-DBEAST_COMPILE_OBJECTIVE_CPP=1)
      add_compile_options(
        -Wno-deprecated -Wno-deprecated-declarations -Wno-unused-function)
    endif()

    if (is_gcc)
      add_compile_options(-Wno-unused-but-set-variable -Wno-unused-local-typedefs)
      add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:-O0>)
    endif (is_gcc)
  else(NOT WIN32)
    add_compile_options(
      /bigobj            # Increase object file max size
      /EHa               # ExceptionHandling all
      /fp:precise        # Floating point behavior
      /Gd                # __cdecl calling convention
      /Gm-               # Minimal rebuild: disabled
      /GR                # Enable RTTI
      /Gy-               # Function level linking: disabled
      /FS
      /MP                # Multiprocessor compilation
      /openmp-           # pragma omp: disabled
      /Zc:forScope       # Language conformance: for scope
      /Zi                # Generate complete debug info
      /errorReport:none  # No error reporting to Internet
      /nologo            # Suppress sign-on banner
      /W3                # Warning level 3
      /WX-               # Disable warnings as errors
      /wd4018            # Disable signed/unsigned comparison warnings
      /wd4244            # Disable float to int possible loss of data warnings
      /wd4267            # Disable size_t to T possible loss of data warnings
      /wd4800            # Disable C4800 (int to bool performance)
      /wd4503            # Decorated name length exceeded, name was truncated
    )
    add_definitions(
      -D_WIN32_WINNT=0x6000
      -D_SCL_SECURE_NO_WARNINGS
      -D_CRT_SECURE_NO_WARNINGS
      -DWIN32_CONSOLE
      -DNOMINMAX
      -DBOOST_COROUTINE_NO_DEPRECATION_WARNING
      -DBOOST_COROUTINES_NO_DEPRECATION_WARNING)
    append_flags(CMAKE_EXE_LINKER_FLAGS
      /DEBUG
      /DYNAMICBASE
      /ERRORREPORT:NONE
      /MACHINE:X64
      /MANIFEST
      /nologo
      /NXCOMPAT
      /SUBSYSTEM:CONSOLE
      /TLBID:1)

    # There seems to be an issue using generator expressions with multiple values,
    # so split the expression
    # /GS    Buffer security check: enabled
    add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/GS>)
    # /MTd   Language: Multi-threaded Debug CRT
    add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/MTd>)
    # /Od    Optimization: Disabled
    add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/Od>)
    # /RTC1  Run-time error checks
    add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/RTC1>)

    # Generator expressions are not supported in add_definitions, use set_property instead
    set_property(
      DIRECTORY
      APPEND
      PROPERTY COMPILE_DEFINITIONS
      $<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:_CRTDBG_MAP_ALLOC>)

    # /MT    Language: Multi-threaded CRT
    add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:/MT>)
    # /Ox    Optimization: Full
    add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:/Ox>)

  endif (NOT WIN32)

  if (static)
    append_flags(CMAKE_EXE_LINKER_FLAGS -static-libstdc++)
    # set_target_properties(ripple-libpp PROPERTIES LINK_SEARCH_START_STATIC 1)
    # set_target_properties(ripple-libpp PROPERTIES LINK_SEARCH_END_STATIC 1)
  endif()
endmacro()

############################################################

macro(create_build_folder cur_project)
  if (NOT WIN32)
    ADD_CUSTOM_TARGET(build_folder ALL
      COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}
      COMMENT "Creating build output folder")
    add_dependencies(${cur_project} build_folder)
  endif()
endmacro()

macro(set_startup_project cur_project)
  if (WIN32 AND NOT ci)
    if (CMAKE_VERSION VERSION_LESS 3.6)
      message(WARNING
        "Setting the VS startup project requires cmake 3.6 or later. Please upgrade.")
    endif()
    set_property(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY
      VS_STARTUP_PROJECT ${cur_project})
  endif()
endmacro()

macro(link_common_libraries cur_project)
  if (NOT MSVC)
    target_link_libraries(${cur_project} ${Boost_LIBRARIES})
    target_link_libraries(${cur_project} dl)
    target_link_libraries(${cur_project} Threads::Threads)
    if (APPLE)
      find_library(app_kit AppKit)
      find_library(foundation Foundation)
      target_link_libraries(${cur_project}
        ${app_kit} ${foundation})
    else()
      target_link_libraries(${cur_project} rt)
    endif()
  else(NOT MSVC)
    target_link_libraries(${cur_project}
      $<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:VC/static/ssleay32MTd>
      $<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:VC/static/libeay32MTd>)
    target_link_libraries(${cur_project}
      $<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:VC/static/ssleay32MT>
      $<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:VC/static/libeay32MT>)
    target_link_libraries(${cur_project}
      legacy_stdio_definitions.lib Shlwapi kernel32 user32 gdi32 winspool comdlg32
      advapi32 shell32 ole32 oleaut32 uuid odbc32 odbccp32 crypt32)
  endif (NOT MSVC)
endmacro()
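
# Illustrative only, not part of the original file: a typical per-target call
# sequence for the helpers above would be, e.g.:
#   create_build_folder(rippled)
#   set_startup_project(rippled)
#   link_common_libraries(rippled)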

Builds/CMake/Findjemalloc.cmake (new file, 47 lines)
@@ -0,0 +1,47 @@
# - Try to find jemalloc
# Once done this will define
#  JEMALLOC_FOUND - System has jemalloc
#  JEMALLOC_INCLUDE_DIRS - The jemalloc include directories
#  JEMALLOC_LIBRARIES - The libraries needed to use jemalloc

if(NOT USE_BUNDLED_JEMALLOC)
  find_package(PkgConfig)
  if (PKG_CONFIG_FOUND)
    pkg_check_modules(PC_JEMALLOC QUIET jemalloc)
  endif()
else()
  set(PC_JEMALLOC_INCLUDEDIR)
  set(PC_JEMALLOC_INCLUDE_DIRS)
  set(PC_JEMALLOC_LIBDIR)
  set(PC_JEMALLOC_LIBRARY_DIRS)
  set(LIMIT_SEARCH NO_DEFAULT_PATH)
endif()

set(JEMALLOC_DEFINITIONS ${PC_JEMALLOC_CFLAGS_OTHER})

find_path(JEMALLOC_INCLUDE_DIR jemalloc/jemalloc.h
          PATHS ${PC_JEMALLOC_INCLUDEDIR} ${PC_JEMALLOC_INCLUDE_DIRS}
          ${LIMIT_SEARCH})

# If we're asked to use static linkage, add libjemalloc.a as a preferred library name.
if(JEMALLOC_USE_STATIC)
  list(APPEND JEMALLOC_NAMES
    "${CMAKE_STATIC_LIBRARY_PREFIX}jemalloc${CMAKE_STATIC_LIBRARY_SUFFIX}")
endif()

list(APPEND JEMALLOC_NAMES jemalloc)

find_library(JEMALLOC_LIBRARY NAMES ${JEMALLOC_NAMES}
             HINTS ${PC_JEMALLOC_LIBDIR} ${PC_JEMALLOC_LIBRARY_DIRS}
             ${LIMIT_SEARCH})

set(JEMALLOC_LIBRARIES ${JEMALLOC_LIBRARY})
set(JEMALLOC_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})

include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set JEMALLOC_FOUND to TRUE
# if all listed variables are TRUE
find_package_handle_standard_args(JeMalloc DEFAULT_MSG
                                  JEMALLOC_LIBRARY JEMALLOC_INCLUDE_DIR)

mark_as_advanced(JEMALLOC_INCLUDE_DIR JEMALLOC_LIBRARY)
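
# Illustrative usage, not part of this module; it mirrors how the build
# boilerplate above consumes the results:
#   find_package(jemalloc REQUIRED)
#   include_directories(SYSTEM ${JEMALLOC_INCLUDE_DIRS})
#   link_libraries(${JEMALLOC_LIBRARIES})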

Builds/Test.py (411 lines)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 # This file is part of rippled: https://github.com/ripple/rippled
-# Copyright (c) 2012 - 2015 Ripple Labs Inc.
+# Copyright (c) 2012 - 2017 Ripple Labs Inc.
 #
 # Permission to use, copy, modify, and/or distribute this software for any
 # purpose with or without fee is hereby granted, provided that the above
@@ -27,6 +27,11 @@ the -- flag - like this:
 
 ./Builds/Test.py -- -j4   # Pass -j4 to scons.
 
+To build with CMake, use the --cmake flag, or any of the specific configuration
+flags
+
+./Builds/Test.py --cmake -- -j4   # Pass -j4 to cmake --build
+
 Common problems:
 
@@ -48,18 +53,71 @@ import itertools
 import os
 import platform
 import re
-import subprocess
+import shutil
 import sys
+import subprocess
+
+
+def powerset(iterable):
+    """powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"""
+    s = list(iterable)
+    return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s) + 1))
+
 IS_WINDOWS = platform.system().lower() == 'windows'
+IS_OS_X = platform.system().lower() == 'darwin'
+
+# CMake
+if IS_WINDOWS:
+    CMAKE_UNITY_CONFIGS = ['Debug', 'Release']
+    CMAKE_NONUNITY_CONFIGS = ['DebugClassic', 'ReleaseClassic']
+else:
+    CMAKE_UNITY_CONFIGS = []
+    CMAKE_NONUNITY_CONFIGS = []
+CMAKE_UNITY_COMBOS = { '' : [['rippled', 'rippled_classic'], CMAKE_UNITY_CONFIGS],
+    '.nounity' : [['rippled', 'rippled_unity'], CMAKE_NONUNITY_CONFIGS] }
+
 if IS_WINDOWS:
-    BINARY_RE = re.compile(r'build\\([^\\]+)\\rippled.exe')
+    CMAKE_DIR_TARGETS = { ('msvc' + unity,) : targets for unity, targets in
+        CMAKE_UNITY_COMBOS.items() }
+elif IS_OS_X:
+    CMAKE_DIR_TARGETS = { (build + unity,) : targets
+        for build in ['debug', 'release']
+        for unity, targets in CMAKE_UNITY_COMBOS.items() }
 else:
-    BINARY_RE = re.compile(r'build/([^/]+)/rippled')
+    CMAKE_DIR_TARGETS = { (cc + "." + build + unity,) : targets
+        for cc in ['gcc', 'clang']
+        for build in ['debug', 'release', 'coverage', 'profile']
+        for unity, targets in CMAKE_UNITY_COMBOS.items() }
 
-ALL_TARGETS = ['debug', 'release']
+# list of tuples of all possible options
+if IS_WINDOWS or IS_OS_X:
+    CMAKE_ALL_GENERATE_OPTIONS = [tuple(x) for x in powerset(['-GNinja', '-Dassert=true'])]
+else:
+    CMAKE_ALL_GENERATE_OPTIONS = list(set(
+        [tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=address'])] +
+        [tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=thread'])]))
+
+# Scons
+if IS_WINDOWS or IS_OS_X:
+    ALL_TARGETS = [('debug',), ('release',)]
+else:
+    ALL_TARGETS = [(cc + "." + target,)
+        for cc in ['gcc', 'clang']
+        for target in ['debug', 'release', 'coverage', 'profile',
+            'debug.nounity', 'release.nounity', 'coverage.nounity', 'profile.nounity']]
+
+# list of tuples of all possible options
+if IS_WINDOWS or IS_OS_X:
+    ALL_OPTIONS = [tuple(x) for x in powerset(['--ninja', '--assert'])]
+else:
+    ALL_OPTIONS = list(set(
+        [tuple(x) for x in powerset(['--ninja', '--static', '--assert', '--sanitize=address'])] +
+        [tuple(x) for x in powerset(['--ninja', '--static', '--assert', '--sanitize=thread'])]))
+
+# list of tuples of all possible options + all possible targets
+ALL_BUILDS = [options + target
+    for target in ALL_TARGETS
+    for options in ALL_OPTIONS]
 
 parser = argparse.ArgumentParser(
     description='Test.py - run ripple tests'
@@ -75,55 +133,134 @@ parser.add_argument(
     '--keep_going', '-k',
     action='store_true',
     help='Keep going after one configuration has failed.',
 )
 
 parser.add_argument(
     '--silent', '-s',
     action='store_true',
     help='Silence all messages except errors',
 )
 
 parser.add_argument(
     '--verbose', '-v',
     action='store_true',
     help=('Report more information about which commands are executed and the '
           'results.'),
 )
 
 parser.add_argument(
     '--test', '-t',
     default='',
     help='Add a prefix for unit tests',
 )
 
 parser.add_argument(
-    'scons_args',
+    '--clean', '-c',
+    action='store_true',
+    help='delete all build artifacts after testing',
+)
+
+parser.add_argument(
+    '--quiet', '-q',
+    action='store_true',
+    help='Reduce output where possible (unit tests)',
+)
+
+# Scons and CMake parameters are too different to run
+# both side-by-side
+pgroup = parser.add_mutually_exclusive_group()
+
+pgroup.add_argument(
+    '--cmake',
+    action='store_true',
+    help='Build using CMake.',
+)
+
+pgroup.add_argument(
+    '--scons',
+    action='store_true',
+    help='Build using Scons. Default behavior.')
+
+parser.add_argument(
+    '--dir', '-d',
     default=(),
-    nargs='*'
+    nargs='*',
+    help='Specify one or more CMake dir names. Implies --cmake. '
+         'Will also be used as -Dtarget=<dir> running cmake.'
+)
+
+parser.add_argument(
+    '--target',
+    default=(),
+    nargs='*',
+    help='Specify one or more CMake build targets. Implies --cmake. '
+         'Will be used as --target <target> running cmake --build.'
 )
+
+parser.add_argument(
+    '--config',
+    default=(),
+    nargs='*',
+    help='Specify one or more CMake build configs. Implies --cmake. '
+         'Will be used as --config <config> running cmake --build.'
+)
+
+parser.add_argument(
+    '--generator_option',
+    action='append',
+    help='Specify a CMake generator option. Repeat for multiple options. '
+         'Implies --cmake. Will be passed to the cmake generator. '
+         'Due to limits of the argument parser, arguments starting with \'-\' '
+         'must be attached to this option. e.g. --generator_option=-GNinja.')
+
+parser.add_argument(
+    '--build_option',
+    action='append',
+    help='Specify a build option. Repeat for multiple options. Implies --cmake. '
+         'Will be passed to the build tool via cmake --build. '
+         'Due to limits of the argument parser, arguments starting with \'-\' '
+         'must be attached to this option. e.g. --build_option=-j8.')
+
+parser.add_argument(
+    'extra_args',
+    default=(),
+    nargs='*',
+    help='Extra arguments are passed through to the tools'
+)
 
 ARGS = parser.parse_args()
 
-def shell(*cmd, **kwds):
-    "Execute a shell command and return the output."
-    silent = kwds.pop('silent', ARGS.silent)
-    verbose = not silent and kwds.pop('verbose', ARGS.verbose)
-    if verbose:
-        print('$', ' '.join(cmd))
-    kwds['shell'] = IS_WINDOWS
+def decodeString(line):
+    # Python 2 vs. Python 3
+    if isinstance(line, str):
+        return line
+    else:
+        return line.decode()
+
+
+def shell(cmd, args=(), silent=False):
+    """Execute a shell command and return the output."""
+    silent = ARGS.silent or silent
+    verbose = not silent and ARGS.verbose
+    if verbose:
+        print('$' + cmd, *args)
+
+    command = (cmd,) + args
+
+    # shell is needed in Windows to find scons in the path
     process = subprocess.Popen(
-        cmd,
+        command,
         stdin=subprocess.PIPE,
         stdout=subprocess.PIPE,
         stderr=subprocess.STDOUT,
-        **kwds)
+        shell=IS_WINDOWS)
     lines = []
     count = 0
-    for line in process.stdout:
-        lines.append(line)
+    # readline returns '' at EOF
+    for line in iter(process.stdout.readline, ''):
+        decoded = decodeString(line)
+        lines.append(decoded)
         if verbose:
-            print(line, end='')
+            print(decoded, end='')
         elif not silent:
             count += 1
             if count >= 80:
@@ -137,55 +274,217 @@ def shell(*cmd, **kwds):
     process.wait()
     return process.returncode, lines
 
-if __name__ == '__main__':
-    args = list(ARGS.scons_args)
-    if ARGS.all:
-        for a in ALL_TARGETS:
-            if a not in args:
-                args.append(a)
-    print('Building:', *(args or ['(default)']))
-
-    # Build everything.
-    resultcode, lines = shell('scons', *args)
-    if resultcode:
-        print('Build FAILED:')
-        if not ARGS.verbose:
-            print(*lines, sep='')
-        exit(1)
-
-    # Now extract the executable names and corresponding targets.
+
+def run_tests(args):
     failed = []
+    if IS_WINDOWS:
+        binary_re = re.compile(r'build\\([^\\]+)\\rippled.exe')
+    else:
+        binary_re = re.compile(r'build/([^/]+)/rippled')
-    _, lines = shell('scons', '-n', '--tree=derived', *args, silent=True)
+    _, lines = shell('scons', ('-n', '--tree=derived',) + args, silent=True)
     for line in lines:
-        match = BINARY_RE.search(line)
+        match = binary_re.search(line)
         if match:
             executable, target = match.group(0, 1)
 
             print('Unit tests for', target)
             testflag = '--unittest'
+            quiet = ''
             if ARGS.test:
                 testflag += ('=' + ARGS.test)
+            if ARGS.quiet:
+                quiet = '-q'
+            resultcode, lines = shell(executable, (testflag, quiet,))
 
-            resultcode, lines = shell(executable, testflag)
             if resultcode:
-                print('ERROR:', *lines, sep='')
+                if not ARGS.verbose:
+                    print('ERROR:', *lines, sep='')
                 failed.append([target, 'unittest'])
                 if not ARGS.keep_going:
                     break
-            ARGS.verbose and print(*lines, sep='')
 
-            print('npm tests for', target)
-            resultcode, lines = shell('npm', 'test', '--rippled=' + executable)
-            if resultcode:
-                print('ERROR:\n', *lines, sep='')
-                failed.append([target, 'npm'])
-                if not ARGS.keep_going:
-                    break
-            else:
-                ARGS.verbose and print(*lines, sep='')
-
-    if failed:
-        print('FAILED:', *(':'.join(f) for f in failed))
-        exit(1)
-    else:
-        print('Success')
+    return failed
+
+
+def run_build(args=None):
+    print('Building:', *args or ('(default)',))
+    resultcode, lines = shell('scons', args)
+
+    if resultcode:
+        print('Build FAILED:')
+        if not ARGS.verbose:
+            print(*lines, sep='')
+        sys.exit(1)
+    if '--ninja' in args:
+        resultcode, lines = shell('ninja')
+
+        if resultcode:
+            print('Ninja build FAILED:')
+            if not ARGS.verbose:
+                print(*lines, sep='')
+            sys.exit(1)
+
+
+def get_cmake_dir(cmake_dir):
+    return os.path.join('build' , 'cmake' , cmake_dir)
+
+
+def run_cmake(directory, cmake_dir, args):
+    print('Generating build in', directory, 'with', *args or ('default options',))
+    old_dir = os.getcwd()
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+    os.chdir(directory)
+    if IS_WINDOWS and not any(arg.startswith("-G") for arg in args) and not os.path.exists("CMakeCache.txt"):
+        if '--ninja' in args:
+            args += ( '-GNinja', )
+        else:
+            args += ( '-GVisual Studio 14 2015 Win64', )
+    args += ( '-Dtarget=' + cmake_dir, os.path.join('..', '..', '..'), )
+    resultcode, lines = shell('cmake', args)
+
+    if resultcode:
+        print('Generating FAILED:')
+        if not ARGS.verbose:
+            print(*lines, sep='')
+        sys.exit(1)
+
+    os.chdir(old_dir)
+
+
+def run_cmake_build(directory, target, config, args):
+    print('Building', target, config, 'in', directory, 'with', *args or ('default options',))
+    build_args=('--build', directory)
+    if target:
+        build_args += ('--target', target)
+    if config:
+        build_args += ('--config', config)
+    if args:
+        build_args += ('--',)
+        build_args += tuple(args)
+    resultcode, lines = shell('cmake', build_args)
+
+    if resultcode:
+        print('Build FAILED:')
+        if not ARGS.verbose:
+            print(*lines, sep='')
+        sys.exit(1)
+
+
+def run_cmake_tests(directory, target, config):
+    failed = []
+    if IS_WINDOWS:
+        target += '.exe'
+    executable = os.path.join(directory, config if config else 'Debug', target)
+    if(not os.path.exists(executable)):
+        executable = os.path.join(directory, target)
+    print('Unit tests for', executable)
+    testflag = '--unittest'
+    quiet = ''
+    if ARGS.test:
+        testflag += ('=' + ARGS.test)
+    if ARGS.quiet:
+        quiet = '-q'
+    resultcode, lines = shell(executable, (testflag, quiet,))
+
+    if resultcode:
+        if not ARGS.verbose:
+            print('ERROR:', *lines, sep='')
+        failed.append([target, 'unittest'])
+
+    return failed
+
+
+def main():
+    all_failed = []
+
+    if ARGS.dir or ARGS.target or ARGS.config or ARGS.build_option or ARGS.generator_option:
+        ARGS.cmake=True
+
+    if not ARGS.cmake:
+        if ARGS.all:
+            to_build = ALL_BUILDS
+        else:
+            to_build = [tuple(ARGS.extra_args)]
+
+        for build in to_build:
+            args = ()
+            # additional arguments come first
+            for arg in list(ARGS.extra_args):
+                if arg not in build:
+                    args += (arg,)
+            args += build
+
+            run_build(args)
+            failed = run_tests(args)
+
+            if failed:
+                print('FAILED:', *(':'.join(f) for f in failed))
+                if not ARGS.keep_going:
+                    sys.exit(1)
+                else:
+                    all_failed.extend([','.join(build), ':'.join(f)]
+                        for f in failed)
+            else:
+                print('Success')
+
+            if ARGS.clean:
+                shutil.rmtree('build')
+                if '--ninja' in args:
+                    os.remove('build.ninja')
+                    os.remove('.ninja_deps')
+                    os.remove('.ninja_log')
+    else:
+        if ARGS.all:
+            build_dir_targets = CMAKE_DIR_TARGETS
+            generator_options = CMAKE_ALL_GENERATE_OPTIONS
+        else:
+            build_dir_targets = { tuple(ARGS.dir) : [ARGS.target, ARGS.config] }
+            if ARGS.generator_option:
+                generator_options = [tuple(ARGS.generator_option)]
+            else:
+                generator_options = [tuple()]
+
+        if not build_dir_targets:
+            # Let CMake choose the build tool.
+            build_dir_targets = { () : [] }
+
+        if ARGS.build_option:
+            ARGS.build_option = ARGS.build_option + list(ARGS.extra_args)
+        else:
+            ARGS.build_option = list(ARGS.extra_args)
+
+        for args in generator_options:
+            for build_dirs, (build_targets, build_configs) in build_dir_targets.items():
+                if not build_dirs:
+                    build_dirs = ('default',)
+                if not build_targets:
+                    build_targets = ('rippled',)
+                if not build_configs:
+                    build_configs = ('',)
+                for cmake_dir in build_dirs:
+                    cmake_full_dir = get_cmake_dir(cmake_dir)
+                    run_cmake(cmake_full_dir, cmake_dir, args)
+
+                    for target in build_targets:
+                        for config in build_configs:
+                            run_cmake_build(cmake_full_dir, target, config, ARGS.build_option)
+                            failed = run_cmake_tests(cmake_full_dir, target, config)
+
+                            if failed:
+                                print('FAILED:', *(':'.join(f) for f in failed))
+                                if not ARGS.keep_going:
+                                    sys.exit(1)
+                                else:
+                                    all_failed.extend([decodeString(cmake_dir +
+                                        "." + target + "." + config), ':'.join(f)]
+                                        for f in failed)
+                            else:
+                                print('Success')
+                    if ARGS.clean:
+                        shutil.rmtree(cmake_full_dir)
+
+    if all_failed:
+        if len(all_failed) > 1:
+            print()
+        print('FAILED:', *(':'.join(f) for f in all_failed))
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main()
+    sys.exit(0)

Builds/Ubuntu/build_clang_libs.sh (new executable file, 82 lines)
@@ -0,0 +1,82 @@
#!/usr/bin/env bash

#
# This script installs boost and protobuf built with clang. This is needed on
# Ubuntu 15.10 when building with clang.
# It will build these in a 'clang' subdirectory that it creates below the
# directory this script is run from. If a clang directory already exists,
# the script will refuse to run.
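#
# Typical use, illustrative and not part of the original script: run it from a
# scratch directory, then point the build at the results through the
# environment variables read by the CMake configuration above, e.g.:
#   ./build_clang_libs.sh
#   export CLANG_BOOST_ROOT=$PWD/clang/boost_1_60_0
#   export CLANG_PROTOBUF_ROOT=$PWD/clang/protobuf-2.6.1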

if hash lsb_release 2>/dev/null; then
    if [ $(lsb_release -si) == "Ubuntu" ]; then
        ubuntu_release=$(lsb_release -sr)
    fi
fi

if [ -z "${ubuntu_release}" ]; then
    echo "System not supported"
    exit 1
fi

if ! hash clang 2>/dev/null; then
    clang_version=3.7
    if [ ${ubuntu_release} == "16.04" ]; then
        clang_version=3.8
    fi
    sudo apt-get -y install clang-${clang_version}
    update-alternatives --install /usr/bin/clang clang /usr/bin/clang-${clang_version} 99 clang++
    hash -r
    if ! hash clang 2>/dev/null; then
        echo "Please install clang"
        exit 1
    fi
fi

if [ ${ubuntu_release} != "16.04" ] && [ ${ubuntu_release} != "15.10" ]; then
    echo "clang specific boost and protobuf not needed"
    exit 0
fi

if [ -d clang ]; then
    echo "clang directory already exists. Cowardly refusing to run"
    exit 1
fi

if ! hash wget 2>/dev/null; then
    sudo apt-get -y install wget
    hash -r
    if ! hash wget 2>/dev/null; then
        echo "Please install wget"
        exit 1
    fi
fi

num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # physical cores

mkdir clang
pushd clang > /dev/null

# Install protobuf
pb=protobuf-2.6.1
pb_tar=${pb}.tar.gz
wget -O ${pb_tar} https://github.com/google/protobuf/releases/download/v2.6.1/${pb_tar}
tar xf ${pb_tar}
rm ${pb_tar}
pushd ${pb} > /dev/null
./configure CC=clang CXX=clang++ CXXFLAGS='-std=c++14 -O3 -g'
make -j${num_procs}
popd > /dev/null

# Install boost
boost_ver=1.60.0
bd=boost_${boost_ver//./_}
bd_tar=${bd}.tar.gz
wget -O ${bd_tar} http://sourceforge.net/projects/boost/files/boost/${boost_ver}/${bd_tar}
tar xf ${bd_tar}
rm ${bd_tar}
pushd ${bd} > /dev/null
./bootstrap.sh
./b2 toolset=clang -j${num_procs}
popd > /dev/null

popd > /dev/null
@@ -4,7 +4,7 @@
 # This script builds boost with the correct ABI flags for ubuntu
 #
 
-version=59
+version=63
 patch=0
 
 if hash lsb_release 2>/dev/null; then
@@ -25,7 +25,7 @@ if [ ${ubuntu_release} == "12.04" ]; then
     add-apt-repository ppa:ubuntu-toolchain-r/test
     apt-get update
     apt-get -y upgrade
-    apt-get -y install curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties boost1.57-all-dev nodejs g++-5 g++-4.9
+    apt-get -y install curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties boost1.57-all-dev g++-5 g++-4.9
     update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 99 --slave /usr/bin/g++ g++ /usr/bin/g++-5
     update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.9 99 --slave /usr/bin/g++ g++ /usr/bin/g++-4.9
     exit 0
@@ -33,21 +33,25 @@ fi
 
 if [ ${ubuntu_release} == "14.04" ] || [ ${ubuntu_release} == "15.04" ]; then
     apt-get install python-software-properties
     echo "deb [arch=amd64] https://mirrors.ripple.com/ubuntu/ trusty stable contrib" | sudo tee /etc/apt/sources.list.d/ripple.list
     wget -O- -q https://mirrors.ripple.com/mirrors.ripple.com.gpg.key | sudo apt-key add -
     add-apt-repository ppa:ubuntu-toolchain-r/test
     apt-get update
     apt-get -y upgrade
-    apt-get -y install curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties boost-all-dev nodejs g++-5 g++-4.9
+    apt-get -y install curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties boost-all-dev g++-5 g++-4.9
     update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 99 --slave /usr/bin/g++ g++ /usr/bin/g++-5
     update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.9 99 --slave /usr/bin/g++ g++ /usr/bin/g++-4.9
     exit 0
 fi
 
-if [ ${ubuntu_release} == "15.10" ]; then
+# Test if the 1st parameter has a version number greater than or equal to the 2nd
+function version_check() { test "$(printf '%s\n' "$@" | sort -V | tail -n 1)" == "$1"; }
+
+# this should work for versions greater than 15.10
+if version_check ${ubuntu_release} 15.10; then
     apt-get update
     apt-get -y upgrade
-    apt-get -y install python-software-properties curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties libboost-all-dev nodejs
+    apt-get -y install python-software-properties curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties libboost-all-dev
     exit 0
 fi
 
@@ -17,8 +17,7 @@ software components:
 * [Google Protocol Buffers Compiler](README.md#install-google-protocol-buffers-compiler)
 * (Optional) [Python and Scons](README.md#optional-install-python-and-scons)
 * [OpenSSL Library](README.md#install-openssl)
-* [Boost 1.59 library](README.md#build-boost)
+* [Boost library](README.md#build-boost)
-* [Node.js](README.md#install-nodejs)
 
 ## Install Software
 
@@ -84,8 +83,8 @@ for Visual Studio 2015 support.
 [Download OpenSSL.](http://slproweb.com/products/Win32OpenSSL.html)
 There will be four variants available:
 
-1. 64-bit. Use this if you are running 64-bit windows. As of this writing, the link is called: "Win64 OpenSSL v1.0.2d".
+1. 64-bit. Use this if you are running 64-bit windows. As of this writing, the link is called: "Win64 OpenSSL v1.0.2j".
-2. 64-bit light - Don't use this. It is missing files needed to build rippled. As of this writing, the link is called: "Win64 OpenSSL v1.0.2d Light"
+2. 64-bit light - Don't use this. It is missing files needed to build rippled. As of this writing, the link is called: "Win64 OpenSSL v1.0.2j Light"
 
 Run the installer, and choose an appropriate location for your OpenSSL
 installation. In this guide we use **C:\lib\OpenSSL-Win64** as the
@@ -108,8 +107,13 @@ unpacking it, open a **Developer Command Prompt** for
 Visual Studio, change to the directory containing boost, then
 bootstrap the build tools:
 
+(As of this writing, the most recent version of boost is 1.62.0, which
+will unpack into a directory named `boost_1_62_0`. For higher versions
+of boost, adjust the directories provided in these examples as
+appropriate.)
+
 ```powershell
-cd C:\lib\boost_1_59_0
+cd C:\lib\boost_1_62_0
 bootstrap
 ```
 
@@ -119,7 +123,7 @@ affected by changes in outside files. Therefore, it is necessary to build the
 required boost static libraries using this command:
 
 ```powershell
-bjam --toolset=msvc-14.0 --build-type=complete variant=debug,release link=static runtime-link=static address-model=64
+bjam --toolset=msvc-14.0 address-model=64 architecture=x86 link=static threading=multi runtime-link=shared,static stage --stagedir=stage64
 ```
 
 Building the boost libraries may take considerable time. When the build process
@@ -161,7 +165,7 @@ git checkout master
 ### Configure Library Paths
 
 Open the solution file located at **Builds/Visual Studio 2015/ripple.sln**
-and select the "View->Other Windows->Property Manager" to bring up the Property Manager.
+and select the "View->Property Manager" to bring up the Property Manager.
 Expand the *debug | x64* section and
 double click the *Microsoft.Cpp.x64.user* property sheet to bring up the
 *Property Pages* dialog. These are global properties applied to all
@@ -237,9 +241,7 @@ and then choose the **Build->Build Solution** menu item.
 
 # Unit Tests (Recommended)
 
-## Internal
-
-The internal rippled unit tests are written in C++ and are part
+The rippled unit tests are written in C++ and are part
 of the rippled executable.
 
 From a Windows console, run the unit tests:
@@ -250,108 +252,4 @@ From a Windows console, run the unit tests:
 
 Substitute the correct path to the executable to test different builds.
 
-## External
-
-The external rippled unit tests are written in Javascript using Node.js,
-and utilize the mocha unit test framework. To run the unit tests, it
-will be necessary to perform the following steps:
-
-### Install Node.js
-
-[Install Node.js](http://nodejs.org/download/). We recommend the Windows
-installer (**.msi** file) as it takes care of updating the *PATH* environment
-variable so that scripts can find the command. On Windows systems,
-**Node.js** comes with **npm**. A separate installation of **npm**
-is not necessary.
-
-### Create node_modules
-
-Open a windows console. From the root of your local rippled repository
-directory, invoke **npm** to bring in the necessary components:
-
-```
-npm install
-```
-
-If you get an error that looks like
-
-```
-Error: ENOENT, stat 'C:\Users\username\AppData\Roaming\npm'
-```
-
-simply create the indicated folder and try again.
-
-### Create a test config.js
-
-From a *bash* shell (installed with Git for Windows), copy the
-example configuration file into the appropriate location:
-
-```
-cp test/config-example.js test/config.js
-```
-
-Edit your version of test/config.js to reflect the correct path to the rippled executable:
-
-```
-exports.default_server_config = {
-    // Where to find the binary.
-    rippled_path: path.resolve(__dirname, "../build/msvc.debug/rippled.exe")
-};
-```
-
-Also in **test/config.js**, change any occurrences of the
-IP address *0.0.0.0* to *127.0.0.1*.
-
-### Run Tests
-
-From a windows console, run the unit tests:
-
-```
-npm test
-```
-
-Alternatively, run an individual test using mocha:
-
-```sh
-node_modules/mocha/bin/mocha test/account_tx-test.js
-```
-
-* NOTE: The version of ripple-lib provided by the npm install
-facility is usually slightly behind the develop branch of the
-authoritative ripple-lib repository. Therefore, some tests might fail.
-
-### Development ripple-lib
-
-To use the latest branch of **ripple-lib** during the unit tests,
-first clone the repository in a new location outside of your rippled
-repository. Then update the submodules. After, run **npm install**
-to set up the **node_modules** directory. Finally, install the
-**grunt** command line tools required to run **grunt** and
-build **ripple-lib**.
-
-```
-git clone git@github.com:ripple/ripple-lib.git
-cd ripple-lib
-git submodule update --init
-npm install
-npm install -g grunt-cli
-grunt
-```
-
-Now link this version of **ripple-lib** into the global packages:
-
-```
-sudo npm link
-```
-
-To make rippled use the newly linked global **ripple-lib** package
-instead of the one installed under **node_modules**, change
-directories to the local rippled repository and delete the old
-**ripple-lib** then link to the new one:
-
-```sh
-rm -rf node_modules/ripple-lib
-npm link ripple-lib
-```

(File diff suppressed because it is too large.)

(Binary image changed; not shown. Before: 59 KiB, after: 67 KiB.)
(Binary image changed; not shown. Before: 59 KiB, after: 66 KiB.)
@@ -1,26 +1,34 @@
 
 Microsoft Visual Studio Solution File, Format Version 12.00
 # Visual Studio 14
-VisualStudioVersion = 14.0.23107.0
+VisualStudioVersion = 14.0.25123.0
 MinimumVisualStudioVersion = 10.0.40219.1
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}"
 EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		debug.classic|x64 = debug.classic|x64
+		debug.classic|x86 = debug.classic|x86
 		debug|x64 = debug|x64
+		debug|x86 = debug|x86
 		release.classic|x64 = release.classic|x64
+		release.classic|x86 = release.classic|x86
 		release|x64 = release|x64
+		release|x86 = release|x86
 	EndGlobalSection
 	GlobalSection(ProjectConfigurationPlatforms) = postSolution
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.ActiveCfg = debug.classic|x64
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.Build.0 = debug.classic|x64
+		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x86.ActiveCfg = debug.classic|x64
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.ActiveCfg = debug|x64
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.Build.0 = debug|x64
+		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x86.ActiveCfg = debug|x64
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.ActiveCfg = release.classic|x64
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.Build.0 = release.classic|x64
+		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x86.ActiveCfg = release.classic|x64
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.ActiveCfg = release|x64
 		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.Build.0 = release|x64
+		{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x86.ActiveCfg = release|x64
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE

Builds/XCode/README.md (new file, 183 lines)
@@ -0,0 +1,183 @@
# macOS Build Instructions

## Important

We don't recommend macOS for rippled production use at this time. Currently, the
Ubuntu platform has received the highest level of quality assurance and
testing. That said, macOS is suitable for many development/test tasks.

## Prerequisites

You'll need macOS 10.8 or later.

To clone the source code repository, create branches for inspection or
modification, build rippled using clang, and run the system tests, you will need
these software components:

* [XCode](https://developer.apple.com/xcode/)
* [Homebrew](http://brew.sh/)
* [Git](http://git-scm.com/)
* [CMake](http://cmake.org/)

## Install Software

### Install XCode

If not already installed on your system, download and install XCode using the
App Store or [this link](https://developer.apple.com/xcode/).

For more info, see "Step 1: Download and Install the Command Line Tools"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac)

The command line tools can be installed through the terminal with the command:

```
xcode-select --install
```

### Install Homebrew

> "[Homebrew](http://brew.sh/) installs the stuff you need that Apple didn’t."

Open a terminal and type:

```
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
```

For more info, see "Step 3: Install Homebrew"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac)

### Install Git

```
brew update
brew install git
```

For more info, see "Step 4: Install Git"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac)

**NOTE**: To gain full featured access to the
[git-subtree](http://blogs.atlassian.com/2013/05/alternatives-to-git-submodule-git-subtree/)
functionality used in the rippled repository, we suggest Git version 1.8.3.2 or
later.

### Install CMake

Requires version 3.6.0 or later.

```
brew install cmake
```

`brew` will generally install the latest stable version of any package, which
should satisfy the cmake minimum version requirement for rippled.

### Install Package Config

```
brew install pkg-config
```

## Install/Build/Configure Dependencies

### Build Google Protocol Buffers Compiler

Building rippled on macOS requires `protoc` version 2.5.x or 2.6.x (later versions
do not work with rippled at this time).

Download [protobuf 2.6.1](https://github.com/google/protobuf/releases/download/v2.6.1/protobuf-2.6.1.tar.bz2).

We want to compile protocol buffers with clang/libc++:

```
tar xfvj protobuf-2.6.1.tar.bz2
cd protobuf-2.6.1
./configure CC=clang CXX=clang++ CXXFLAGS='-std=c++11 -stdlib=libc++ -O3 -g' LDFLAGS='-stdlib=libc++' LIBS="-lc++ -lc++abi"
make -j 4
sudo make install
```

If you have installed `protobuf` via brew - either directly or indirectly as a
dependency of some other package - this is likely to conflict with our specific
version requirements. The simplest way to avoid conflicts is to uninstall it.
`brew ls --versions protobuf` will list any versions of protobuf
you currently have installed.
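
If that command shows one or more installed versions, and nothing else on your
system depends on them, a minimal cleanup is:

```
brew uninstall protobuf
```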

### Install OpenSSL

```
brew install openssl
```

### Build Boost

We want to compile boost with clang/libc++.

Download [a release](https://sourceforge.net/projects/boost/files/boost/1.61.0/boost_1_61_0.tar.bz2)

Extract it to a folder, making note of where, open a terminal, then:

```
./bootstrap.sh
./b2 toolset=clang threading=multi runtime-link=static link=static cxxflags="-stdlib=libc++" linkflags="-stdlib=libc++" address-model=64
```

Create an environment variable `BOOST_ROOT` in one of your `rc` files, pointing
to the root of the extracted directory.
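
For example, assuming bash and the same hypothetical download location used in
the example later in this document, a single line appended to `~/.bash_profile`
will do:

```
echo 'export BOOST_ROOT=/Users/Abigail/Downloads/boost_1_61_0' >> ~/.bash_profile
```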

### Clone the rippled repository

From the terminal:

```
git clone git@github.com:ripple/rippled.git
cd rippled
```

Choose the master branch or one of the tagged releases listed on
[GitHub](https://github.com/ripple/rippled/releases).

```
git checkout master
```

or to test the latest release candidate, choose the `release` branch.

```
git checkout release
```

### Configure Library Paths

If you didn't persistently set the `BOOST_ROOT` environment variable to the
root of the extracted directory above, then you should set it temporarily.

For example, assuming your username were `Abigail` and you extracted Boost
1.61.0 in `/Users/Abigail/Downloads/boost_1_61_0`, you would do this in any
shell in which you want to build:

```
export BOOST_ROOT=/Users/Abigail/Downloads/boost_1_61_0
```

## Build

```
mkdir xcode_build && cd xcode_build
cmake -GXcode ..
```

There are a number of variables/options that our CMake files support and they
can be added to the above command as needed (e.g. `-Dassert=ON` to enable
asserts).
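
For instance, a sketch of a generation step with asserts enabled, run from the
same `xcode_build` directory as above:

```
cmake -GXcode -Dassert=ON ..
```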

After generation succeeds, the Xcode project file can be opened and used to
build and debug.

## Unit Tests (Recommended)

rippled builds a set of unit tests into the server executable. To run these unit
tests after building, pass the `--unittest` option to the compiled `rippled`
executable. The executable will exit after running the unit tests.
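
For example, if the Xcode generator produced a Debug binary (the exact output
path depends on your generator and configuration):

```
./xcode_build/Debug/rippled --unittest
```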
@@ -1 +0,0 @@
-Place XCode project file here!
8
Builds/build_all.sh
Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # number of physical cores

path=$(cd $(dirname $0) && pwd)
cd $(dirname $path)
${path}/Test.py -a -c --test=TxQ -- -j${num_procs}
${path}/Test.py -a -c -k --test=TxQ --cmake -- -j${num_procs}
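
A hypothetical invocation from the repository root is shown below; note that
`lscpu` is a Linux utility, so the script as written assumes a Linux host:

```
./Builds/build_all.sh
```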
541
CMakeLists.txt
Normal file
@@ -0,0 +1,541 @@
# !!! The official build system is SConstruct !!!
# This is an experimental cmake build file for rippled
#
# cmake support in rippled. Currently supports:
#
#  * unity/nounity debug/release
#  * running protobuf
#  * sanitizer builds
#  * optional release build with assert turned on
#  * `target` variable to easily set compiler/debug/unity
#    (i.e. -Dtarget=gcc.debug.nounity)
#  * gcc/clang/visual studio/xcode
#  * linux/mac/win
#  * gcc 4 ABI, when needed
#  * ninja builds
#  * check openssl version on linux
#  * static builds (swd TBD: needs to be tested by building & deploying on different systems)
#  * jemalloc enabled builds (linux and macos only)
#  * perf builds (linux only) - which just sets recommended compiler flags
#    for running perf on the executable
#
# Notes:
#  * Use the -G"Visual Studio 14 2015 Win64" generator, or the "VS2015 x86 x64
#    Cross Tools" Command Prompt on Windows. Without this a 32-bit project will be
#    created. There is no way to set the generator or force a 64-bit build in
#    CMakeLists.txt (setting CMAKE_GENERATOR_PLATFORM won't work). The best solution
#    may be to wrap cmake with a script.
#
#  * Ninja command line builds seem to work under Windows, but only from within
#    the "VS2015 x86 x64 Cross Tools" Command Prompt.
#
#  * It is not possible to generate a visual studio project on linux or
#    mac. The visual studio generator is only available on windows.
#
#  * The Visual Studio solution will be generated with two projects, one
#    unity, one non-unity. Which is the default depends on the nounity flag in
#    -Dtarget. Unity targets will create `rippled` and `rippled_classic`.
#    Non-unity targets will create `rippled` and `rippled_unity`. In either
#    case, only the `rippled` build will be enabled by default. It does
#    not appear possible to include both unity and non-unity configs in one
#    project and disable compilation based on configuration.
#
#  * The language is _much_ worse than python: poor documentation and "quirky"
#    language support (for example, generator expressions can only be used
#    in limited contexts and seem to work differently based on
#    context; set_property can set multiple values, add_compile_options
#    cannot, or is buggy).
#
#  * Could not call out to `sed` because cmake messed with the regular
#    expression before calling the external command. I did not see a way
#    around this.
#
#  * Makefile generators want to be single target. It wants a separate
#    directory for each target type. I saw some mentions on the web of
#    ways around this, but haven't looked into it. The visual studio project
#    does support debug/release configurations in the same project (but
#    not unity/non-unity).
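#
# Illustrative example of an out-of-source invocation using the `target`
# variable (the directory layout and target name here are assumptions):
#   mkdir -p build/gcc.debug.nounity && cd build/gcc.debug.nounity
#   cmake -Dtarget=gcc.debug.nounity ../..
#   cmake --build .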

############################################################

#########################################################
# CMAKE_C_COMPILER and CMAKE_CXX_COMPILER must be defined
# before the project statement; However, the project
# statement will clear CMAKE_BUILD_TYPE. CACHE variables,
# along with the order of this code, are used to work
# around these constraints.
#
# Don't put any code above or in this block, unless it
# has similar constraints.
cmake_minimum_required(VERSION 3.1.0)
set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/Builds/CMake")
include(CMakeFuncs)
set(openssl_min 1.0.2)
parse_target()
project(rippled)
#########################################################

if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
  set(dir "build")
  set(cmd "cmake")
  if (target)
    set(dir "${dir}/${target}")
    set(cmd "${cmd} -Dtarget=${target}")
  elseif(CMAKE_BUILD_TYPE)
    set(dir "${dir}/${CMAKE_BUILD_TYPE}")
    set(cmd "${cmd} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}")
  else()
    set(dir "${dir}/default")
  endif()
  set(cmd "${cmd} ${CMAKE_SOURCE_DIR}")

  message(FATAL_ERROR "Builds are not allowed in ${CMAKE_SOURCE_DIR}.\n"
    "Instead:\n"
    "1) Remove the CMakeCache.txt file and CMakeFiles directory "
    "from ${CMAKE_SOURCE_DIR}.\n"
    "2) Create a directory to hold your build files, for example: ${dir}.\n"
    "3) Change to that directory.\n"
    "4) Run cmake targetting ${CMAKE_SOURCE_DIR}, for example: ${cmd}")
endif()

if("${CMAKE_GENERATOR}" MATCHES "Visual Studio" AND
   NOT ("${CMAKE_GENERATOR}" MATCHES .*Win64.*))
  message(FATAL_ERROR "Visual Studio 32-bit build is unsupported. Use
    -G\"${CMAKE_GENERATOR} Win64\"")
endif()

setup_build_cache()

if(nonunity)
  get_cmake_property(allvars VARIABLES)
  string(REGEX MATCHALL "[^;]*(DEBUG|RELEASE)[^;]*" matchvars "${allvars}")
  foreach(var IN LISTS matchvars)
    string(REGEX REPLACE "(DEBUG|RELEASE)" "\\1CLASSIC" newvar ${var})
    set(${newvar} ${${var}})
  endforeach()

  get_cmake_property(allvars CACHE_VARIABLES)
  string(REGEX MATCHALL "[^;]*(DEBUG|RELEASE)[^;]*" matchvars "${allvars}")
  foreach(var IN LISTS matchvars)
    string(REGEX REPLACE "(DEBUG|RELEASE)" "\\1CLASSIC" newvar ${var})
    set(${newvar} ${${var}} CACHE STRING "Copied from ${var}")
  endforeach()
endif()

determine_build_type()

check_gcc4_abi()

############################################################

include_directories(
  src
  src/beast
  src/beast/include
  src/beast/extras
  src/nudb/include
  src/soci/src
  src/soci/include)

special_build_flags()

############################################################

use_boost(
  # resist the temptation to alphabetize these. coroutine
  # must come before context.
  chrono
  coroutine
  context
  date_time
  filesystem
  program_options
  regex
  system
  thread)

use_pthread()

use_openssl(${openssl_min})

use_protobuf()

setup_build_boilerplate()

############################################################

if (is_clang)
  set(rocks_db_system_header --system-header-prefix=rocksdb2)
else()
  unset(rocks_db_system_header)
endif()

set(soci_extra_includes
  -I"${CMAKE_SOURCE_DIR}/"src/soci/src/core
  -I"${CMAKE_SOURCE_DIR}/"src/soci/include/private
  -I"${CMAKE_SOURCE_DIR}/"src/sqlite)

############################################################

# Unity sources
prepend(beast_unity_srcs
  src/ripple/beast/unity/
  beast_insight_unity.cpp
  beast_net_unity.cpp
  beast_utility_unity.cpp)

prepend(ripple_unity_srcs
  src/ripple/unity/
  app_consensus.cpp
  app_ledger.cpp
  app_ledger_impl.cpp
  app_main1.cpp
  app_main2.cpp
  app_misc.cpp
  app_misc_impl.cpp
  app_paths.cpp
  app_tx.cpp
  conditions.cpp
  consensus.cpp
  core.cpp
  basics.cpp
  crypto.cpp
  ledger.cpp
  net.cpp
  overlay1.cpp
  overlay2.cpp
  peerfinder.cpp
  json.cpp
  protocol.cpp
  rpcx1.cpp
  rpcx2.cpp
  shamap.cpp
  server.cpp)

prepend(test_unity_srcs
  src/test/unity/
  app_test_unity1.cpp
  app_test_unity2.cpp
  basics_test_unity.cpp
  beast_test_unity1.cpp
  beast_test_unity2.cpp
  conditions_test_unity.cpp
  consensus_test_unity.cpp
  core_test_unity.cpp
  json_test_unity.cpp
  ledger_test_unity.cpp
  overlay_test_unity.cpp
  peerfinder_test_unity.cpp
  protocol_test_unity.cpp
  resource_test_unity.cpp
  rpc_test_unity.cpp
  server_test_unity.cpp
  server_status_test_unity.cpp
  shamap_test_unity.cpp
  jtx_unity1.cpp
  jtx_unity2.cpp
  csf_unity.cpp)

list(APPEND rippled_src_unity ${beast_unity_srcs} ${ripple_unity_srcs} ${test_unity_srcs})

add_with_props(rippled_src_unity src/test/unity/nodestore_test_unity.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

add_with_props(rippled_src_unity src/ripple/unity/nodestore.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

add_with_props(rippled_src_unity src/ripple/unity/soci_ripple.cpp ${soci_extra_includes})

list(APPEND ripple_unity_srcs ${beast_unity_srcs} ${test_unity_srcs}
  src/ripple/unity/nodestore.cpp
  src/ripple/unity/soci_ripple.cpp
  src/test/unity/nodestore_test_unity.cpp)

############################################################

# Non-unity sources
file(GLOB_RECURSE core_srcs src/ripple/core/*.cpp)
add_with_props(rippled_src_nonunity "${core_srcs}"
  -I"${CMAKE_SOURCE_DIR}/"src/soci/src/core
  -I"${CMAKE_SOURCE_DIR}/"src/sqlite)

set(non_unity_srcs ${core_srcs})

foreach(curdir
    beast/clock
    beast/container
    beast/insight
    beast/net
    beast/utility
    app
    basics
    conditions
    crypto
    consensus
    json
    ledger
    legacy
    net
    overlay
    peerfinder
    protocol
    rpc
    server
    shamap)
  file(GLOB_RECURSE cursrcs src/ripple/${curdir}/*.cpp)
  list(APPEND rippled_src_nonunity "${cursrcs}")
  list(APPEND non_unity_srcs "${cursrcs}")
endforeach()

file(GLOB_RECURSE nodestore_srcs src/ripple/nodestore/*.cpp
  src/test/nodestore/*.cpp)

add_with_props(rippled_src_nonunity "${nodestore_srcs}"
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

list(APPEND non_unity_srcs "${nodestore_srcs}")

# unit test sources
foreach(curdir
    app
    basics
    beast
    conditions
    consensus
    core
    csf
    json
    jtx
    ledger
    nodestore
    overlay
    peerfinder
    protocol
    resource
    rpc
    server
    shamap)
  file(GLOB_RECURSE cursrcs src/test/${curdir}/*.cpp)
  list(APPEND test_srcs "${cursrcs}")
endforeach()

add_with_props(rippled_src_nonunity "${test_srcs}"
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

list(APPEND non_unity_srcs "${test_srcs}")

if(WIN32 OR is_xcode)
  # Rippled headers. Only needed for IDEs.
  file(GLOB_RECURSE rippled_headers src/*.h src/*.hpp *.md)
  list(APPEND rippled_headers Builds/CMake/CMakeFuncs.cmake)
  foreach(curdir
      beast/asio
      beast/core
      beast/crypto
      beast/cxx17
      beast/hash
      proto
      resource
      validators
      websocket)
    file(GLOB_RECURSE cursrcs src/ripple/${curdir}/*.cpp)
    list(APPEND rippled_headers "${cursrcs}")
  endforeach()
  list(APPEND rippled_src_nonunity "${rippled_headers}")

  set_property(
    SOURCE ${rippled_headers}
    APPEND
    PROPERTY HEADER_FILE_ONLY
    true)
  # Doesn't work
  # $<OR:$<CONFIG:Debug>,$<CONFIG:Release>>)
endif()

if (WIN32 OR is_xcode)
  # Documentation sources. Only needed for IDEs.
  prepend(doc_srcs
    docs/
    Jamfile.v2
    boostbook.dtd
    consensus.qbk
    index.xml
    main.qbk
    quickref.xml
    reference.xsl
    source.dox)

  set_property(
    SOURCE ${doc_srcs}
    APPEND
    PROPERTY HEADER_FILE_ONLY
    true)
  # Doesn't work
  # $<OR:$<CONFIG:Debug>,$<CONFIG:Release>>)
endif()

############################################################

add_with_props(rippled_src_all src/ripple/unity/soci.cpp
  ${soci_extra_includes})

if (NOT is_msvc)
  set(no_unused_w -Wno-unused-function)
else()
  unset(no_unused_w)
endif()

add_with_props(rippled_src_all src/ripple/unity/secp256k1.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/secp256k1
  ${no_unused_w}
)

foreach(cursrc
    src/ripple/beast/unity/beast_hash_unity.cpp
    src/ripple/unity/beast.cpp
    src/ripple/unity/lz4.c
    src/ripple/unity/protobuf.cpp
    src/ripple/unity/ripple.proto.cpp
    src/ripple/unity/resource.cpp)

  add_with_props(rippled_src_all ${cursrc}
    ${rocks_db_system_header}
  )

endforeach()

if (NOT is_msvc)
  set(extra_props -Wno-array-bounds)
else()
  unset(extra_props)
endif()

add_with_props(rippled_src_all src/sqlite/sqlite_unity.c
  ${extra_props})

add_with_props(rippled_src_all src/ripple/unity/ed25519_donna.c
  -I"${CMAKE_SOURCE_DIR}/"src/ed25519-donna)

if (is_gcc)
  set(no_init_w -Wno-maybe-uninitialized)
else()
  unset(no_init_w)
endif()

add_with_props(rippled_src_all src/ripple/unity/rocksdb.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${no_init_w} ${rocks_db_system_header})

if (NOT is_msvc)
  set(no_unused_w -Wno-unused-function)
endif()

add_with_props(rippled_src_all src/ripple/unity/snappy.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${no_unused_w})

if (APPLE AND is_clang)
  list(APPEND rippled_src_all src/ripple/unity/beastobjc.mm)
endif()

list(APPEND rippled_src_unity "${rippled_src_all}")
list(APPEND rippled_src_nonunity "${rippled_src_all}")

############################################################

if (WIN32 OR is_xcode)
  group_sources(src)
  group_sources(docs)
  group_sources(Builds)
endif()

if(unity)
  add_executable(rippled ${rippled_src_unity} ${PROTO_HDRS})
  add_executable(rippled_classic EXCLUDE_FROM_ALL ${rippled_src_nonunity} ${PROTO_HDRS})
  set(other_target rippled_classic)
else()
  add_executable(rippled ${rippled_src_nonunity} ${PROTO_HDRS})
  add_executable(rippled_unity EXCLUDE_FROM_ALL ${rippled_src_unity} ${PROTO_HDRS})
  set(other_target rippled_unity)
endif()
list(APPEND targets "rippled")
list(APPEND targets ${other_target})
# Not the same as EXCLUDE_FROM_ALL. Prevents Visual Studio from building the
# other_target when the user builds the solution (default when pressing <F7>)
set_property(TARGET ${other_target} PROPERTY EXCLUDE_FROM_DEFAULT_BUILD true)

find_program(
  B2_EXE
  NAMES b2
  HINTS ${BOOST_ROOT}
  PATHS ${BOOST_ROOT}
  DOC "Location of the b2 build executable from Boost")
if(${B2_EXE} STREQUAL "B2_EXE-NOTFOUND")
  message(WARNING
    "Boost b2 executable not found. docs target will not be buildable")
elseif(NOT BOOST_ROOT)
  if(Boost_INCLUDE_DIRS)
    set(BOOST_ROOT ${Boost_INCLUDE_DIRS})
  else()
    get_filename_component(BOOST_ROOT ${B2_EXE} DIRECTORY)
  endif()
endif()
# The value for BOOST_ROOT will be determined based on
#  1) The environment BOOST_ROOT
#  2) The Boost_INCLUDE_DIRS found by `get_boost`
#  3) The folder the `b2` executable is found in.
# If those checks don't yield the correct path, BOOST_ROOT
# can be defined on the cmake command line:
#   cmake <path> -DBOOST_ROOT=<boost_path>
if(BOOST_ROOT)
  set(B2_PARAMS "-sBOOST_ROOT=${BOOST_ROOT}")
endif()

# Find bash to help Windows avoid file association problems
find_program(
  BASH_EXE
  NAMES bash sh
  DOC "Location of the bash shell executable"
)
if(${BASH_EXE} STREQUAL "BASH_EXE-NOTFOUND")
  message(WARNING
    "Unable to find bash executable. docs target may not be buildable")
  set(BASH_EXE "")
endif()

add_custom_target(docs
  COMMAND ${CMAKE_COMMAND} -E env "PATH=$ENV{PATH} " ${BASH_EXE} ./makeqbk.sh
  COMMAND ${B2_EXE} ${B2_PARAMS}
  BYPRODUCTS "${CMAKE_SOURCE_DIR}/docs/html/index.html"
  WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}/docs"
  SOURCES "${doc_srcs}"
)

set_startup_project(rippled)

foreach(target IN LISTS targets)
  target_link_libraries(${target}
    ${OPENSSL_LIBRARIES} ${PROTOBUF_LIBRARIES} ${SANITIZER_LIBRARIES})

  link_common_libraries(${target})
endforeach()

if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
  message(WARNING "Rippled requires a 64 bit target architecture.\n"
    "The most likely cause of this warning is trying to build rippled with a 32-bit OS.")
endif()
11
Jamroot
Normal file
@@ -0,0 +1,11 @@
#
# Copyright (c) 2013-2016 Vinnie Falco (vinnie dot falco at gmail dot com)
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#

import boost ;

boost.use-project ;
177
README.md
@@ -1,129 +1,94 @@
 ![image]()
 
-#The World’s Fastest and Most Secure Payment System
+**Do you work at a digital asset exchange or wallet provider?**
 
-**What is Ripple?**
+Please [contact us](mailto:support@ripple.com). We can help guide your integration.
 
-Ripple is the open-source, distributed payment protocol that enables instant
-payments with low fees, no chargebacks, and currency flexibility (for example
-dollars, yen, euros, bitcoins, or even loyalty points). Businesses of any size
-can easily build payment solutions such as banking or remittance apps, and
-accelerate the movement of money. Ripple enables the world to move value the
-way it moves information on the Internet.
-
-![image]()
+# What is Ripple?
+Ripple is a network of computers which use the [Ripple consensus algorithm](https://www.youtube.com/watch?v=pj1QVb1vlC0) to atomically settle and record
+transactions on a secure distributed database, the Ripple Consensus Ledger
+(RCL). Because of its distributed nature, the RCL offers transaction immutability
+without a central operator. The RCL contains a built-in currency exchange and its
+path-finding algorithm finds competitive exchange rates across order books
+and currency pairs.
+
+### Key Features
+- **Distributed**
+  - Direct account-to-account settlement with no central operator
+  - Decentralized global market for competitive FX
+- **Secure**
+  - Transactions are cryptographically signed using ECDSA or Ed25519
+  - Multi-signing capabilities
+- **Scalable**
+  - Capacity to process the world’s cross-border payments volume
+  - Easy access to liquidity through a competitive FX marketplace
 
-**What is a Gateway?**
-
-Ripple works with gateways: independent businesses which hold customer
-deposits in various currencies such as U.S. dollars (USD) or Euros (EUR),
-in exchange for providing cryptographically-signed issuances that users can
-send and trade with one another in seconds on the Ripple network. Within the
-protocol, exchanges between multiple currencies can occur atomically without
-any central authority to monitor them. Later, customers can withdraw their
-Ripple balances from the gateways that created those issuances.
+## Cross-border payments
+Ripple enables banks to settle cross-border payments in real-time, with
+end-to-end transparency, and at lower costs. Banks can provide liquidity
+for FX themselves or source it from third parties.
 
-**How do Ripple payments work?**
-
-A sender specifies the amount and currency the recipient should receive and
-Ripple automatically converts the sender’s available currencies using the
-distributed order books integrated into the Ripple protocol. Independent third
-parties acting as market makers provide liquidity in these order books.
+As Ripple adoption grows, so do the number of currencies and counterparties.
+Liquidity providers need to maintain accounts with each counterparty for
+each currency – a capital- and time-intensive endeavor that spreads liquidity
+thin. Further, some transactions, such as exotic currency trades, will require
+multiple trading parties, who each layer costs to the transaction. Thin
+liquidity and many intermediary trading parties make competitive pricing
+challenging.
+
+![image]()
+
+### XRP as a Bridge Currency
+Ripple can bridge even exotic currency pairs directly through XRP. Similar to
+USD in today’s currency market, XRP allows liquidity providers to focus on
+offering competitive FX rates on fewer pairs and adding depth to order books.
+Unlike USD, trading through XRP does not require bank accounts, service fees,
+counterparty risk, or additional operational costs. By using XRP, liquidity
+providers can specialize in certain currency corridors, reduce operational
+costs, and ultimately, offer more competitive FX pricing.
 
-Ripple uses a pathfinding algorithm that considers currency pairs when
-converting from the source to the destination currency. This algorithm searches
-for a series of currency swaps that gives the user the lowest cost. Since
-anyone can participate as a market maker, market forces drive fees to the
-lowest practical level.
+![image]()
 
-**What can you do with Ripple?**
+# rippled - Ripple server
+`rippled` is the reference server implementation of the Ripple
+protocol. To learn more about how to build and run a `rippled`
+server, visit https://ripple.com/build/rippled-setup/
 
-The protocol is entirely open-source and the network’s shared ledger is public
-information, so no central authority prevents anyone from participating. Anyone
-can become a market maker, create a wallet or a gateway, or monitor network
-behavior. Competition drives down spreads and fees, making the network useful
-to everyone.
+[](https://travis-ci.org/ripple/rippled)
+[](https://codecov.io/gh/ripple/rippled)
 
+### License
+`rippled` is open source and permissively licensed under the
+ISC license. See the LICENSE file for more details.
 
-###Key Protocol Features
-1. XRP is Ripple’s native [cryptocurrency]
-(http://en.wikipedia.org/wiki/Cryptocurrency) with a fixed supply that
-decreases slowly over time, with no mining. XRP acts as a bridge currency, and
-pays for transaction fees that protect the network against spam.
-![image]()
-
-2. Pathfinding discovers cheap and efficient payment paths through multiple
-[order books](https://www.ripplecharts.com) allowing anyone to [trade](https://www.rippletrade.com) anything. When two accounts aren’t linked by relationships of trust, the Ripple pathfinding engine considers intermediate links and order books to produce a set of possible paths the transaction can take. When the payment is processed, the liquidity along these paths is iteratively consumed in best-first order.
-![image]()
+#### Repository Contents
 
-3. [Consensus](https://www.youtube.com/watch?v=pj1QVb1vlC0) confirms
-transactions in an atomic fashion, without mining, ensuring efficient use of
-resources.
+| Folder  | Contents |
+|---------|----------|
+| ./bin   | Scripts and data files for Ripple integrators. |
+| ./build | Intermediate and final build outputs. |
+| ./Builds| Platform or IDE-specific project files. |
+| ./doc   | Documentation and example configuration files. |
+| ./src   | Source code. |
 
-[transact]: https://ripple.com/files/ripple-FIs.pdf
-[build]: https://ripple.com/build/
+Some of the directories under `src` are external repositories inlined via
+git-subtree. See the corresponding README for more details.
 
-[transact.png]: /images/transact.png
-[build.png]: /images/build.png
-[contribute.png]: /images/contribute.png
+## For more information:
 
-###Join The Ripple Community
-|![Transact][transact.png]|![Build][build.png]|![Contribute][contribute.png]|
-|:-----------------------:|:-----------------:|:---------------------------:|
-|[Transact on the fastest payment infrastructure][transact]|[Build Imaginative Apps][build]|Contribute to the Ripple Protocol Implementation|
+* [Ripple Knowledge Center](https://ripple.com/learn/)
+* [Ripple Developer Center](https://ripple.com/build/)
+* Ripple Whitepapers & Reports
+  * [Ripple Consensus Whitepaper](https://ripple.com/files/ripple_consensus_whitepaper.pdf)
+  * [Ripple Solutions Guide](https://ripple.com/files/ripple_solutions_guide.pdf)
 
-#rippled - Ripple P2P server
-
-##[](https://travis-ci.org/ripple/rippled)
-
-This is the repository for Ripple's `rippled`, reference P2P server.
-
-###Build instructions:
-* https://ripple.com/wiki/Rippled_build_instructions
-
-###Setup instructions:
-* https://ripple.com/wiki/Rippled_setup_instructions
-
-###Issues
-* https://ripplelabs.atlassian.net/browse/RIPD
-
-### Repository Contents
-
-#### ./bin
-Scripts and data files for Ripple integrators.
-
-#### ./build
-Intermediate and final build outputs.
-
-#### ./Builds
-Platform or IDE-specific project files.
-
-#### ./doc
-Documentation and example configuration files.
-
-#### ./src
-Source code directory. Some of the directories contained here are
-external repositories inlined via git-subtree, see the corresponding
-README for more details.
-
-#### ./test
-Javascript / Mocha tests.
-
-## License
-Ripple is open source and permissively licensed under the ISC license. See the
-LICENSE file for more details.
-
-###For more information:
-* Ripple Wiki - https://ripple.com/wiki/
-* Ripple Primer - https://ripple.com/ripple_primer.pdf
-* Ripple Primer (Market Making) - https://ripple.com/ripple-mm.pdf
-* Ripple Gateway Primer - https://ripple.com/ripple-gateways.pdf
-* Consensus - https://wiki.ripple.com/Consensus
+To learn about how Ripple is transforming global payments visit
+[https://ripple.com/contact/](https://ripple.com/contact/)
 
 - - -
 
-Copyright © 2015, Ripple Labs. All rights reserved.
+Copyright © 2017, Ripple Labs. All rights reserved.
 
-Portions of this document, including but not limited to the Ripple logo, images
-and image templates are the property of Ripple Labs and cannot be copied or
-used without permission.
+Portions of this document, including but not limited to the Ripple logo,
+images and image templates are the property of Ripple Labs and cannot be
+copied or used without permission.
2641
RELEASENOTES.md
Normal file
File diff suppressed because it is too large
302
SConstruct
@@ -61,10 +61,16 @@ The following environment variables modify the build environment:
     Path to the boost directory.
   OPENSSL_ROOT
     Path to the openssl directory.
-  PROTOBUF_DIR
-    Path to the protobuf directory. This is usually only needed when
-    the installed protobuf library uses a different ABI than clang
-    (as with ubuntu 15.10).
+  PROTOBUF_ROOT
+    Path to the protobuf directory.
+  CLANG_PROTOBUF_ROOT
+    Override the path to the protobuf directory for the clang toolset. This is
+    usually only needed when the installed protobuf library uses a different
+    ABI than clang (as with ubuntu 15.10).
+  CLANG_BOOST_ROOT
+    Override the path to the boost directory for the clang toolset. This is
+    usually only needed when the installed protobuf library uses a different
+    ABI than clang (as with ubuntu 15.10).
 
 The following extra options may be used:
   --ninja     Generate a `build.ninja` build file for the specified target
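
As a sketch, the environment for a clang build that needs its own
ABI-compatible libraries might then look like this (every path below is a
placeholder):

```
export BOOST_ROOT=/opt/boost_1_61_0
export PROTOBUF_ROOT=/opt/protobuf-2.6.1
export CLANG_BOOST_ROOT=/opt/boost_1_61_0-libcxx
export CLANG_PROTOBUF_ROOT=/opt/protobuf-2.6.1-libcxx
```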
@@ -73,6 +79,10 @@ The following extra options may be used:
 
   --static    On linux, link protobuf, openssl, libc++, and boost statically
 
+  --sanitize=[address, thread]  On gcc & clang, add sanitizer instrumentation
+
+  --assert    Enable asserts, even in release builds.
+
 GCC 5: If the gcc toolchain is used, gcc version 5 or better is required. On
 linux distros that ship with gcc 4 (ubuntu < 15.10), rippled will force gcc
 to use gcc4's ABI (there was an ABI change between versions). This allows us
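
As a sketch of how the new options combine with a build target (the
`toolchain.variant` target names here are assumed from the surrounding
SConstruct conventions):

```
scons --sanitize=address clang.debug
scons --assert clang.release
```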
@@ -105,13 +115,19 @@ TODO
 
 import collections
 import os
+import platform
 import subprocess
 import sys
 import textwrap
 import time
+import glob
 import SCons.Action
 
-sys.path.append(os.path.join('src', 'beast', 'site_scons'))
+if (not platform.machine().endswith('64')):
+    print('Warning: Detected {} architecture. Rippled requires a 64-bit OS.'.format(
+        platform.machine()));
+
+sys.path.append(os.path.join('src', 'ripple', 'beast', 'site_scons'))
 sys.path.append(os.path.join('src', 'ripple', 'site_scons'))
 
 import Beast
@@ -122,9 +138,15 @@ import scons_to_ninja
 AddOption('--ninja', dest='ninja', action='store_true',
           help='generate ninja build file build.ninja')
 
+AddOption('--sanitize', dest='sanitize', choices=['address', 'thread'],
+          help='Build with sanitizer support (gcc and clang only).')
+
 AddOption('--static', dest='static', action='store_true',
           help='On linux, link protobuf, openssl, libc++, and boost statically')
 
+AddOption('--assert', dest='assert', action='store_true',
+          help='Enable asserts, even in release mode')
+
 def parse_time(t):
     l = len(t.split())
     if l==5:
@@ -348,24 +370,9 @@ def config_base(env):
         ,{'HAVE_USLEEP' : '1'}
         ,{'SOCI_CXX_C11' : '1'}
         ,'_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS'
-        ,'-DBOOST_NO_AUTO_PTR'
+        ,'BOOST_NO_AUTO_PTR'
         ])
 
-    try:
-        BOOST_ROOT = os.path.normpath(os.environ['BOOST_ROOT'])
-        env.Append(LIBPATH=[
-            os.path.join(BOOST_ROOT, 'stage', 'lib'),
-        ])
-        env['BOOST_ROOT'] = BOOST_ROOT
-    except KeyError:
-        pass
-
-    try:
-        protobuf_dir = os.environ['PROTOBUF_DIR']
-        env.Append(LIBPATH=[protobuf_dir])
-    except KeyError:
-        pass
-
     if Beast.system.windows:
         try:
             OPENSSL_ROOT = os.path.normpath(os.environ['OPENSSL_ROOT'])
@@ -378,11 +385,13 @@ def config_base(env):
         except KeyError:
             pass
     elif Beast.system.osx:
-        OSX_OPENSSL_ROOT = '/usr/local/Cellar/openssl/'
-        most_recent = sorted(os.listdir(OSX_OPENSSL_ROOT))[-1]
-        openssl = os.path.join(OSX_OPENSSL_ROOT, most_recent)
-        env.Prepend(CPPPATH='%s/include' % openssl)
-        env.Prepend(LIBPATH=['%s/lib' % openssl])
+        try:
+            openssl = subprocess.check_output(['brew', '--prefix','openssl'],
+                stderr=subprocess.STDOUT).strip()
+            env.Prepend(CPPPATH='%s/include' % openssl)
+            env.Prepend(LIBPATH=['%s/lib' % openssl])
+        except:
+            pass
 
     # handle command-line arguments
     profile_jemalloc = ARGUMENTS.get('profile-jemalloc')
@@ -400,7 +409,7 @@ def add_static_libs(env, static_libs, dyn_libs=None):
     for k,l in [('STATICLIBS', static_libs or []), ('DYNAMICLIBS', dyn_libs or [])]:
         c = env.get(k, '')
         for f in l:
             c += ' -l' + f
         env[k] = c
 
 def get_libs(lib, static):
@@ -413,7 +422,7 @@ def get_libs(lib, static):
     try:
         cmd = ['pkg-config', '--static', '--libs', lib]
         libs = subprocess.check_output(cmd,
-            stderr=subprocess.STDOUT).strip()
+            stderr=subprocess.STDOUT).strip().decode("utf-8")
         all_libs = [l[2:] for l in libs.split() if l.startswith('-l')]
         if not static:
             return ([], all_libs)
@@ -425,33 +434,90 @@ def get_libs(lib, static):
             else:
                 static_libs.append(l)
         return (static_libs, dynamic_libs)
-    except:
-        raise Exception('pkg-config failed for ' + lib)
+    except Exception as e:
+        raise Exception('pkg-config failed for ' + lib + '; Exception: ' + str(e))
 
-# Set toolchain and variant specific construction variables
-def config_env(toolchain, variant, env):
-    if is_debug_variant(variant):
-        env.Append(CPPDEFINES=['DEBUG', '_DEBUG'])
+def add_sanitizer (toolchain, env):
+    san = GetOption('sanitize')
+    if not san: return
+    san_to_lib = {'address': 'asan', 'thread': 'tsan'}
+    if toolchain not in Split('clang gcc'):
+        raise Exception("Sanitizers are only supported for gcc and clang")
+    env.Append(CCFLAGS=['-fsanitize='+san, '-fno-omit-frame-pointer'])
+    env.Append(LINKFLAGS=['-fsanitize='+san])
+    add_static_libs(env, [san_to_lib[san]])
+    env.Append(CPPDEFINES=['SANITIZER='+san_to_lib[san].upper()])
 
-    elif variant == 'release' or variant == 'profile':
-        env.Append(CPPDEFINES=['NDEBUG'])
+def add_boost_and_protobuf(toolchain, env):
+    def get_environ_value(candidates):
+        for c in candidates:
+            try:
+                return os.environ[c]
+            except KeyError:
+                pass
+        raise KeyError('Environment variable not set')
 
-    if 'BOOST_ROOT' in env:
-        if toolchain == 'gcc':
+    try:
+        br_cands = ['CLANG_BOOST_ROOT'] if toolchain == 'clang' else []
+        br_cands.append('BOOST_ROOT')
+        BOOST_ROOT = os.path.normpath(get_environ_value(br_cands))
+        stage64_path = os.path.join(BOOST_ROOT, 'stage64', 'lib')
+        if os.path.exists(stage64_path):
+            env.Append(LIBPATH=[
+                stage64_path,
+            ])
+        else:
+            env.Append(LIBPATH=[
+                os.path.join(BOOST_ROOT, 'stage', 'lib'),
+            ])
+        env['BOOST_ROOT'] = BOOST_ROOT
+        if toolchain in ['gcc', 'clang']:
             env.Append(CCFLAGS=['-isystem' + env['BOOST_ROOT']])
         else:
             env.Append(CPPPATH=[
                 env['BOOST_ROOT'],
             ])
+    except KeyError:
+        pass
+
+    try:
+        pb_cands = ['CLANG_PROTOBUF_ROOT'] if toolchain == 'clang' else []
+        pb_cands.append('PROTOBUF_ROOT')
+        PROTOBUF_ROOT = os.path.normpath(get_environ_value(pb_cands))
+        env.Append(LIBPATH=[PROTOBUF_ROOT + '/src/.libs'])
+        if not should_link_static() and toolchain in['clang', 'gcc']:
+            env.Append(LINKFLAGS=['-Wl,-rpath,' + PROTOBUF_ROOT + '/src/.libs'])
+        env['PROTOBUF_ROOT'] = PROTOBUF_ROOT
+        env.Append(CPPPATH=[env['PROTOBUF_ROOT'] + '/src',])
+    except KeyError:
+        pass
+
+def enable_asserts ():
+    return GetOption('assert')
+
+# Set toolchain and variant specific construction variables
+def config_env(toolchain, variant, env):
+    add_boost_and_protobuf(toolchain, env)
+    env.Append(CPPDEFINES=[
+        'BOOST_COROUTINE_NO_DEPRECATION_WARNING',
+        'BOOST_COROUTINES_NO_DEPRECATION_WARNING'
+    ])
+    if is_debug_variant(variant):
+        env.Append(CPPDEFINES=['DEBUG', '_DEBUG'])
+
+    elif (variant == 'release' or variant == 'profile') and (not enable_asserts()):
+        env.Append(CPPDEFINES=['NDEBUG'])
 
     if should_link_static() and not Beast.system.linux:
         raise Exception("Static linking is only implemented for linux.")
 
+    add_sanitizer(toolchain, env)
+
     if toolchain in Split('clang gcc'):
         if Beast.system.linux:
             link_static = should_link_static()
             for l in ['openssl', 'protobuf']:
                 static, dynamic = get_libs(l, link_static)
                 if link_static:
                     add_static_libs(env, static, dynamic)
                 else:
@@ -486,11 +552,19 @@ def config_env(toolchain, variant, env):
         if toolchain == 'clang':
             env.Append(CCFLAGS=['-Wno-redeclared-class-member'])
             env.Append(CPPDEFINES=['BOOST_ASIO_HAS_STD_ARRAY'])
+            try:
+                ldd_ver = subprocess.check_output([env['CLANG_CXX'], '-fuse-ld=lld', '-Wl,--version'],
+                    stderr=subprocess.STDOUT).strip()
+                # have lld
+                env.Append(LINKFLAGS=['-fuse-ld=lld'])
+            except:
+                pass
 
         env.Append(CXXFLAGS=[
             '-frtti',
             '-std=c++14',
-            '-Wno-invalid-offsetof'])
+            '-Wno-invalid-offsetof'
+        ])
 
         env.Append(CPPDEFINES=['_FILE_OFFSET_BITS=64'])
 
@@ -504,7 +578,6 @@ def config_env(toolchain, variant, env):
             env.Append(CCFLAGS=[
                 '-Wno-deprecated',
                 '-Wno-deprecated-declarations',
-                '-Wno-unused-variable',
                 '-Wno-unused-function',
             ])
         else:
@@ -517,13 +590,22 @@ def config_env(toolchain, variant, env):
                 '-D_GLIBCXX_USE_CXX11_ABI' : 0
             })
         if toolchain == 'gcc':
 
             env.Append(CCFLAGS=[
                 '-Wno-unused-but-set-variable',
                 '-Wno-deprecated',
             ])
+            try:
+                ldd_ver = subprocess.check_output([env['GNU_CXX'], '-fuse-ld=gold', '-Wl,--version'],
+                    stderr=subprocess.STDOUT).strip()
+                # have ld.gold
+                env.Append(LINKFLAGS=['-fuse-ld=gold'])
+            except:
+                pass
 
         boost_libs = [
+            # resist the temptation to alphabetize these. coroutine
+            # must come before context.
+            'boost_chrono',
             'boost_coroutine',
             'boost_context',
             'boost_date_time',
@@ -540,9 +622,13 @@ def config_env(toolchain, variant, env):
         else:
             # We prefer static libraries for boost
             if env.get('BOOST_ROOT'):
+                static_libs64 = ['%s/stage64/lib/lib%s.a' % (env['BOOST_ROOT'], l) for
+                    l in boost_libs]
                 static_libs = ['%s/stage/lib/lib%s.a' % (env['BOOST_ROOT'], l) for
                     l in boost_libs]
-                if all(os.path.exists(f) for f in static_libs):
+                if all(os.path.exists(f) for f in static_libs64):
+                    boost_libs = [File(f) for f in static_libs64]
+                elif all(os.path.exists(f) for f in static_libs):
                     boost_libs = [File(f) for f in static_libs]
             env.Append(LIBS=boost_libs)
 
@@ -601,9 +687,6 @@ def config_env(toolchain, variant, env):
         # extra error checking into the code (e.g. std::vector will throw
         # for out-of-bounds conditions)
         if is_debug_variant(variant):
-            env.Append(CPPDEFINES={
-                '_FORTIFY_SOURCE': 2
-            })
             env.Append(CCFLAGS=[
                 '-O0'
             ])
@@ -635,6 +718,7 @@ def config_env(toolchain, variant, env):
             '/wd"4244"',
             '/wd"4267"',
             '/wd"4800"', # Disable C4800 (int to bool performance)
+            '/wd"4503"', # Disable C4503 (Decorated name length exceeded)
         ])
         env.Append(CPPDEFINES={
             '_WIN32_WINNT' : '0x6000',
@@ -643,6 +727,7 @@ def config_env(toolchain, variant, env):
             '_SCL_SECURE_NO_WARNINGS',
             '_CRT_SECURE_NO_WARNINGS',
             'WIN32_CONSOLE',
+            'NOMINMAX'
         ])
         if variant == 'debug':
             env.Append(LIBS=[
@@ -669,6 +754,7 @@ def config_env(toolchain, variant, env):
             'uuid.lib',
             'odbc32.lib',
             'odbccp32.lib',
+            'crypt32.lib'
         ])
         env.Append(LINKFLAGS=[
             '/DEBUG',
@@ -710,7 +796,7 @@ root_dir = Dir('#').srcnode().get_abspath() # Path to this SConstruct file
 build_dir = os.path.join('build')
 
 base = Environment(
-    toolpath=[os.path.join ('src', 'beast', 'site_scons', 'site_tools')],
+    toolpath=[os.path.join ('src', 'ripple', 'beast', 'site_scons', 'site_tools')],
     tools=['default', 'Protoc', 'VSProject'],
     ENV=os.environ,
     TARGET_ARCH='x86_64')
@@ -719,6 +805,9 @@ config_base(base)
 base.Append(CPPPATH=[
     'src',
     os.path.join('src', 'beast'),
+    os.path.join('src', 'beast', 'include'),
+    os.path.join('src', 'beast', 'extras'),
+    os.path.join('src', 'nudb', 'include'),
     os.path.join(build_dir, 'proto'),
     os.path.join('src','soci','src'),
     os.path.join('src','soci','include'),
@@ -852,9 +941,16 @@ def get_classic_sources(toolchain):
             'src/soci/src/core',
             'src/sqlite']
         )
+    append_sources(result, *list_sources('src/ripple/beast/clock', '.cpp'))
+    append_sources(result, *list_sources('src/ripple/beast/container', '.cpp'))
+    append_sources(result, *list_sources('src/ripple/beast/insight', '.cpp'))
+    append_sources(result, *list_sources('src/ripple/beast/net', '.cpp'))
+    append_sources(result, *list_sources('src/ripple/beast/utility', '.cpp'))
     append_sources(result, *list_sources('src/ripple/app', '.cpp'))
     append_sources(result, *list_sources('src/ripple/basics', '.cpp'))
+    append_sources(result, *list_sources('src/ripple/conditions', '.cpp'))
     append_sources(result, *list_sources('src/ripple/crypto', '.cpp'))
+    append_sources(result, *list_sources('src/ripple/consensus', '.cpp'))
     append_sources(result, *list_sources('src/ripple/json', '.cpp'))
     append_sources(result, *list_sources('src/ripple/ledger', '.cpp'))
     append_sources(result, *list_sources('src/ripple/legacy', '.cpp'))
@@ -864,8 +960,25 @@ def get_classic_sources(toolchain):
     append_sources(result, *list_sources('src/ripple/protocol', '.cpp'))
     append_sources(result, *list_sources('src/ripple/rpc', '.cpp'))
     append_sources(result, *list_sources('src/ripple/shamap', '.cpp'))
-    append_sources(result, *list_sources('src/ripple/test', '.cpp'))
-    append_sources(result, *list_sources('src/ripple/unl', '.cpp'))
+    append_sources(result, *list_sources('src/ripple/server', '.cpp'))
+    append_sources(result, *list_sources('src/test/app', '.cpp'))
+    append_sources(result, *list_sources('src/test/basics', '.cpp'))
+    append_sources(result, *list_sources('src/test/beast', '.cpp'))
+    append_sources(result, *list_sources('src/test/conditions', '.cpp'))
+    append_sources(result, *list_sources('src/test/consensus', '.cpp'))
+    append_sources(result, *list_sources('src/test/core', '.cpp'))
+    append_sources(result, *list_sources('src/test/json', '.cpp'))
+    append_sources(result, *list_sources('src/test/ledger', '.cpp'))
+    append_sources(result, *list_sources('src/test/overlay', '.cpp'))
+    append_sources(result, *list_sources('src/test/peerfinder', '.cpp'))
+    append_sources(result, *list_sources('src/test/protocol', '.cpp'))
+    append_sources(result, *list_sources('src/test/resource', '.cpp'))
+    append_sources(result, *list_sources('src/test/rpc', '.cpp'))
+    append_sources(result, *list_sources('src/test/server', '.cpp'))
+    append_sources(result, *list_sources('src/test/shamap', '.cpp'))
+    append_sources(result, *list_sources('src/test/jtx', '.cpp'))
+    append_sources(result, *list_sources('src/test/csf', '.cpp'))
 
     if use_shp(toolchain):
         cc_flags = {'CCFLAGS': ['--system-header-prefix=rocksdb2']}
@@ -874,7 +987,7 @@ def get_classic_sources(toolchain):
 
     append_sources(
         result,
-        *list_sources('src/ripple/nodestore', '.cpp'),
+        *(list_sources('src/ripple/nodestore', '.cpp') + list_sources('src/test/nodestore', '.cpp')),
         CPPPATH=[
             'src/rocksdb2/include',
             'src/snappy/snappy',
@@ -891,25 +1004,55 @@ def get_unity_sources(toolchain):
|
|||||||
result = []
|
result = []
|
||||||
append_sources(
|
append_sources(
|
||||||
result,
|
result,
|
||||||
|
'src/ripple/beast/unity/beast_insight_unity.cpp',
|
||||||
|
'src/ripple/beast/unity/beast_net_unity.cpp',
|
||||||
|
'src/ripple/beast/unity/beast_utility_unity.cpp',
|
||||||
|
'src/ripple/unity/app_consensus.cpp',
|
||||||
'src/ripple/unity/app_ledger.cpp',
|
'src/ripple/unity/app_ledger.cpp',
|
||||||
'src/ripple/unity/app_main.cpp',
|
'src/ripple/unity/app_ledger_impl.cpp',
|
||||||
|
'src/ripple/unity/app_main1.cpp',
|
||||||
|
'src/ripple/unity/app_main2.cpp',
|
||||||
'src/ripple/unity/app_misc.cpp',
|
'src/ripple/unity/app_misc.cpp',
|
||||||
|
'src/ripple/unity/app_misc_impl.cpp',
|
||||||
'src/ripple/unity/app_paths.cpp',
|
'src/ripple/unity/app_paths.cpp',
|
||||||
'src/ripple/unity/app_tests.cpp',
|
|
||||||
'src/ripple/unity/app_tx.cpp',
|
'src/ripple/unity/app_tx.cpp',
|
||||||
|
'src/ripple/unity/conditions.cpp',
|
||||||
|
'src/ripple/unity/consensus.cpp',
|
||||||
'src/ripple/unity/core.cpp',
|
'src/ripple/unity/core.cpp',
|
||||||
'src/ripple/unity/basics.cpp',
|
'src/ripple/unity/basics.cpp',
|
||||||
'src/ripple/unity/crypto.cpp',
|
'src/ripple/unity/crypto.cpp',
|
||||||
'src/ripple/unity/ledger.cpp',
|
'src/ripple/unity/ledger.cpp',
|
||||||
'src/ripple/unity/net.cpp',
|
'src/ripple/unity/net.cpp',
|
||||||
'src/ripple/unity/overlay.cpp',
|
'src/ripple/unity/overlay1.cpp',
|
||||||
|
'src/ripple/unity/overlay2.cpp',
|
||||||
'src/ripple/unity/peerfinder.cpp',
|
'src/ripple/unity/peerfinder.cpp',
|
||||||
'src/ripple/unity/json.cpp',
|
'src/ripple/unity/json.cpp',
|
||||||
'src/ripple/unity/protocol.cpp',
|
'src/ripple/unity/protocol.cpp',
|
||||||
'src/ripple/unity/rpcx.cpp',
|
'src/ripple/unity/rpcx1.cpp',
|
||||||
|
'src/ripple/unity/rpcx2.cpp',
|
||||||
'src/ripple/unity/shamap.cpp',
|
'src/ripple/unity/shamap.cpp',
|
||||||
'src/ripple/unity/test.cpp',
|
'src/ripple/unity/server.cpp',
|
||||||
'src/ripple/unity/unl.cpp',
|
'src/test/unity/app_test_unity1.cpp',
|
||||||
|
'src/test/unity/app_test_unity2.cpp',
|
||||||
|
'src/test/unity/basics_test_unity.cpp',
|
||||||
|
'src/test/unity/beast_test_unity1.cpp',
|
||||||
|
'src/test/unity/beast_test_unity2.cpp',
|
||||||
|
'src/test/unity/consensus_test_unity.cpp',
|
||||||
|
'src/test/unity/core_test_unity.cpp',
|
||||||
|
'src/test/unity/conditions_test_unity.cpp',
|
||||||
|
'src/test/unity/json_test_unity.cpp',
|
||||||
|
'src/test/unity/ledger_test_unity.cpp',
|
||||||
|
'src/test/unity/overlay_test_unity.cpp',
|
||||||
|
'src/test/unity/peerfinder_test_unity.cpp',
|
||||||
|
'src/test/unity/protocol_test_unity.cpp',
|
||||||
|
'src/test/unity/resource_test_unity.cpp',
|
||||||
|
'src/test/unity/rpc_test_unity.cpp',
|
||||||
|
'src/test/unity/server_test_unity.cpp',
|
||||||
|
'src/test/unity/server_status_test_unity.cpp',
|
||||||
|
'src/test/unity/shamap_test_unity.cpp',
|
||||||
|
'src/test/unity/jtx_unity1.cpp',
|
||||||
|
'src/test/unity/jtx_unity2.cpp',
|
||||||
|
'src/test/unity/csf_unity.cpp'
|
||||||
)
|
)
|
||||||
|
|
||||||
if use_shp(toolchain):
|
if use_shp(toolchain):
|
||||||
@@ -920,6 +1063,7 @@ def get_unity_sources(toolchain):
|
|||||||
append_sources(
|
append_sources(
|
||||||
result,
|
result,
|
||||||
'src/ripple/unity/nodestore.cpp',
|
'src/ripple/unity/nodestore.cpp',
|
||||||
|
'src/test/unity/nodestore_test_unity.cpp',
|
||||||
CPPPATH=[
|
CPPPATH=[
|
||||||
'src/rocksdb2/include',
|
'src/rocksdb2/include',
|
||||||
'src/snappy/snappy',
|
'src/snappy/snappy',
|
||||||
@@ -1018,7 +1162,7 @@ for tu_style in ['classic', 'unity']:
|
|||||||
os.path.join(variant_dir, 'proto') :
|
os.path.join(variant_dir, 'proto') :
|
||||||
os.path.join (build_dir, 'proto'),
|
os.path.join (build_dir, 'proto'),
|
||||||
}
|
}
|
||||||
for dest, source in variant_dirs.iteritems():
|
for dest, source in variant_dirs.items():
|
||||||
env.VariantDir(dest, source, duplicate=0)
|
env.VariantDir(dest, source, duplicate=0)
|
||||||
|
|
||||||
object_builder = ObjectBuilder(env, variant_dirs)
|
object_builder = ObjectBuilder(env, variant_dirs)
|
||||||
@@ -1030,37 +1174,23 @@ for tu_style in ['classic', 'unity']:
|
|||||||
for s, k in sources:
|
for s, k in sources:
|
||||||
object_builder.add_source_files(*s, **k)
|
object_builder.add_source_files(*s, **k)
|
||||||
|
|
||||||
git_commit_tag = {}
|
|
||||||
if toolchain != 'msvc':
|
|
||||||
git = Beast.Git(env)
|
|
||||||
if git.exists:
|
|
||||||
id = '%s+%s.%s' % (git.tags, git.user, git.branch)
|
|
||||||
git_commit_tag = {'CPPDEFINES':
|
|
||||||
{'GIT_COMMIT_ID' : '\'"%s"\'' % id }}
|
|
||||||
|
|
||||||
object_builder.add_source_files(
|
|
||||||
'src/ripple/unity/git_id.cpp',
|
|
||||||
**git_commit_tag)
|
|
||||||
|
|
||||||
if use_shp(toolchain):
|
if use_shp(toolchain):
|
||||||
cc_flags = {'CCFLAGS': ['--system-header-prefix=rocksdb2']}
|
cc_flags = {'CCFLAGS': ['--system-header-prefix=rocksdb2']}
|
||||||
else:
|
else:
|
||||||
cc_flags = {}
|
cc_flags = {}
|
||||||
|
|
||||||
object_builder.add_source_files(
|
object_builder.add_source_files(
|
||||||
'src/beast/beast/unity/hash_unity.cpp',
|
'src/ripple/beast/unity/beast_hash_unity.cpp',
|
||||||
'src/ripple/unity/beast.cpp',
|
'src/ripple/unity/beast.cpp',
|
||||||
'src/ripple/unity/lz4.c',
|
'src/ripple/unity/lz4.c',
|
||||||
'src/ripple/unity/protobuf.cpp',
|
'src/ripple/unity/protobuf.cpp',
|
||||||
'src/ripple/unity/ripple.proto.cpp',
|
'src/ripple/unity/ripple.proto.cpp',
|
||||||
'src/ripple/unity/resource.cpp',
|
'src/ripple/unity/resource.cpp',
|
||||||
'src/ripple/unity/server.cpp',
|
|
||||||
'src/ripple/unity/websocket02.cpp',
|
|
||||||
**cc_flags
|
**cc_flags
|
||||||
)
|
)
|
||||||
|
|
||||||
object_builder.add_source_files(
|
object_builder.add_source_files(
|
||||||
'src/ripple/unity/beastc.c',
|
'src/sqlite/sqlite_unity.c',
|
||||||
CCFLAGS = ([] if toolchain == 'msvc' else ['-Wno-array-bounds']))
|
CCFLAGS = ([] if toolchain == 'msvc' else ['-Wno-array-bounds']))
|
||||||
|
|
||||||
if 'gcc' in toolchain:
|
if 'gcc' in toolchain:
|
||||||
@@ -1071,7 +1201,7 @@ for tu_style in ['classic', 'unity']:
|
|||||||
cc_flags = {}
|
cc_flags = {}
|
||||||
|
|
||||||
object_builder.add_source_files(
|
object_builder.add_source_files(
|
||||||
'src/ripple/unity/ed25519.c',
|
'src/ripple/unity/ed25519_donna.c',
|
||||||
CPPPATH=[
|
CPPPATH=[
|
||||||
'src/ed25519-donna',
|
'src/ed25519-donna',
|
||||||
]
|
]
|
||||||
@@ -1097,11 +1227,6 @@ for tu_style in ['classic', 'unity']:
|
|||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
object_builder.add_source_files(
|
|
||||||
'src/ripple/unity/websocket04.cpp',
|
|
||||||
CPPPATH='src/websocketpp',
|
|
||||||
)
|
|
||||||
|
|
||||||
if toolchain == "clang" and Beast.system.osx:
|
if toolchain == "clang" and Beast.system.osx:
|
||||||
object_builder.add_source_files('src/ripple/unity/beastobjc.mm')
|
object_builder.add_source_files('src/ripple/unity/beastobjc.mm')
|
||||||
|
|
||||||
@@ -1136,16 +1261,23 @@ for tu_style in ['classic', 'unity']:
|
|||||||
if should_build_ninja(tu_style, toolchain, variant):
|
if should_build_ninja(tu_style, toolchain, variant):
|
||||||
print('Generating ninja: {}:{}:{}'.format(tu_style, toolchain, variant))
|
print('Generating ninja: {}:{}:{}'.format(tu_style, toolchain, variant))
|
||||||
scons_to_ninja.GenerateNinjaFile(
|
scons_to_ninja.GenerateNinjaFile(
|
||||||
[object_builder.env] + object_builder.child_envs,
|
# add base env last to ensure protoc targets are added
|
||||||
|
[object_builder.env] + object_builder.child_envs + [base],
|
||||||
dest_file='build.ninja')
|
dest_file='build.ninja')
|
||||||
|
|
||||||
for key, value in aliases.iteritems():
|
for key, value in aliases.items():
|
||||||
env.Alias(key, value)
|
env.Alias(key, value)
|
||||||
|
|
||||||
vcxproj = base.VSProject(
|
vcxproj = base.VSProject(
|
||||||
os.path.join('Builds', 'VisualStudio2015', 'RippleD'),
|
os.path.join('Builds', 'VisualStudio2015', 'RippleD'),
|
||||||
source = [],
|
source = [],
|
||||||
VSPROJECT_ROOT_DIRS = ['src/beast', 'src', '.'],
|
VSPROJECT_ROOT_DIRS = [
|
||||||
|
'build/',
|
||||||
|
'src/beast/extras',
|
||||||
|
'src/beast/include',
|
||||||
|
'src/nudb/include',
|
||||||
|
'src',
|
||||||
|
'.'],
|
||||||
VSPROJECT_CONFIGS = msvc_configs)
|
VSPROJECT_CONFIGS = msvc_configs)
|
||||||
base.Alias('vcxproj', vcxproj)
|
base.Alias('vcxproj', vcxproj)
|
||||||
|
|
||||||
@@ -1167,13 +1299,13 @@ def do_count(target, source, env):
|
|||||||
path = os.path.join(parent, path)
|
path = os.path.join(parent, path)
|
||||||
r = os.path.splitext(path)
|
r = os.path.splitext(path)
|
||||||
if r[1] in suffixes:
|
if r[1] in suffixes:
|
||||||
if r[0].endswith('.test'):
|
if r[0].endswith('_test'):
|
||||||
yield os.path.normpath(path)
|
yield os.path.normpath(path)
|
||||||
return list(_iter(base))
|
return list(_iter(base))
|
||||||
testfiles = list_testfiles(os.path.join('src', 'ripple'), env.get('CPPSUFFIXES'))
|
testfiles = list_testfiles(os.path.join('src', 'test'), env.get('CPPSUFFIXES'))
|
||||||
lines = 0
|
lines = 0
|
||||||
for f in testfiles:
|
for f in testfiles:
|
||||||
lines = lines + sum(1 for line in open(f))
|
lines = lines + sum(1 for line in open(f))
|
||||||
print "Total unit test lines: %d" % lines
|
print ("Total unit test lines: %d" % lines)
|
||||||
|
|
||||||
PhonyTargets(env, count = do_count)
|
PhonyTargets(env, count = do_count)
|
||||||
|
|||||||
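The `count` target above tallies unit-test lines: walk a tree, keep files whose stem ends in `_test` (the convention this diff switches to) and whose extension is a C++ suffix, then sum line counts. A minimal standalone sketch of the same idea in Python; the suffix tuple here is illustrative, whereas the SConstruct gets the real list from env.get('CPPSUFFIXES'):

import os

def list_testfiles(base, suffixes=('.cpp', '.h')):
    # Yield paths under `base` whose stem ends in '_test' and whose
    # extension is one of `suffixes`.
    for parent, _, files in os.walk(base):
        for name in files:
            stem, ext = os.path.splitext(name)
            if ext in suffixes and stem.endswith('_test'):
                yield os.path.join(parent, name)

total = 0
for path in list_testfiles(os.path.join('src', 'test')):
    with open(path) as f:
        total += sum(1 for _ in f)
print("Total unit test lines: %d" % total)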
115  appveyor.yml
@@ -6,22 +6,39 @@ environment:
 # that it's a small download. We also use appveyor's free cache, avoiding fees
 # downloading from S3 each time.
 # TODO: script to create this package.
-  RIPPLED_DEPS_URL: https://ripple.github.io/Downloads/appveyor/rippled_deps15.01.zip
+  RIPPLED_DEPS_PATH: rippled_deps15.02
+  RIPPLED_DEPS_URL: https://ripple.github.io/Downloads/appveyor/%RIPPLED_DEPS_PATH%.zip

 # Other dependencies we just download each time.
-  PIP_URL: https://bootstrap.pypa.io/get-pip.py
-  PYWIN32_URL: https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20219/pywin32-219.win-amd64-py2.7.exe
+  PIP_PATH: get-pip.py
+  PIP_URL: https://bootstrap.pypa.io/%PIP_PATH%
+  # The % in this URL messes up variable substition, so any updates will
+  # need to update both PYWIN32_PATH and PYWIN32_URL
+  PYWIN32_PATH: pywin32-220.win-amd64-py2.7.exe
+  PYWIN32_URL: https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win-amd64-py2.7.exe

 # Scons honours these environment variables, setting the include/lib paths.
-  BOOST_ROOT: C:/rippled_deps15.01/boost
-  OPENSSL_ROOT: C:/rippled_deps15.01/openssl
+  BOOST_ROOT: C:/%RIPPLED_DEPS_PATH%/boost
+  OPENSSL_ROOT: C:/%RIPPLED_DEPS_PATH%/openssl
+
+  matrix:
+  # This build works, but our current Appveyor config runs matrix builds
+  # sequentially, and the one build is already slow enough.
+  # - build: scons
+  #   target: msvc.debug
+  - build: cmake
+    target: msvc.debug
+    buildconfig: Debug
+
 os: Visual Studio 2015

-# At the end of each successful build we cache this directory. It must be less
-# than 100MB total compressed.
+# At the end of each successful build we cache this directory.
+# https://www.appveyor.com/docs/build-cache/
+# Resulting archive should not exceed 100 MB.
 cache:
-  - "C:\\rippled_deps15.01"
+  - 'C:\%RIPPLED_DEPS_PATH%'
+  - '%PIP_PATH%'
+  - '%PYWIN32_PATH%'
+
 # This means we'll download a zip of the branch we want, rather than the full
 # history.
@@ -29,36 +46,37 @@ shallow_clone: true

 install:
   # We want easy_install, python and protoc.exe on PATH.
-  - SET PATH=%PYTHON%;%PYTHON%/Scripts;C:/rippled_deps15.01;%PATH%
+  - SET PATH=%PYTHON%;%PYTHON%/Scripts;C:/%RIPPLED_DEPS_PATH%;%PATH%

   # `ps` prefix means the command is executed by powershell.
-  - ps: Start-FileDownload $env:PIP_URL
-  - ps: Start-FileDownload $env:PYWIN32_URL
-  # Installing pip will install setuptools/easy_install.
-  - python get-pip.py
-  # Pip has some problems installing scons on windows so we use easy install.
-  - easy_install scons
-  # Scons has problems with parallel builds on windows without pywin32.
-  - easy_install pywin32-219.win-amd64-py2.7.exe
-  # (easy_install can do headless installs of .exe wizards)
+  - ps: |
+      if ($env:build -eq "scons") {
+        if(-not(Test-Path $env:PIP_PATH)) {
+          echo "Download from $env:PIP_URL"
+          Start-FileDownload $env:PIP_URL
+        }
+        if(-not(Test-Path $env:PYWIN32_PATH)) {
+          echo "Download from $env:PYWIN32_URL"
+          Start-FileDownload $env:PYWIN32_URL
+        }
+      }
+  - bin/ci/windows/install-dependencies.bat

   # Download dependencies if appveyor didn't restore them from the cache.
   # Use 7zip to unzip.
   - ps: |
-      if (-not(Test-Path 'C:/rippled_deps15.01')) {
+      if (-not(Test-Path 'C:/$env:RIPPLED_DEPS_PATH')) {
        echo "Download from $env:RIPPLED_DEPS_URL"
        Start-FileDownload "$env:RIPPLED_DEPS_URL"
-        7z x rippled_deps15.01.zip -oC:\ -y > $null
+        7z x "$($env:RIPPLED_DEPS_PATH).zip" -oC:\ -y > $null
+        if ($LastExitCode -ne 0) { throw "7z failed" }
       }

   # Newer DEPS include a versions file.
   # Dump it so we can verify correct behavior.
   - ps: |
-      if (Test-Path 'C:/rippled_deps15.01/versions.txt') {
-        cat 'C:/rippled_deps15.01/versions.txt'
+      if (Test-Path "C:/$env:RIPPLED_DEPS_PATH/versions.txt") {
+        cat "C:/$env:RIPPLED_DEPS_PATH/versions.txt"
       }

 # TODO: This is giving me grief
@@ -71,17 +89,46 @@ build_script:
   - '"%VS140COMNTOOLS%../../VC/vcvarsall.bat" x86_amd64'
   # Show which version of the compiler we are using.
   - cl
-  - scons msvc.debug -j%NUMBER_OF_PROCESSORS%
+  - ps: |
+      if ($env:build -eq "scons") {
+        # Build with scons
+        scons $env:target -j%NUMBER_OF_PROCESSORS%
+        if ($LastExitCode -ne 0) { throw "scons build failed" }
+      }
+      else
+      {
+        # Build with cmake
+        cmake --version
+        $cmake_target="$($env:target).ci"
+        "$cmake_target"
+        New-Item -ItemType Directory -Force -Path "build/$cmake_target"
+        Push-Location "build/$cmake_target"
+        cmake -G"Visual Studio 14 2015 Win64" -Dtarget="$cmake_target" ../..
+        if ($LastExitCode -ne 0) { throw "CMake failed" }
+        cmake --build . --config $env:buildconfig -- -m
+        if ($LastExitCode -ne 0) { throw "CMake build failed" }
+        Pop-Location
+      }

 after_build:
-  # Put our executable in a place where npm test can find it.
-  - ps: cp build/msvc.debug/rippled.exe build
-  - ps: ls build
+  - ps: |
+      if ($env:build -eq "scons") {
+        cp build/$($env:target)/rippled.exe build
+        ls build
+        $exe="build/rippled"
+      }
+      else
+      {
+        $exe="build/$cmake_target/$env:buildconfig/rippled"
+      }
+      "Exe is at $exe"

 test_script:
-  # Run the unit tests
-  - build\\rippled --unittest
+  - ps: |
+      & {
+        # Run the rippled unit tests
+        & $exe --unittest --quiet --unittest-log
+        # https://connect.microsoft.com/PowerShell/feedback/details/751703/option-to-stop-script-if-command-line-exe-fails
+        if ($LastExitCode -ne 0) { throw "Unit tests failed" }
+      }

-  # Run the integration tests
-  - npm install
-  - npm test
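The reworked appveyor.yml derives every cached artifact and its download URL from a single environment variable, then downloads only on a Test-Path miss. A small Python sketch of that cache-or-download pattern, with urllib standing in for AppVeyor's Start-FileDownload (the URL and name below are the ones the config uses):

import os
import urllib.request

def fetch_cached(url, path):
    # Skip the download when a cached copy already exists,
    # mirroring the Test-Path / Start-FileDownload guard above.
    if not os.path.exists(path):
        print("Download from %s" % url)
        urllib.request.urlretrieve(url, path)
    return path

deps = "rippled_deps15.02"
fetch_cached("https://ripple.github.io/Downloads/appveyor/%s.zip" % deps,
             deps + ".zip")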
@@ -1,41 +1,104 @@
 #!/bin/bash -u
 # We use set -e and bash with -u to bail on first non zero exit code of any
-# processes launched or upon any unbound variable
-set -e
+# processes launched or upon any unbound variable.
+# We use set -x to print commands before running them to help with
+# debugging.
+set -ex
 __dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
 echo "using CC: $CC"
 echo "using TARGET: $TARGET"
-export RIPPLED_PATH="$PWD/build/$CC.$TARGET/rippled"
-echo "using RIPPLED_PATH: $RIPPLED_PATH"
-# Make sure vcxproj is up to date
-scons vcxproj
-git diff --exit-code
-# $CC will be either `clang` or `gcc`
-# http://docs.travis-ci.com/user/migrating-from-legacy/?utm_source=legacy-notice&utm_medium=banner&utm_campaign=legacy-upgrade
-# indicates that 2 cores are available to containers.
-scons -j${NUM_PROCESSORS:-2} $CC.$TARGET
-# We can be sure we're using the build/$CC.$TARGET variant
-# (-f so never err)
-rm -f build/rippled

-# See what we've actually built
-ldd $RIPPLED_PATH
-if [[ $TARGET == "coverage" ]]; then
-  $RIPPLED_PATH --unittest
-  # We pass along -p to keep path segments so as to avoid collisions
-  codecov --gcov-args=-p --gcov-source-match='^src/(ripple|beast)'
-else
-  if [[ $CC == "clang" ]]; then
-    # gdb segfaults with a clang build
-    $RIPPLED_PATH --unittest
-  else
-    # Run unittests (under gdb)
-    cat $__dirname/unittests.gdb | gdb \
-      --return-child-result \
-      --args $RIPPLED_PATH --unittest
-  fi
-fi
+# Ensure APP defaults to rippled if it's not set.
+: ${APP:=rippled}
+JOBS=${NUM_PROCESSORS:-2}
+if [[ ${TARGET} == *.nounity ]]; then
+  JOBS=$((2*${JOBS}))
 fi

-# Run NPM tests
-npm install
-npm test --rippled=$RIPPLED_PATH
+if [[ ${BUILD:-scons} == "cmake" ]]; then
+  echo "cmake building ${APP}"
+  CMAKE_TARGET=$CC.$TARGET
+  if [[ ${CI:-} == true ]]; then
+    CMAKE_TARGET=$CMAKE_TARGET.ci
+  fi
+  mkdir -p "build/${CMAKE_TARGET}"
+  pushd "build/${CMAKE_TARGET}"
+  cmake ../.. -Dtarget=$CMAKE_TARGET
+  cmake --build . -- -j${JOBS}
+  popd
+  export APP_PATH="$PWD/build/${CMAKE_TARGET}/${APP}"
+  echo "using APP_PATH: $APP_PATH"
+
+else
+  export APP_PATH="$PWD/build/$CC.$TARGET/${APP}"
+  echo "using APP_PATH: $APP_PATH"
+  # Make sure vcxproj is up to date
+  scons vcxproj
+  git diff --exit-code
+  # $CC will be either `clang` or `gcc`
+  # http://docs.travis-ci.com/user/migrating-from-legacy/?utm_source=legacy-notice&utm_medium=banner&utm_campaign=legacy-upgrade
+  # indicates that 2 cores are available to containers.
+  scons -j${JOBS} $CC.$TARGET
+fi
+# We can be sure we're using the build/$CC.$TARGET variant
+# (-f so never err)
+rm -f build/${APP}
+
+# See what we've actually built
+ldd $APP_PATH
+
+if [[ ${APP} == "rippled" ]]; then
+  export APP_ARGS="--unittest --quiet --unittest-log"
+  # Only report on src/ripple files
+  export LCOV_FILES="*/src/ripple/*"
+  # Nothing to explicitly exclude
+  export LCOV_EXCLUDE_FILES="LCOV_NO_EXCLUDE"
+else
+  : ${APP_ARGS:=}
+  : ${LCOV_FILES:="*/src/*"}
+  # Don't exclude anything
+  : ${LCOV_EXCLUDE_FILES:="LCOV_NO_EXCLUDE"}
+fi
+
+if [[ $TARGET == "coverage" ]]; then
+  export PATH=$PATH:$LCOV_ROOT/usr/bin
+
+  # Create baseline coverage data file
+  lcov --no-external -c -i -d . -o baseline.info
+fi
+
+if [[ ${TARGET} == debug ]]; then
+  # Execute unit tests under gdb, printing a call stack
+  # if we get a crash.
+  $GDB_ROOT/bin/gdb -return-child-result -quiet -batch \
+    -ex "set env MALLOC_CHECK_=3" \
+    -ex "set print thread-events off" \
+    -ex run \
+    -ex "thread apply all backtrace full" \
+    -ex "quit" \
+    --args $APP_PATH $APP_ARGS
+else
+  $APP_PATH $APP_ARGS
+fi
+
+if [[ $TARGET == "coverage" ]]; then
+  # Create test coverage data file
+  lcov --no-external -c -d . -o tests.info
+
+  # Combine baseline and test coverage data
+  lcov -a baseline.info -a tests.info -o lcov-all.info
+
+  # Included files
+  lcov -e "lcov-all.info" "${LCOV_FILES}" -o lcov.pre.info
+
+  # Excluded files
+  lcov --remove lcov.pre.info "${LCOV_EXCLUDE_FILES}" -o lcov.info
+
+  # Push the results (lcov.info) to codecov
+  codecov -X gcov # don't even try and look for .gcov files ;)
+
+  find . -name "*.gcda" | xargs rm -f
+fi
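The coverage branch above is the standard lcov recipe: capture a baseline before the tests so files that never execute still appear with zero hits, capture again afterwards, combine the two, then narrow with an include pattern and an exclude pattern before uploading. The same sequence driven from Python, assuming lcov is on PATH; the patterns are the script's rippled defaults:

import subprocess

def run(*args):
    # Echo then execute, roughly what `set -ex` gives the shell script.
    print('+ ' + ' '.join(args))
    subprocess.check_call(args)

run('lcov', '--no-external', '-c', '-i', '-d', '.', '-o', 'baseline.info')
# ... run the instrumented test binary here ...
run('lcov', '--no-external', '-c', '-d', '.', '-o', 'tests.info')
run('lcov', '-a', 'baseline.info', '-a', 'tests.info', '-o', 'lcov-all.info')
run('lcov', '-e', 'lcov-all.info', '*/src/ripple/*', '-o', 'lcov.pre.info')
run('lcov', '--remove', 'lcov.pre.info', 'LCOV_NO_EXCLUDE', '-o', 'lcov.info')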
@@ -1,34 +1,82 @@
 #!/bin/bash -u
-# Exit if anything fails.
-set -e
+# Exit if anything fails. Echo commands to aid debugging.
+set -ex
+
+# Target working dir - defaults to current dir.
+# Can be set from caller, or in the first parameter
+TWD=$( cd ${TWD:-${1:-${PWD:-$( pwd )}}}; pwd )
+echo "Target path is: $TWD"
 # Override gcc version to $GCC_VER.
 # Put an appropriate symlink at the front of the path.
-mkdir -v $HOME/bin
+mkdir -pv $HOME/bin
 for g in gcc g++ gcov gcc-ar gcc-nm gcc-ranlib
 do
   test -x $( type -p ${g}-$GCC_VER )
   ln -sv $(type -p ${g}-$GCC_VER) $HOME/bin/${g}
 done
-for c in clang clang++
-do
-  test -x $( type -p ${c}-$CLANG_VER )
-  ln -sv $(type -p ${c}-$CLANG_VER) $HOME/bin/${c}
-done
-export PATH=$PWD/bin:$PATH
+
+if [[ -n ${CLANG_VER:-} ]]; then
+  # There are cases where the directory exists, but the exe is not available.
+  # Use this workaround for now.
+  if [[ ! -x ${TWD}/llvm-${LLVM_VERSION}/bin/llvm-config && -d ${TWD}/llvm-${LLVM_VERSION} ]]; then
+    rm -fr ${TWD}/llvm-${LLVM_VERSION}
+  fi
+  if [[ ! -d ${TWD}/llvm-${LLVM_VERSION} ]]; then
+    mkdir ${TWD}/llvm-${LLVM_VERSION}
+    LLVM_URL="http://llvm.org/releases/${LLVM_VERSION}/clang+llvm-${LLVM_VERSION}-x86_64-linux-gnu-ubuntu-14.04.tar.xz"
+    wget -O - ${LLVM_URL} | tar -Jxvf - --strip 1 -C ${TWD}/llvm-${LLVM_VERSION}
+  fi
+  ${TWD}/llvm-${LLVM_VERSION}/bin/llvm-config --version;
+  export LLVM_CONFIG="${TWD}/llvm-${LLVM_VERSION}/bin/llvm-config";
+fi
+
+if [[ ${BUILD:-} == cmake ]]; then
+  # There are cases where the directory exists, but the exe is not available.
+  # Use this workaround for now.
+  if [[ ! -x ${TWD}/cmake/bin/cmake && -d ${TWD}/cmake ]]; then
+    rm -fr ${TWD}/cmake
+  fi
+  if [[ ! -d ${TWD}/cmake ]]; then
+    CMAKE_URL="https://www.cmake.org/files/v3.6/cmake-3.6.1-Linux-x86_64.tar.gz"
+    wget --version
+    # wget version 1.13.4 thinks this certificate is invalid, even though it's fine.
+    # "ERROR: no certificate subject alternative name matches"
+    # See also: https://github.com/travis-ci/travis-ci/issues/5059
+    mkdir ${TWD}/cmake &&
+      wget -O - --no-check-certificate ${CMAKE_URL} | tar --strip-components=1 -xz -C ${TWD}/cmake
+    cmake --version
+  fi
+fi
+
 # What versions are we ACTUALLY running?
 if [ -x $HOME/bin/g++ ]; then
   $HOME/bin/g++ -v
 fi
-if [ -x $HOME/bin/clang ]; then
-  $HOME/bin/clang -v
-fi
-# Avoid `spurious errors` caused by ~/.npm permission issues
-# Does it already exist? Who owns? What permissions?
-ls -lah ~/.npm || mkdir ~/.npm
-# Make sure we own it
-chown -Rc $USER ~/.npm
-# We use this so we can filter the subtrees from our coverage report
-pip install --user https://github.com/sublimator/codecov-python/zipball/source-match

+pip install --user requests==2.13.0
+pip install --user https://github.com/codecov/codecov-python/archive/master.zip
+
 bash bin/sh/install-boost.sh
+
+# Install lcov
+# Download the archive
+wget https://github.com/linux-test-project/lcov/releases/download/v1.12/lcov-1.12.tar.gz
+# Extract to ~/lcov-1.12
+tar xfvz lcov-1.12.tar.gz -C $HOME
+# Set install path
+mkdir -p $LCOV_ROOT
+cd $HOME/lcov-1.12 && make install PREFIX=$LCOV_ROOT
+
+if [[ ${TARGET} == debug && ! -x ${GDB_ROOT}/bin/gdb ]]; then
+  pushd $HOME
+  #install gdb
+  wget https://ftp.gnu.org/gnu/gdb/gdb-8.0.tar.xz
+  tar xf gdb-8.0.tar.xz
+  pushd gdb-8.0
+  ./configure CFLAGS='-w -O2' CXXFLAGS='-std=gnu++11 -g -O2 -w' --prefix=$GDB_ROOT
+  make -j2
+  make install
+  popd
+  popd
+fi
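Both the llvm and cmake blocks above guard against a CI cache that restored the directory but not a working executable: if the directory exists without its binary, it is wiped and re-fetched. The same guard as a reusable Python sketch; the names are illustrative, and `fetch` is whatever download-and-untar step applies:

import os
import shutil

def ensure_tool(tool_dir, exe_rel, fetch):
    # A cached toolchain directory can survive while its executable
    # does not; treat that state as stale and re-fetch.
    exe = os.path.join(tool_dir, exe_rel)
    if os.path.isdir(tool_dir) and not os.access(exe, os.X_OK):
        shutil.rmtree(tool_dir)
    if not os.path.isdir(tool_dir):
        os.makedirs(tool_dir)
        fetch(tool_dir)  # e.g. download and extract the release tarball
    return exe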
@@ -1,4 +0,0 @@
-set env MALLOC_CHECK_=3
-set print thread-events off
-run
-backtrace full
13  bin/ci/windows/install-dependencies.bat  Normal file
@@ -0,0 +1,13 @@
+if "%build%" == "scons" (
+  rem Installing pip will install setuptools/easy_install.
+  python "%PIP_PATH%"
+
+  rem Pip has some problems installing scons on windows so we use easy install.
+  rem - easy_install scons
+  rem Workaround
+  easy_install https://pypi.python.org/packages/source/S/SCons/scons-2.5.0.tar.gz#md5=bda5530a70a41a7831d83c8b191c021e
+
+  rem Scons has problems with parallel builds on windows without pywin32.
+  easy_install "%PYWIN32_PATH%"
+  rem (easy_install can do headless installs of .exe wizards)
+)
86  bin/getInfoRippled.sh  Executable file
@@ -0,0 +1,86 @@
+#!/usr/bin/env bash
+
+rippled_exe=/opt/ripple/bin/rippled
+conf_file=/etc/opt/ripple/rippled.cfg
+
+while getopts ":e:c:" opt; do
+    case $opt in
+        e)
+            rippled_exe=${OPTARG}
+            ;;
+        c)
+            conf_file=${OPTARG}
+            ;;
+        \?)
+            echo "Invalid option: -$OPTARG"
+    esac
+done
+
+tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXX)
+cd /tmp
+chmod 751 ripple_info.*
+cd ~
+echo ${tmp_loc}
+
+cleaned_conf=${tmp_loc}/cleaned_rippled_cfg.txt
+
+if [[ -f ${conf_file} ]]
+then
+    db=$(sed -r -e 's/\<s[a-zA-Z0-9]{28}\>/secretsecretsecretsecretmaybe/g' ${conf_file} |\
+        awk -v OUT_FILE=${cleaned_conf} '
+        BEGIN {skip=0; db_path="";print > OUT_FILE}
+        /^\[validation_seed\]/ {skip=1; next}
+        /^\[node_seed\]/ {skip=1; next}
+        /^\[validation_manifest\]/ {skip=1; next}
+        /^\[validator_token\]/ {skip=1; next}
+        /^\[.*\]/ {skip=0}
+        skip==1 {next}
+        save==1 {save=0;db_path=$0}
+        /^\[database_path\]/ {save=1}
+        {print >> OUT_FILE}
+        END {print db_path}
+        ')
+fi
+
+echo "database_path: ${db}"
+df ${db} > ${tmp_loc}/db_path_df.txt
+echo
+
+# Send output from this script to a log file
+## this captures any messages
+## or errors from the script itself
+
+log_file=${tmp_loc}/get_info.log
+exec 3>&1 1>>${log_file} 2>&1
+
+## Send all stdout files to /tmp
+
+if [[ -x ${rippled_exe} ]]
+then
+    pgrep rippled && \
+    ${rippled_exe} --conf ${conf_file} \
+    -- server_info > ${tmp_loc}/server_info.txt
+fi
+
+df -h > ${tmp_loc}/free_disk_space.txt
+cat /proc/meminfo > ${tmp_loc}/amount_mem.txt
+cat /proc/swaps > ${tmp_loc}/swap_space.txt
+ulimit -a > ${tmp_loc}/reported_current_limits.txt
+
+for dev_path in $(df | awk '$1 ~ /^\/dev\// {print $1}'); do
+    # strip numbers from end and remove '/dev/'
+    dev=$(basename ${dev_path%%[0-9]})
+    if [[ "$(cat /sys/block/${dev}/queue/rotational)" = 0 ]]
+    then
+        echo "${dev} : SSD" >> ${tmp_loc}/is_ssd.txt
+    else
+        echo "${dev} : NO SSD" >> ${tmp_loc}/is_ssd.txt
+    fi
+done
+
+pushd ${tmp_loc}
+tar -czvf info-package.tar.gz *.txt *.log
+popd
+
+echo "Use the following command on your local machine to download from your rippled instance: scp <remote_rippled_username>@<remote_host>:${tmp_loc}/info-package.tar.gz <path/to/local_machine/directory>"| tee /dev/fd/3
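Before packaging the config, the new script masks 29-character seed-like tokens and drops the four secret sections outright, resuming output at the next section header. The same filter as a Python sketch; the section names are exactly the ones the awk program skips:

import re

SECRET_SECTIONS = {'[validation_seed]', '[node_seed]',
                   '[validation_manifest]', '[validator_token]'}

def clean_config(lines):
    skip = False
    for line in lines:
        stripped = line.strip()
        if stripped in SECRET_SECTIONS:
            skip = True                # drop everything in this section
            continue
        if stripped.startswith('['):
            skip = False               # any other header ends the skip
        if skip:
            continue
        # mask tokens shaped like seeds: 's' plus 28 alphanumerics
        yield re.sub(r'\bs[a-zA-Z0-9]{28}\b',
                     'secretsecretsecretsecretmaybe', line)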
@@ -1 +0,0 @@
-python/Manifest.py
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import absolute_import, division, print_function, unicode_literals
-
-import sys
-import traceback
-
-from ripple.ledger import Server
-from ripple.ledger.commands import Cache, Info, Print
-from ripple.ledger.Args import ARGS
-from ripple.util import Log
-from ripple.util.CommandList import CommandList
-
-_COMMANDS = CommandList(Cache, Info, Print)
-
-if __name__ == '__main__':
-    try:
-        server = Server.Server()
-        args = list(ARGS.command)
-        _COMMANDS.run_safe(args.pop(0), server, *args)
-    except Exception as e:
-        if ARGS.verbose:
-            print(traceback.format_exc(), sys.stderr)
-        Log.error(e)
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-from ripple.util import Sign
-
-result = Sign.run_command(sys.argv[1:])
-exit(0 if result else -1)
@@ -1,8 +0,0 @@
-Unit Tests
-==========
-
-To run the Python unit tests, execute:
-
-    python -m unittest discover
-
-from this directory.
@@ -1,251 +0,0 @@
-########################## LICENCE ###############################
-
-# Copyright (c) 2005-2012, Michele Simionato
-# All rights reserved.
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-
-# Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# Redistributions in bytecode form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in
-# the documentation and/or other materials provided with the
-# distribution.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-# DAMAGE.
-
-"""
-Decorator module, see http://pypi.python.org/pypi/decorator
-for the documentation.
-"""
-
-__version__ = '3.4.0'
-
-__all__ = ["decorator", "FunctionMaker", "contextmanager"]
-
-import sys, re, inspect
-if sys.version >= '3':
-    from inspect import getfullargspec
-    def get_init(cls):
-        return cls.__init__
-else:
-    class getfullargspec(object):
-        "A quick and dirty replacement for getfullargspec for Python 2.X"
-        def __init__(self, f):
-            self.args, self.varargs, self.varkw, self.defaults = \
-                inspect.getargspec(f)
-            self.kwonlyargs = []
-            self.kwonlydefaults = None
-        def __iter__(self):
-            yield self.args
-            yield self.varargs
-            yield self.varkw
-            yield self.defaults
-    def get_init(cls):
-        return cls.__init__.im_func
-
-DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')
-
-# basic functionality
-class FunctionMaker(object):
-    """
-    An object with the ability to create functions with a given signature.
-    It has attributes name, doc, module, signature, defaults, dict and
-    methods update and make.
-    """
-    def __init__(self, func=None, name=None, signature=None,
-                 defaults=None, doc=None, module=None, funcdict=None):
-        self.shortsignature = signature
-        if func:
-            # func can be a class or a callable, but not an instance method
-            self.name = func.__name__
-            if self.name == '<lambda>': # small hack for lambda functions
-                self.name = '_lambda_'
-            self.doc = func.__doc__
-            self.module = func.__module__
-            if inspect.isfunction(func):
-                argspec = getfullargspec(func)
-                self.annotations = getattr(func, '__annotations__', {})
-                for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
-                          'kwonlydefaults'):
-                    setattr(self, a, getattr(argspec, a))
-                for i, arg in enumerate(self.args):
-                    setattr(self, 'arg%d' % i, arg)
-                if sys.version < '3': # easy way
-                    self.shortsignature = self.signature = \
-                        inspect.formatargspec(
-                        formatvalue=lambda val: "", *argspec)[1:-1]
-                else: # Python 3 way
-                    allargs = list(self.args)
-                    allshortargs = list(self.args)
-                    if self.varargs:
-                        allargs.append('*' + self.varargs)
-                        allshortargs.append('*' + self.varargs)
-                    elif self.kwonlyargs:
-                        allargs.append('*') # single star syntax
-                    for a in self.kwonlyargs:
-                        allargs.append('%s=None' % a)
-                        allshortargs.append('%s=%s' % (a, a))
-                    if self.varkw:
-                        allargs.append('**' + self.varkw)
-                        allshortargs.append('**' + self.varkw)
-                    self.signature = ', '.join(allargs)
-                    self.shortsignature = ', '.join(allshortargs)
-                self.dict = func.__dict__.copy()
-        # func=None happens when decorating a caller
-        if name:
-            self.name = name
-        if signature is not None:
-            self.signature = signature
-        if defaults:
-            self.defaults = defaults
-        if doc:
-            self.doc = doc
-        if module:
-            self.module = module
-        if funcdict:
-            self.dict = funcdict
-        # check existence required attributes
-        assert hasattr(self, 'name')
-        if not hasattr(self, 'signature'):
-            raise TypeError('You are decorating a non function: %s' % func)
-
-    def update(self, func, **kw):
-        "Update the signature of func with the data in self"
-        func.__name__ = self.name
-        func.__doc__ = getattr(self, 'doc', None)
-        func.__dict__ = getattr(self, 'dict', {})
-        func.func_defaults = getattr(self, 'defaults', ())
-        func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
-        func.__annotations__ = getattr(self, 'annotations', None)
-        callermodule = sys._getframe(3).f_globals.get('__name__', '?')
-        func.__module__ = getattr(self, 'module', callermodule)
-        func.__dict__.update(kw)
-
-    def make(self, src_templ, evaldict=None, addsource=False, **attrs):
-        "Make a new function from a given template and update the signature"
-        src = src_templ % vars(self) # expand name and signature
-        evaldict = evaldict or {}
-        mo = DEF.match(src)
-        if mo is None:
-            raise SyntaxError('not a valid function template\n%s' % src)
-        name = mo.group(1) # extract the function name
-        names = set([name] + [arg.strip(' *') for arg in
-                              self.shortsignature.split(',')])
-        for n in names:
-            if n in ('_func_', '_call_'):
-                raise NameError('%s is overridden in\n%s' % (n, src))
-        if not src.endswith('\n'): # add a newline just for safety
-            src += '\n' # this is needed in old versions of Python
-        try:
-            code = compile(src, '<string>', 'single')
-            # print >> sys.stderr, 'Compiling %s' % src
-            exec code in evaldict
-        except:
-            print >> sys.stderr, 'Error in generated code:'
-            print >> sys.stderr, src
-            raise
-        func = evaldict[name]
-        if addsource:
-            attrs['__source__'] = src
-        self.update(func, **attrs)
-        return func
-
-    @classmethod
-    def create(cls, obj, body, evaldict, defaults=None,
-               doc=None, module=None, addsource=True, **attrs):
-        """
-        Create a function from the strings name, signature and body.
-        evaldict is the evaluation dictionary. If addsource is true an attribute
-        __source__ is added to the result. The attributes attrs are added,
-        if any.
-        """
-        if isinstance(obj, str): # "name(signature)"
-            name, rest = obj.strip().split('(', 1)
-            signature = rest[:-1] #strip a right parens
-            func = None
-        else: # a function
-            name = None
-            signature = None
-            func = obj
-        self = cls(func, name, signature, defaults, doc, module)
-        ibody = '\n'.join('    ' + line for line in body.splitlines())
-        return self.make('def %(name)s(%(signature)s):\n' + ibody,
-                         evaldict, addsource, **attrs)
-
-def decorator(caller, func=None):
-    """
-    decorator(caller) converts a caller function into a decorator;
-    decorator(caller, func) decorates a function using a caller.
-    """
-    if func is not None: # returns a decorated function
-        evaldict = func.func_globals.copy()
-        evaldict['_call_'] = caller
-        evaldict['_func_'] = func
-        return FunctionMaker.create(
-            func, "return _call_(_func_, %(shortsignature)s)",
-            evaldict, undecorated=func, __wrapped__=func)
-    else: # returns a decorator
-        if inspect.isclass(caller):
-            name = caller.__name__.lower()
-            callerfunc = get_init(caller)
-            doc = 'decorator(%s) converts functions/generators into ' \
-                'factories of %s objects' % (caller.__name__, caller.__name__)
-            fun = getfullargspec(callerfunc).args[1] # second arg
-        elif inspect.isfunction(caller):
-            name = '_lambda_' if caller.__name__ == '<lambda>' \
-                else caller.__name__
-            callerfunc = caller
-            doc = caller.__doc__
-            fun = getfullargspec(callerfunc).args[0] # first arg
-        else: # assume caller is an object with a __call__ method
-            name = caller.__class__.__name__.lower()
-            callerfunc = caller.__call__.im_func
-            doc = caller.__call__.__doc__
-            fun = getfullargspec(callerfunc).args[1] # second arg
-        evaldict = callerfunc.func_globals.copy()
-        evaldict['_call_'] = caller
-        evaldict['decorator'] = decorator
-        return FunctionMaker.create(
-            '%s(%s)' % (name, fun),
-            'return decorator(_call_, %s)' % fun,
-            evaldict, undecorated=caller, __wrapped__=caller,
-            doc=doc, module=caller.__module__)
-
-######################### contextmanager ########################
-
-def __call__(self, func):
-    'Context manager decorator'
-    return FunctionMaker.create(
-        func, "with _self_: return _func_(%(shortsignature)s)",
-        dict(_self_=self, _func_=func), __wrapped__=func)
-
-try: # Python >= 3.2
-
-    from contextlib import _GeneratorContextManager
-    ContextManager = type(
-        'ContextManager', (_GeneratorContextManager,), dict(__call__=__call__))
-
-except ImportError: # Python >= 2.5
-
-    from contextlib import GeneratorContextManager
-    def __init__(self, f, *a, **k):
-        return GeneratorContextManager.__init__(self, f(*a, **k))
-    ContextManager = type(
-        'ContextManager', (GeneratorContextManager,),
-        dict(__call__=__call__, __init__=__init__))
-
-contextmanager = decorator(ContextManager)
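The removed module's documented entry point, decorator(caller), turns a caller function into a signature-preserving decorator. A usage sketch against that documented API (the PyPI decorator package; the `trace` caller is illustrative):

from decorator import decorator

@decorator
def trace(f, *args, **kw):
    # runs around every call to the decorated function
    print("calling %s with args %s, %s" % (f.__name__, args, kw))
    return f(*args, **kw)

@trace
def add(x, y):
    return x + y

add(1, 2)  # prints the trace line, then returns 3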
@@ -1,14 +0,0 @@
-__all__ = ["curves", "der", "ecdsa", "ellipticcurve", "keys", "numbertheory",
-           "test_pyecdsa", "util", "six"]
-from .keys import SigningKey, VerifyingKey, BadSignatureError, BadDigestError
-from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1
-
-_hush_pyflakes = [SigningKey, VerifyingKey, BadSignatureError, BadDigestError,
-                  NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]
-del _hush_pyflakes
-
-# This code comes from http://github.com/warner/python-ecdsa
-
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
@@ -1,183 +0,0 @@
-
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.12 (https://github.com/warner/python-versioneer)
-
-# these strings will be replaced by git during git-archive
-git_refnames = " (HEAD, master)"
-git_full = "e7a6daff51221b8edd888cff404596ef90432869"
-
-# these strings are filled in when 'setup.py versioneer' creates _version.py
-tag_prefix = "python-ecdsa-"
-parentdir_prefix = "ecdsa-"
-versionfile_source = "ecdsa/_version.py"
-
-import os, sys, re, subprocess, errno
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
-    assert isinstance(commands, list)
-    p = None
-    for c in commands:
-        try:
-            # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
-            break
-        except EnvironmentError:
-            e = sys.exc_info()[1]
-            if e.errno == errno.ENOENT:
-                continue
-            if verbose:
-                print("unable to run %s" % args[0])
-                print(e)
-            return None
-    else:
-        if verbose:
-            print("unable to find command, tried %s" % (commands,))
-        return None
-    stdout = p.communicate()[0].strip()
-    if sys.version >= '3':
-        stdout = stdout.decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %s (error)" % args[0])
-        return None
-    return stdout
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose=False):
-    # Source tarballs conventionally unpack into a directory that includes
-    # both the project name and a version string.
-    dirname = os.path.basename(root)
-    if not dirname.startswith(parentdir_prefix):
-        if verbose:
-            print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
-                  (root, dirname, parentdir_prefix))
-        return None
-    return {"version": dirname[len(parentdir_prefix):], "full": ""}
-
-def git_get_keywords(versionfile_abs):
-    # the code embedded in _version.py can just fetch the value of these
-    # keywords. When used from setup.py, we don't want to import _version.py,
-    # so we do it with a regexp instead. This function is not used from
-    # _version.py.
-    keywords = {}
-    try:
-        f = open(versionfile_abs,"r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
-        pass
-    return keywords
-
-def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
-    if not keywords:
-        return {} # keyword-finding function failed to find keywords
-    refnames = keywords["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("keywords are unexpanded, not using")
-        return {} # unexpanded, so not in an unpacked git-archive tarball
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
-    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
-    TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
-    if not tags:
-        # Either we're using git < 1.8.3, or there really are no tags. We use
-        # a heuristic: assume all version tags have a digit. The old git %d
-        # expansion behaves like git log --decorate=short and strips out the
-        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
-        # between branches and tags. By ignoring refnames without digits, we
-        # filter out many common branch names like "release" and
-        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
-        if verbose:
-            print("discarding '%s', no digits" % ",".join(refs-tags))
-    if verbose:
-        print("likely tags: %s" % ",".join(sorted(tags)))
-    for ref in sorted(tags):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %s" % r)
-            return { "version": r,
-                     "full": keywords["full"].strip() }
-    # no suitable tags, so we use the full revision id
-    if verbose:
-        print("no suitable tags, using full revision id")
-    return { "version": keywords["full"].strip(),
-             "full": keywords["full"].strip() }
-
-
-def git_versions_from_vcs(tag_prefix, root, verbose=False):
-    # this runs 'git' from the root of the source tree. This only gets called
-    # if the git-archive 'subst' keywords were *not* expanded, and
-    # _version.py hasn't already been rewritten with a short version string,
-    # meaning we're inside a checked out source tree.
-
-    if not os.path.exists(os.path.join(root, ".git")):
-        if verbose:
-            print("no .git in %s" % root)
-        return {}
-
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-    stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
-                         cwd=root)
-    if stdout is None:
-        return {}
-    if not stdout.startswith(tag_prefix):
-        if verbose:
-            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
-        return {}
-    tag = stdout[len(tag_prefix):]
-    stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
-    if stdout is None:
-        return {}
-    full = stdout.strip()
-    if tag.endswith("-dirty"):
-        full += "-dirty"
-    return {"version": tag, "full": full}
-
-
-def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
-    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
-    # __file__, we can work backwards from there to the root. Some
-    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
-    # case we can only use expanded keywords.
-
-    keywords = { "refnames": git_refnames, "full": git_full }
-    ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
-    if ver:
-        return ver
-
-    try:
-        root = os.path.abspath(__file__)
-        # versionfile_source is the relative path from the top of the source
-        # tree (where the .git directory might live) to this file. Invert
-        # this to find the root from __file__.
-        for i in range(len(versionfile_source.split(os.sep))):
-            root = os.path.dirname(root)
-    except NameError:
-        return default
-
-    return (git_versions_from_vcs(tag_prefix, root, verbose)
-            or versions_from_parentdir(parentdir_prefix, root, verbose)
-            or default)
@@ -1,53 +0,0 @@
from __future__ import division

from . import der, ecdsa

class UnknownCurveError(Exception):
    pass

def orderlen(order):
    return (1+len("%x"%order))//2 # bytes

# the NIST curves
class Curve:
    def __init__(self, name, openssl_name,
                 curve, generator, oid):
        self.name = name
        self.openssl_name = openssl_name # maybe None
        self.curve = curve
        self.generator = generator
        self.order = generator.order()
        self.baselen = orderlen(self.order)
        self.verifying_key_length = 2*self.baselen
        self.signature_length = 2*self.baselen
        self.oid = oid
        self.encoded_oid = der.encode_oid(*oid)

NIST192p = Curve("NIST192p", "prime192v1",
                 ecdsa.curve_192, ecdsa.generator_192,
                 (1, 2, 840, 10045, 3, 1, 1))
NIST224p = Curve("NIST224p", "secp224r1",
                 ecdsa.curve_224, ecdsa.generator_224,
                 (1, 3, 132, 0, 33))
NIST256p = Curve("NIST256p", "prime256v1",
                 ecdsa.curve_256, ecdsa.generator_256,
                 (1, 2, 840, 10045, 3, 1, 7))
NIST384p = Curve("NIST384p", "secp384r1",
                 ecdsa.curve_384, ecdsa.generator_384,
                 (1, 3, 132, 0, 34))
NIST521p = Curve("NIST521p", "secp521r1",
                 ecdsa.curve_521, ecdsa.generator_521,
                 (1, 3, 132, 0, 35))
SECP256k1 = Curve("SECP256k1", "secp256k1",
                  ecdsa.curve_secp256k1, ecdsa.generator_secp256k1,
                  (1, 3, 132, 0, 10))

curves = [NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]

def find_curve(oid_curve):
    for c in curves:
        if c.oid == oid_curve:
            return c
    raise UnknownCurveError("I don't know about the curve with oid %s. "
                            "I only know about these: %s" %
                            (oid_curve, [c.name for c in curves]))
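
# Example (illustrative, not part of the original module): find_curve inverts
# the oid attribute set above, and orderlen gives the byte width used for
# fixed-length field encodings:
#
#   assert find_curve((1, 3, 132, 0, 10)) is SECP256k1
#   assert orderlen(SECP256k1.order) == 32   # 256-bit group order -> 32 bytes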
@@ -1,199 +0,0 @@
from __future__ import division

import binascii
import base64
from .six import int2byte, b, integer_types, text_type

class UnexpectedDER(Exception):
    pass

def encode_constructed(tag, value):
    return int2byte(0xa0+tag) + encode_length(len(value)) + value
def encode_integer(r):
    assert r >= 0 # can't support negative numbers yet
    h = ("%x" % r).encode()
    if len(h) % 2:
        h = b("0") + h
    s = binascii.unhexlify(h)
    num = s[0] if isinstance(s[0], integer_types) else ord(s[0])
    if num <= 0x7f:
        return b("\x02") + int2byte(len(s)) + s
    else:
        # DER integers are two's complement, so if the first byte is
        # 0x80-0xff then we need an extra 0x00 byte to prevent it from
        # looking negative.
        return b("\x02") + int2byte(len(s)+1) + b("\x00") + s

def encode_bitstring(s):
    return b("\x03") + encode_length(len(s)) + s
def encode_octet_string(s):
    return b("\x04") + encode_length(len(s)) + s
def encode_oid(first, second, *pieces):
    assert first <= 2
    assert second <= 39
    encoded_pieces = [int2byte(40*first+second)] + [encode_number(p)
                                                    for p in pieces]
    body = b('').join(encoded_pieces)
    return b('\x06') + encode_length(len(body)) + body
def encode_sequence(*encoded_pieces):
    total_len = sum([len(p) for p in encoded_pieces])
    return b('\x30') + encode_length(total_len) + b('').join(encoded_pieces)
def encode_number(n):
    b128_digits = []
    while n:
        b128_digits.insert(0, (n & 0x7f) | 0x80)
        n = n >> 7
    if not b128_digits:
        b128_digits.append(0)
    b128_digits[-1] &= 0x7f
    return b('').join([int2byte(d) for d in b128_digits])
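
# Round-trip sketch (illustrative; uses only helpers from this module): each
# encode_* above is undone by the matching remove_* below, e.g.
#
#   seq = encode_sequence(encode_integer(1))   # b"\x30\x03\x02\x01\x01"
#   body, rest = remove_sequence(seq)
#   assert remove_integer(body) == (1, b("")) and rest == b("")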

def remove_constructed(string):
    s0 = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if (s0 & 0xe0) != 0xa0:
        raise UnexpectedDER("wanted constructed tag (0xa0-0xbf), got 0x%02x"
                            % s0)
    tag = s0 & 0x1f
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return tag, body, rest

def remove_sequence(string):
    if not string.startswith(b("\x30")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted sequence (0x30), got 0x%02x" % n)
    length, lengthlength = read_length(string[1:])
    endseq = 1+lengthlength+length
    return string[1+lengthlength:endseq], string[endseq:]

def remove_octet_string(string):
    if not string.startswith(b("\x04")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted octetstring (0x04), got 0x%02x" % n)
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

def remove_object(string):
    if not string.startswith(b("\x06")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted object (0x06), got 0x%02x" % n)
    length, lengthlength = read_length(string[1:])
    body = string[1+lengthlength:1+lengthlength+length]
    rest = string[1+lengthlength+length:]
    numbers = []
    while body:
        n, ll = read_number(body)
        numbers.append(n)
        body = body[ll:]
    n0 = numbers.pop(0)
    first = n0//40
    second = n0-(40*first)
    numbers.insert(0, first)
    numbers.insert(1, second)
    return tuple(numbers), rest

def remove_integer(string):
    if not string.startswith(b("\x02")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted integer (0x02), got 0x%02x" % n)
    length, llen = read_length(string[1:])
    numberbytes = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    nbytes = numberbytes[0] if isinstance(numberbytes[0], integer_types) else ord(numberbytes[0])
    assert nbytes < 0x80 # can't support negative numbers yet
    return int(binascii.hexlify(numberbytes), 16), rest

def read_number(string):
    number = 0
    llen = 0
    # base-128 big endian, with b7 set in all but the last byte
    while True:
        if llen > len(string):
            raise UnexpectedDER("ran out of length bytes")
        number = number << 7
        d = string[llen] if isinstance(string[llen], integer_types) else ord(string[llen])
        number += (d & 0x7f)
        llen += 1
        if not d & 0x80:
            break
    return number, llen

def encode_length(l):
    assert l >= 0
    if l < 0x80:
        return int2byte(l)
    s = ("%x" % l).encode()
    if len(s)%2:
        s = b("0")+s
    s = binascii.unhexlify(s)
    llen = len(s)
    return int2byte(0x80|llen) + s

def read_length(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not (num & 0x80):
        # short form
        return (num & 0x7f), 1
    # else long-form: b0&0x7f is number of additional base256 length bytes,
    # big-endian
    llen = num & 0x7f
    if llen > len(string)-1:
        raise UnexpectedDER("ran out of length bytes")
    return int(binascii.hexlify(string[1:1+llen]), 16), 1+llen

def remove_bitstring(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not string.startswith(b("\x03")):
        raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num)
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BITSTRING])


# signatures: (from RFC3279)
#  ansi-X9-62 OBJECT IDENTIFIER ::= {
#       iso(1) member-body(2) us(840) 10045 }
#
#  id-ecSigType OBJECT IDENTIFIER ::= {
#       ansi-X9-62 signatures(4) }
#  ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
#       id-ecSigType 1 }
## so 1,2,840,10045,4,1
## i.e. the first octet is 40*1 + 2 = 42 = 0x2a, then .. ..

# Ecdsa-Sig-Value ::= SEQUENCE {
#    r INTEGER,
#    s INTEGER }

# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 }
#
# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 }

# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021)
#  secp224r1 OBJECT IDENTIFIER ::= {
#    iso(1) identified-organization(3) certicom(132) curve(0) 33 }
# and the secp384r1 is (t=06,l=05,v=2b81040022)
#  secp384r1 OBJECT IDENTIFIER ::= {
#    iso(1) identified-organization(3) certicom(132) curve(0) 34 }
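
# Worked check of the guess above (illustrative): encode_oid packs
# 40*first+second into one octet and the remaining arcs in base 128, so the
# secp224r1 identifier comes out exactly as (t=06,l=05,v=2b81040021):
#
#   assert encode_oid(1, 3, 132, 0, 33) == b("\x06\x05\x2b\x81\x04\x00\x21")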

def unpem(pem):
    if isinstance(pem, text_type):
        pem = pem.encode()

    d = b("").join([l.strip() for l in pem.split(b("\n"))
                    if l and not l.startswith(b("-----"))])
    return base64.b64decode(d)
def topem(der, name):
    b64 = base64.b64encode(der)
    lines = [("-----BEGIN %s-----\n" % name).encode()]
    lines.extend([b64[start:start+64]+b("\n")
                  for start in range(0, len(b64), 64)])
    lines.append(("-----END %s-----\n" % name).encode())
    return b("").join(lines)
@@ -1,576 +0,0 @@
#! /usr/bin/env python

"""
Implementation of Elliptic-Curve Digital Signatures.

Classes and methods for elliptic-curve signatures:
private keys, public keys, signatures,
NIST prime-modulus curves with modulus lengths of
192, 224, 256, 384, and 521 bits.

Example:

  # (In real-life applications, you would probably want to
  # protect against defects in SystemRandom.)
  from random import SystemRandom
  randrange = SystemRandom().randrange

  # Generate a public/private key pair using the NIST Curve P-192:

  g = generator_192
  n = g.order()
  secret = randrange( 1, n )
  pubkey = Public_key( g, g * secret )
  privkey = Private_key( pubkey, secret )

  # Signing a hash value:

  hash = randrange( 1, n )
  signature = privkey.sign( hash, randrange( 1, n ) )

  # Verifying a signature for a hash value:

  if pubkey.verifies( hash, signature ):
    print_("Demo verification succeeded.")
  else:
    print_("*** Demo verification failed.")

  # Verification fails if the hash value is modified:

  if pubkey.verifies( hash-1, signature ):
    print_("**** Demo verification failed to reject tampered hash.")
  else:
    print_("Demo verification correctly rejected tampered hash.")

Version of 2009.05.16.

Revision history:
   2005.12.31 - Initial version.
   2008.11.25 - Substantial revisions introducing new classes.
   2009.05.16 - Warn against using random.randrange in real applications.
   2009.05.17 - Use random.SystemRandom by default.

Written in 2005 by Peter Pearson and placed in the public domain.
"""

from .six import int2byte, b, print_
from . import ellipticcurve
from . import numbertheory
import random



class Signature( object ):
    """ECDSA signature.
    """
    def __init__( self, r, s ):
        self.r = r
        self.s = s



class Public_key( object ):
    """Public key for ECDSA.
    """

    def __init__( self, generator, point ):
        """generator is the Point that generates the group,
        point is the Point that defines the public key.
        """

        self.curve = generator.curve()
        self.generator = generator
        self.point = point
        n = generator.order()
        if not n:
            raise RuntimeError("Generator point must have order.")
        if not n * point == ellipticcurve.INFINITY:
            raise RuntimeError("Generator point order is bad.")
        if point.x() < 0 or n <= point.x() or point.y() < 0 or n <= point.y():
            raise RuntimeError("Generator point has x or y out of range.")

    def verifies( self, hash, signature ):
        """Verify that signature is a valid signature of hash.
        Return True if the signature is valid.
        """

        # From X9.62 J.3.1.

        G = self.generator
        n = G.order()
        r = signature.r
        s = signature.s
        if r < 1 or r > n-1: return False
        if s < 1 or s > n-1: return False
        c = numbertheory.inverse_mod( s, n )
        u1 = ( hash * c ) % n
        u2 = ( r * c ) % n
        xy = u1 * G + u2 * self.point
        v = xy.x() % n
        return v == r



class Private_key( object ):
    """Private key for ECDSA.
    """

    def __init__( self, public_key, secret_multiplier ):
        """public_key is of class Public_key;
        secret_multiplier is a large integer.
        """

        self.public_key = public_key
        self.secret_multiplier = secret_multiplier

    def sign( self, hash, random_k ):
        """Return a signature for the provided hash, using the provided
        random nonce. It is absolutely vital that random_k be an unpredictable
        number in the range [1, self.public_key.point.order()-1]. If
        an attacker can guess random_k, he can compute our private key from a
        single signature. Also, if an attacker knows a few high-order
        bits (or a few low-order bits) of random_k, he can compute our private
        key from many signatures. The generation of nonces with adequate
        cryptographic strength is very difficult and far beyond the scope
        of this comment.

        May raise RuntimeError, in which case retrying with a new
        random value k is in order.
        """

        G = self.public_key.generator
        n = G.order()
        k = random_k % n
        p1 = k * G
        r = p1.x()
        if r == 0: raise RuntimeError("amazingly unlucky random number r")
        s = ( numbertheory.inverse_mod( k, n ) * \
              ( hash + ( self.secret_multiplier * r ) % n ) ) % n
        if s == 0: raise RuntimeError("amazingly unlucky random number s")
        return Signature( r, s )
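
    # Illustration (not in the original file) of why the docstring above is so
    # emphatic: two signatures (r, s1) on hash h1 and (r, s2) on hash h2 made
    # with the same random_k let anyone solve s = k^-1 * (hash + secret*r) mod n
    # for the key:
    #
    #   k = (h1 - h2) * numbertheory.inverse_mod(s1 - s2, n) % n
    #   secret = (s1 * k - h1) * numbertheory.inverse_mod(r, n) % n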



def int_to_string( x ):
    """Convert integer x into a string of bytes, as per X9.62."""
    assert x >= 0
    if x == 0: return b('\0')
    result = []
    while x:
        ordinal = x & 0xFF
        result.append(int2byte(ordinal))
        x >>= 8

    result.reverse()
    return b('').join(result)


def string_to_int( s ):
    """Convert a string of bytes into an integer, as per X9.62."""
    result = 0
    for c in s:
        if not isinstance(c, int): c = ord( c )
        result = 256 * result + c
    return result


def digest_integer( m ):
    """Convert an integer into a string of bytes, compute
    its SHA-1 hash, and convert the result to an integer."""
    #
    # I don't expect this function to be used much. I wrote
    # it in order to be able to duplicate the examples
    # in ECDSAVS.
    #
    from hashlib import sha1
    return string_to_int( sha1( int_to_string( m ) ).digest() )


def point_is_valid( generator, x, y ):
    """Is (x,y) a valid public key based on the specified generator?"""

    # These are the tests specified in X9.62.

    n = generator.order()
    curve = generator.curve()
    if x < 0 or n <= x or y < 0 or n <= y:
        return False
    if not curve.contains_point( x, y ):
        return False
    if not n*ellipticcurve.Point( curve, x, y ) == \
       ellipticcurve.INFINITY:
        return False
    return True



# NIST Curve P-192:
_p = 6277101735386680763835789423207666416083908700390324961279
_r = 6277101735386680763835789423176059013767194773182842284081
# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L
_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
_Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
_Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

curve_192 = ellipticcurve.CurveFp( _p, -3, _b )
generator_192 = ellipticcurve.Point( curve_192, _Gx, _Gy, _r )


# NIST Curve P-224:
_p = 26959946667150639794667015087019630673557916260026308143510066298881
_r = 26959946667150639794667015087019625940457807714424391721682722368061
# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L
# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL
_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
_Gx = 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
_Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34

curve_224 = ellipticcurve.CurveFp( _p, -3, _b )
generator_224 = ellipticcurve.Point( curve_224, _Gx, _Gy, _r )

# NIST Curve P-256:
_p = 115792089210356248762697446949407573530086143415290314195533631308867097853951
_r = 115792089210356248762697446949407573529996955224135760342422259061068512044369
# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L
# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL
_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
_Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
_Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5

curve_256 = ellipticcurve.CurveFp( _p, -3, _b )
generator_256 = ellipticcurve.Point( curve_256, _Gx, _Gy, _r )

# NIST Curve P-384:
_p = 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319
_r = 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643
# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L
# c = 0x79d1e655f868f02fff48dcdee14151ddb80643c1406d0ca10dfe6fc52009540a495e8042ea5f744f6e184667cc722483L
_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
_Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
_Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f

curve_384 = ellipticcurve.CurveFp( _p, -3, _b )
generator_384 = ellipticcurve.Point( curve_384, _Gx, _Gy, _r )

# NIST Curve P-521:
_p = 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151
_r = 6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449
# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL
# c = 0x0b48bfa5f420a34949539d2bdfc264eeeeb077688e44fbf0ad8f6d0edb37bd6b533281000518e19f1b9ffbe0fe9ed8a3c2200b8f875e523868c70c1e5bf55bad637L
_b = 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
_Gx = 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
_Gy = 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650

curve_521 = ellipticcurve.CurveFp( _p, -3, _b )
generator_521 = ellipticcurve.Point( curve_521, _Gx, _Gy, _r )

# Certicom secp256-k1
_a = 0x0000000000000000000000000000000000000000000000000000000000000000
_b = 0x0000000000000000000000000000000000000000000000000000000000000007
_p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
_Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
_r = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141

curve_secp256k1 = ellipticcurve.CurveFp( _p, _a, _b)
generator_secp256k1 = ellipticcurve.Point( curve_secp256k1, _Gx, _Gy, _r)



def __main__():
    class TestFailure(Exception): pass

    def test_point_validity( generator, x, y, expected ):
        """generator defines the curve; is (x,y) a point on
        this curve? "expected" is True if the right answer is Yes."""
        if point_is_valid( generator, x, y ) == expected:
            print_("Point validity tested as expected.")
        else:
            raise TestFailure("*** Point validity test gave wrong result.")

    def test_signature_validity( Msg, Qx, Qy, R, S, expected ):
        """Msg = message, Qx and Qy represent the base point on
        elliptic curve c192, R and S are the signature, and
        "expected" is True iff the signature is expected to be valid."""
        pubk = Public_key( generator_192,
                           ellipticcurve.Point( curve_192, Qx, Qy ) )
        got = pubk.verifies( digest_integer( Msg ), Signature( R, S ) )
        if got == expected:
            print_("Signature tested as expected: got %s, expected %s." % \
                   ( got, expected ))
        else:
            raise TestFailure("*** Signature test failed: got %s, expected %s." % \
                              ( got, expected ))

    print_("NIST Curve P-192:")

    p192 = generator_192

    # From X9.62:

    d = 651056770906015076056810763456358567190100156695615665659
    Q = d * p192
    if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
        raise TestFailure("*** p192 * d came out wrong.")
    else:
        print_("p192 * d came out right.")

    k = 6140507067065001063065065565667405560006161556565665656654
    R = k * p192
    if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
       or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
        raise TestFailure("*** k * p192 came out wrong.")
    else:
        print_("k * p192 came out right.")

    u1 = 2563697409189434185194736134579731015366492496392189760599
    u2 = 6266643813348617967186477710235785849136406323338782220568
    temp = u1 * p192 + u2 * Q
    if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
       or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
        raise TestFailure("*** u1 * p192 + u2 * Q came out wrong.")
    else:
        print_("u1 * p192 + u2 * Q came out right.")

    e = 968236873715988614170569073515315707566766479517
    pubk = Public_key( generator_192, generator_192 * d )
    privk = Private_key( pubk, d )
    sig = privk.sign( e, k )
    r, s = sig.r, sig.s
    if r != 3342403536405981729393488334694600415596881826869351677613 \
       or s != 5735822328888155254683894997897571951568553642892029982342:
        raise TestFailure("*** r or s came out wrong.")
    else:
        print_("r and s came out right.")

    valid = pubk.verifies( e, sig )
    if valid: print_("Signature verified OK.")
    else: raise TestFailure("*** Signature failed verification.")

    valid = pubk.verifies( e-1, sig )
    if not valid: print_("Forgery was correctly rejected.")
    else: raise TestFailure("*** Forgery was erroneously accepted.")

    print_("Testing point validity, as per ECDSAVS.pdf B.2.2:")

    test_point_validity( \
        p192, \
        0xcd6d0f029a023e9aaca429615b8f577abee685d8257cc83a, \
        0x00019c410987680e9fb6c0b6ecc01d9a2647c8bae27721bacdfc, \
        False )

    test_point_validity(
        p192, \
        0x00017f2fce203639e9eaf9fb50b81fc32776b30e3b02af16c73b, \
        0x95da95c5e72dd48e229d4748d4eee658a9a54111b23b2adb, \
        False )

    test_point_validity(
        p192, \
        0x4f77f8bc7fccbadd5760f4938746d5f253ee2168c1cf2792, \
        0x000147156ff824d131629739817edb197717c41aab5c2a70f0f6, \
        False )

    test_point_validity(
        p192, \
        0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6, \
        0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f, \
        True )

    test_point_validity(
        p192, \
        0xcdf56c1aa3d8afc53c521adf3ffb96734a6a630a4a5b5a70, \
        0x97c1c44a5fb229007b5ec5d25f7413d170068ffd023caa4e, \
        True )

    test_point_validity(
        p192, \
        0x89009c0dc361c81e99280c8e91df578df88cdf4b0cdedced, \
        0x27be44a529b7513e727251f128b34262a0fd4d8ec82377b9, \
        True )

    test_point_validity(
        p192, \
        0x6a223d00bd22c52833409a163e057e5b5da1def2a197dd15, \
        0x7b482604199367f1f303f9ef627f922f97023e90eae08abf, \
        True )

    test_point_validity(
        p192, \
        0x6dccbde75c0948c98dab32ea0bc59fe125cf0fb1a3798eda, \
        0x0001171a3e0fa60cf3096f4e116b556198de430e1fbd330c8835, \
        False )

    test_point_validity(
        p192, \
        0xd266b39e1f491fc4acbbbc7d098430931cfa66d55015af12, \
        0x193782eb909e391a3148b7764e6b234aa94e48d30a16dbb2, \
        False )

    test_point_validity(
        p192, \
        0x9d6ddbcd439baa0c6b80a654091680e462a7d1d3f1ffeb43, \
        0x6ad8efc4d133ccf167c44eb4691c80abffb9f82b932b8caa, \
        False )

    test_point_validity(
        p192, \
        0x146479d944e6bda87e5b35818aa666a4c998a71f4e95edbc, \
        0xa86d6fe62bc8fbd88139693f842635f687f132255858e7f6, \
        False )

    test_point_validity(
        p192, \
        0xe594d4a598046f3598243f50fd2c7bd7d380edb055802253, \
        0x509014c0c4d6b536e3ca750ec09066af39b4c8616a53a923, \
        False )

    print_("Trying signature-verification tests from ECDSAVS.pdf B.2.4:")
    print_("P-192:")
    Msg = 0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff798cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d158
    Qx = 0xd9dbfb332aa8e5ff091e8ce535857c37c73f6250ffb2e7ac
    Qy = 0x282102e364feded3ad15ddf968f88d8321aa268dd483ebc4
    R = 0x64dca58a20787c488d11d6dd96313f1b766f2d8efe122916
    S = 0x1ecba28141e84ab4ecad92f56720e2cc83eb3d22dec72479
    test_signature_validity( Msg, Qx, Qy, R, S, True )

    Msg = 0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db12e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db326ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63f4
    Qx = 0x3e53ef8d3112af3285c0e74842090712cd324832d4277ae7
    Qy = 0xcc75f8952d30aec2cbb719fc6aa9934590b5d0ff5a83adb7
    R = 0x8285261607283ba18f335026130bab31840dcfd9c3e555af
    S = 0x356d89e1b04541afc9704a45e9c535ce4a50929e33d7e06c
    test_signature_validity( Msg, Qx, Qy, R, S, True )

    Msg = 0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cdd41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d303f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42dd
    Qx = 0x16335dbe95f8e8254a4e04575d736befb258b8657f773cb7
    Qy = 0x421b13379c59bc9dce38a1099ca79bbd06d647c7f6242336
    R = 0x4141bd5d64ea36c5b0bd21ef28c02da216ed9d04522b1e91
    S = 0x159a6aa852bcc579e821b7bb0994c0861fb08280c38daa09
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b563097ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd8a
    Qx = 0xfd14cdf1607f5efb7b1793037b15bdf4baa6f7c16341ab0b
    Qy = 0x83fa0795cc6c4795b9016dac928fd6bac32f3229a96312c4
    R = 0x8dfdb832951e0167c5d762a473c0416c5c15bc1195667dc1
    S = 0x1720288a2dc13fa1ec78f763f8fe2ff7354a7e6fdde44520
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d39192e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bceae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072fb
    Qx = 0x674f941dc1a1f8b763c9334d726172d527b90ca324db8828
    Qy = 0x65adfa32e8b236cb33a3e84cf59bfb9417ae7e8ede57a7ff
    R = 0x9508b9fdd7daf0d8126f9e2bc5a35e4c6d800b5b804d7796
    S = 0x36f2bf6b21b987c77b53bb801b3435a577e3d493744bfab0
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397ce15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aae98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca6d
    Qx = 0x10ecca1aad7220b56a62008b35170bfd5e35885c4014a19f
    Qy = 0x04eb61984c6c12ade3bc47f3c629ece7aa0a033b9948d686
    R = 0x82bfa4e82c0dfe9274169b86694e76ce993fd83b5c60f325
    S = 0xa97685676c59a65dbde002fe9d613431fb183e8006d05633
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a278461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76e1
    Qx = 0x6636653cb5b894ca65c448277b29da3ad101c4c2300f7c04
    Qy = 0xfdf1cbb3fc3fd6a4f890b59e554544175fa77dbdbeb656c1
    R = 0xeac2ddecddfb79931a9c3d49c08de0645c783a24cb365e1c
    S = 0x3549fee3cfa7e5f93bc47d92d8ba100e881a2a93c22f8d50
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6ba2
    Qx = 0xa82bd718d01d354001148cd5f69b9ebf38ff6f21898f8aaa
    Qy = 0xe67ceede07fc2ebfafd62462a51e4b6c6b3d5b537b7caf3e
    R = 0x4d292486c620c3de20856e57d3bb72fcde4a73ad26376955
    S = 0xa85289591a6081d5728825520e62ff1c64f94235c04c7f95
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc910250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb658
    Qx = 0x7d3b016b57758b160c4fca73d48df07ae3b6b30225126c2f
    Qy = 0x4af3790d9775742bde46f8da876711be1b65244b2b39e7ec
    R = 0x95f778f5f656511a5ab49a5d69ddd0929563c29cbc3a9e62
    S = 0x75c87fc358c251b4c83d2dd979faad496b539f9f2ee7a289
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e10288acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c90a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9ea387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c97a
    Qx = 0x9362f28c4ef96453d8a2f849f21e881cd7566887da8beb4a
    Qy = 0xe64d26d8d74c48a024ae85d982ee74cd16046f4ee5333905
    R = 0xf3923476a296c88287e8de914b0b324ad5a963319a4fe73b
    S = 0xf0baeed7624ed00d15244d8ba2aede085517dbdec8ac65f5
    test_signature_validity( Msg, Qx, Qy, R, S, True )

    Msg = 0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f6450d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d9064e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8ce1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd0456d
    Qx = 0xcc6fc032a846aaac25533eb033522824f94e670fa997ecef
    Qy = 0xe25463ef77a029eccda8b294fd63dd694e38d223d30862f1
    R = 0x066b1d07f3a40e679b620eda7f550842a35c18b80c5ebe06
    S = 0xa0b0fb201e8f2df65e2c4508ef303bdc90d934016f16b2dc
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214eed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c440341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839d7
    Qx = 0x955c908fe900a996f7e2089bee2f6376830f76a19135e753
    Qy = 0xba0c42a91d3847de4a592a46dc3fdaf45a7cc709b90de520
    R = 0x1f58ad77fc04c782815a1405b0925e72095d906cbf52a668
    S = 0xf2e93758b3af75edf784f05a6761c9b9a6043c66b845b599
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf9986670a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b41269bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160cef3
    Qx = 0x31f7fa05576d78a949b24812d4383107a9a45bb5fccdd835
    Qy = 0x8dc0eb65994a90f02b5e19bd18b32d61150746c09107e76b
    R = 0xbe26d59e4e883dde7c286614a767b31e49ad88789d3a78ff
    S = 0x8762ca831c1ce42df77893c9b03119428e7a9b819b619068
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add5023572720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1
    Qx = 0x66aa8edbbdb5cf8e28ceb51b5bda891cae2df84819fe25c0
    Qy = 0x0c6bc2f69030a7ce58d4a00e3b3349844784a13b8936f8da
    R = 0xa4661e69b1734f4a71b788410a464b71e7ffe42334484f23
    S = 0x738421cf5e049159d69c57a915143e226cac8355e149afe9
    test_signature_validity( Msg, Qx, Qy, R, S, False )

    Msg = 0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5afa261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6db377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb
    Qx = 0xbcfacf45139b6f5f690a4c35a5fffa498794136a2353fc77
    Qy = 0x6f4a6c906316a6afc6d98fe1f0399d056f128fe0270b0f22
    R = 0x9db679a3dafe48f7ccad122933acfe9da0970b71c94c21c1
    S = 0x984c2db99827576c0a41a5da41e07d8cc768bc82f18c9da9
    test_signature_validity( Msg, Qx, Qy, R, S, False )



    print_("Testing the example code:")

    # Building a public/private key pair from the NIST Curve P-192:

    g = generator_192
    n = g.order()

    # (random.SystemRandom is supposed to provide
    # crypto-quality random numbers, but as Debian recently
    # illustrated, a systems programmer can accidentally
    # demolish this security, so in serious applications
    # further precautions are appropriate.)

    randrange = random.SystemRandom().randrange

    secret = randrange( 1, n )
    pubkey = Public_key( g, g * secret )
    privkey = Private_key( pubkey, secret )

    # Signing a hash value:

    hash = randrange( 1, n )
    signature = privkey.sign( hash, randrange( 1, n ) )

    # Verifying a signature for a hash value:

    if pubkey.verifies( hash, signature ):
        print_("Demo verification succeeded.")
    else:
        raise TestFailure("*** Demo verification failed.")

    if pubkey.verifies( hash-1, signature ):
        raise TestFailure( "**** Demo verification failed to reject tampered hash.")
    else:
        print_("Demo verification correctly rejected tampered hash.")

if __name__ == "__main__":
    __main__()
@@ -1,293 +0,0 @@
#! /usr/bin/env python
#
# Implementation of elliptic curves, for cryptographic applications.
#
# This module doesn't provide any way to choose a random elliptic
# curve, nor to verify that an elliptic curve was chosen randomly,
# because one can simply use NIST's standard curves.
#
# Notes from X9.62-1998 (draft):
#   Nomenclature:
#     - Q is a public key.
#     The "Elliptic Curve Domain Parameters" include:
#     - q is the "field size", which in our case equals p.
#     - p is a big prime.
#     - G is a point of prime order (5.1.1.1).
#     - n is the order of G (5.1.1.1).
#   Public-key validation (5.2.2):
#     - Verify that Q is not the point at infinity.
#     - Verify that X_Q and Y_Q are in [0,p-1].
#     - Verify that Q is on the curve.
#     - Verify that nQ is the point at infinity.
#   Signature generation (5.3):
#     - Pick random k from [1,n-1].
#   Signature checking (5.4.2):
#     - Verify that r and s are in [1,n-1].
#
# Version of 2008.11.25.
#
# Revision history:
#    2005.12.31 - Initial version.
#    2008.11.25 - Change CurveFp.is_on to contains_point.
#
# Written in 2005 by Peter Pearson and placed in the public domain.

from __future__ import division

from .six import print_
from . import numbertheory

class CurveFp( object ):
    """Elliptic Curve over the field of integers modulo a prime."""
    def __init__( self, p, a, b ):
        """The curve of points satisfying y^2 = x^3 + a*x + b (mod p)."""
        self.__p = p
        self.__a = a
        self.__b = b

    def p( self ):
        return self.__p

    def a( self ):
        return self.__a

    def b( self ):
        return self.__b

    def contains_point( self, x, y ):
        """Is the point (x,y) on this curve?"""
        return ( y * y - ( x * x * x + self.__a * x + self.__b ) ) % self.__p == 0
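
    # Quick sanity check (illustrative): on the X9.62 toy curve
    # y^2 = x^3 + x + 1 over GF(23) used in the self-tests below, the point
    # (3, 10) satisfies 10*10 % 23 == (3**3 + 3 + 1) % 23 == 8, so:
    #
    #   assert CurveFp(23, 1, 1).contains_point(3, 10)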



class Point( object ):
    """A point on an elliptic curve. Altering x and y is forbidden,
    but they can be read by the x() and y() methods."""
    def __init__( self, curve, x, y, order = None ):
        """curve, x, y, order; order (optional) is the order of this point."""
        self.__curve = curve
        self.__x = x
        self.__y = y
        self.__order = order
        # self.curve is allowed to be None only for INFINITY:
        if self.__curve: assert self.__curve.contains_point( x, y )
        if order: assert self * order == INFINITY

    def __eq__( self, other ):
        """Return True if the points are identical, False otherwise."""
        if self.__curve == other.__curve \
           and self.__x == other.__x \
           and self.__y == other.__y:
            return True
        else:
            return False

    def __add__( self, other ):
        """Add one point to another point."""

        # X9.62 B.3:

        if other == INFINITY: return self
        if self == INFINITY: return other
        assert self.__curve == other.__curve
        if self.__x == other.__x:
            if ( self.__y + other.__y ) % self.__curve.p() == 0:
                return INFINITY
            else:
                return self.double()

        p = self.__curve.p()

        l = ( ( other.__y - self.__y ) * \
              numbertheory.inverse_mod( other.__x - self.__x, p ) ) % p

        x3 = ( l * l - self.__x - other.__x ) % p
        y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

        return Point( self.__curve, x3, y3 )

    def __mul__( self, other ):
        """Multiply a point by an integer."""

        def leftmost_bit( x ):
            assert x > 0
            result = 1
            while result <= x: result = 2 * result
            return result // 2

        e = other
        if self.__order: e = e % self.__order
        if e == 0: return INFINITY
        if self == INFINITY: return INFINITY
        assert e > 0

        # From X9.62 D.3.2:

        e3 = 3 * e
        negative_self = Point( self.__curve, self.__x, -self.__y, self.__order )
        i = leftmost_bit( e3 ) // 2
        result = self
        # print_("Multiplying %s by %d (e3 = %d):" % ( self, other, e3 ))
        while i > 1:
            result = result.double()
            if ( e3 & i ) != 0 and ( e & i ) == 0: result = result + self
            if ( e3 & i ) == 0 and ( e & i ) != 0: result = result + negative_self
            # print_(". . . i = %d, result = %s" % ( i, result ))
            i = i // 2

        return result
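
    # Note (illustration, not in the original file): the loop above is the
    # X9.62 D.3.2 signed-digit trick -- comparing the bits of e3 = 3*e with
    # the bits of e yields digits in {-1, 0, +1}. For e = 11 (e3 = 33) the
    # trace is (((2P)*2 - P)*2*2) - P, i.e. 11P computed as 16P - 4P - P with
    # fewer point additions than plain binary double-and-add.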

    def __rmul__( self, other ):
        """Multiply a point by an integer."""

        return self * other

    def __str__( self ):
        if self == INFINITY: return "infinity"
        return "(%d,%d)" % ( self.__x, self.__y )

    def double( self ):
        """Return a new point that is twice the old."""

        if self == INFINITY:
            return INFINITY

        # X9.62 B.3:

        p = self.__curve.p()
        a = self.__curve.a()

        l = ( ( 3 * self.__x * self.__x + a ) * \
              numbertheory.inverse_mod( 2 * self.__y, p ) ) % p

        x3 = ( l * l - 2 * self.__x ) % p
        y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

        return Point( self.__curve, x3, y3 )

    def x( self ):
        return self.__x

    def y( self ):
        return self.__y

    def curve( self ):
        return self.__curve

    def order( self ):
        return self.__order


# This one point is the Point At Infinity for all purposes:
INFINITY = Point( None, None, None )

def __main__():

    class FailedTest(Exception): pass
    def test_add( c, x1, y1, x2, y2, x3, y3 ):
        """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
        p1 = Point( c, x1, y1 )
        p2 = Point( c, x2, y2 )
        p3 = p1 + p2
        print_("%s + %s = %s" % ( p1, p2, p3 ), end=' ')
        if p3.x() != x3 or p3.y() != y3:
            raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
        else:
            print_(" Good.")

    def test_double( c, x1, y1, x3, y3 ):
        """We expect that on curve c, 2*(x1,y1) = (x3, y3)."""
        p1 = Point( c, x1, y1 )
        p3 = p1.double()
        print_("%s doubled = %s" % ( p1, p3 ), end=' ')
        if p3.x() != x3 or p3.y() != y3:
            raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
        else:
            print_(" Good.")

    def test_double_infinity( c ):
        """We expect that on curve c, 2*INFINITY = INFINITY."""
        p1 = INFINITY
        p3 = p1.double()
        print_("%s doubled = %s" % ( p1, p3 ), end=' ')
        if p3.x() != INFINITY.x() or p3.y() != INFINITY.y():
            raise FailedTest("Failure: should give (%d,%d)." % ( INFINITY.x(), INFINITY.y() ))
        else:
            print_(" Good.")

    def test_multiply( c, x1, y1, m, x3, y3 ):
        """We expect that on curve c, m*(x1,y1) = (x3,y3)."""
        p1 = Point( c, x1, y1 )
        p3 = p1 * m
        print_("%s * %d = %s" % ( p1, m, p3 ), end=' ')
        if p3.x() != x3 or p3.y() != y3:
            raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
        else:
            print_(" Good.")


    # A few tests from X9.62 B.3:

    c = CurveFp( 23, 1, 1 )
    test_add( c, 3, 10, 9, 7, 17, 20 )
    test_double( c, 3, 10, 7, 12 )
    test_add( c, 3, 10, 3, 10, 7, 12 ) # (Should just invoke double.)
    test_multiply( c, 3, 10, 2, 7, 12 )

    test_double_infinity(c)

    # From X9.62 I.1 (p. 96):

    g = Point( c, 13, 7, 7 )

    check = INFINITY
    for i in range( 7 + 1 ):
        p = ( i % 7 ) * g
        print_("%s * %d = %s, expected %s . . ." % ( g, i, p, check ), end=' ')
        if p == check:
            print_(" Good.")
        else:
            raise FailedTest("Bad.")
        check = check + g

    # NIST Curve P-192:
    p = 6277101735386680763835789423207666416083908700390324961279
    r = 6277101735386680763835789423176059013767194773182842284081
    #s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
    c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
    b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
    Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
    Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

    c192 = CurveFp( p, -3, b )
    p192 = Point( c192, Gx, Gy, r )

    # Checking against some sample computations presented
    # in X9.62:

    d = 651056770906015076056810763456358567190100156695615665659
    Q = d * p192
    if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
        raise FailedTest("p192 * d came out wrong.")
    else:
        print_("p192 * d came out right.")

    k = 6140507067065001063065065565667405560006161556565665656654
    R = k * p192
    if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
       or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
        raise FailedTest("k * p192 came out wrong.")
    else:
        print_("k * p192 came out right.")

    u1 = 2563697409189434185194736134579731015366492496392189760599
    u2 = 6266643813348617967186477710235785849136406323338782220568
    temp = u1 * p192 + u2 * Q
    if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
       or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
        raise FailedTest("u1 * p192 + u2 * Q came out wrong.")
    else:
        print_("u1 * p192 + u2 * Q came out right.")

if __name__ == "__main__":
    __main__()
@@ -1,283 +0,0 @@
import binascii

from . import ecdsa
from . import der
from . import rfc6979
from .curves import NIST192p, find_curve
from .util import string_to_number, number_to_string, randrange
from .util import sigencode_string, sigdecode_string
from .util import oid_ecPublicKey, encoded_oid_ecPublicKey
from .six import PY3, b
from hashlib import sha1

class BadSignatureError(Exception):
    pass
class BadDigestError(Exception):
    pass

class VerifyingKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def from_public_point(klass, point, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.pubkey = ecdsa.Public_key(curve.generator, point)
        self.pubkey.order = curve.order
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1,
                    validate_point=True):
        order = curve.order
        assert len(string) == curve.verifying_key_length, \
               (len(string), curve.verifying_key_length)
        xs = string[:curve.baselen]
        ys = string[curve.baselen:]
        assert len(xs) == curve.baselen, (len(xs), curve.baselen)
        assert len(ys) == curve.baselen, (len(ys), curve.baselen)
        x = string_to_number(xs)
        y = string_to_number(ys)
        if validate_point:
            assert ecdsa.point_is_valid(curve.generator, x, y)
        from . import ellipticcurve
        point = ellipticcurve.Point(curve.curve, x, y, order)
        return klass.from_public_point(point, curve, hashfunc)

    @classmethod
    def from_pem(klass, string):
        return klass.from_der(der.unpem(string))

    @classmethod
    def from_der(klass, string):
        # [[oid_ecPublicKey,oid_curve], point_str_bitstring]
        s1,empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey: %s" %
                                    binascii.hexlify(empty))
        s2,point_str_bitstring = der.remove_sequence(s1)
        # s2 = oid_ecPublicKey,oid_curve
        oid_pk, rest = der.remove_object(s2)
        oid_curve, empty = der.remove_object(rest)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey objects: %s" %
                                    binascii.hexlify(empty))
        assert oid_pk == oid_ecPublicKey, (oid_pk, oid_ecPublicKey)
        curve = find_curve(oid_curve)
        point_str, empty = der.remove_bitstring(point_str_bitstring)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after pubkey pointstring: %s" %
                                    binascii.hexlify(empty))
        assert point_str.startswith(b("\x00\x04"))
        return klass.from_string(point_str[2:], curve)

    def to_string(self):
        # VerifyingKey.from_string(vk.to_string()) == vk as long as the
        # curves are the same: the curve itself is not included in the
        # serialized form
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        return x_str + y_str

    def to_pem(self):
        return der.topem(self.to_der(), "PUBLIC KEY")

    def to_der(self):
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        point_str = b("\x00\x04") + x_str + y_str
        return der.encode_sequence(der.encode_sequence(encoded_oid_ecPublicKey,
                                                       self.curve.encoded_oid),
                                   der.encode_bitstring(point_str))

    def verify(self, signature, data, hashfunc=None, sigdecode=sigdecode_string):
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()
        return self.verify_digest(signature, digest, sigdecode)

    def verify_digest(self, signature, digest, sigdecode=sigdecode_string):
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = sigdecode(signature, self.pubkey.order)
        sig = ecdsa.Signature(r, s)
        if self.pubkey.verifies(number, sig):
            return True
        raise BadSignatureError
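
    # Typical round trip (sketch; SigningKey.generate appears below, and
    # SigningKey.sign is defined later in the upstream file and assumed here):
    #
    #   sk = SigningKey.generate(curve=NIST192p)
    #   vk = sk.verifying_key
    #   assert vk.verify(sk.sign(b("message")), b("message"))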

class SigningKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def generate(klass, curve=NIST192p, entropy=None, hashfunc=sha1):
        secexp = randrange(curve.order, entropy)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    # to create a signing key from a short (arbitrary-length) seed, convert
    # that seed into an integer with something like
    # secexp=util.randrange_from_seed__X(seed, curve.order), and then pass
    # that integer into SigningKey.from_secret_exponent(secexp, curve)
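
    # For example (sketch; randrange_from_seed__trytryagain is one such
    # helper in this package's util module, assumed here):
    #
    #   secexp = util.randrange_from_seed__trytryagain(seed, NIST192p.order)
    #   sk = SigningKey.from_secret_exponent(secexp, NIST192p)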

    @classmethod
    def from_secret_exponent(klass, secexp, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.baselen = curve.baselen
        n = curve.order
        assert 1 <= secexp < n
        pubkey_point = curve.generator*secexp
        pubkey = ecdsa.Public_key(curve.generator, pubkey_point)
        pubkey.order = n
        self.verifying_key = VerifyingKey.from_public_point(pubkey_point, curve,
                                                            hashfunc)
        self.privkey = ecdsa.Private_key(pubkey, secexp)
        self.privkey.order = n
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1):
        assert len(string) == curve.baselen, (len(string), curve.baselen)
        secexp = string_to_number(string)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    @classmethod
    def from_pem(klass, string, hashfunc=sha1):
        # the privkey pem file has two sections: "EC PARAMETERS" and "EC
        # PRIVATE KEY". The first is redundant.
        if PY3 and isinstance(string, str):
            string = string.encode()
        privkey_pem = string[string.index(b("-----BEGIN EC PRIVATE KEY-----")):]
        return klass.from_der(der.unpem(privkey_pem), hashfunc)
    @classmethod
    def from_der(klass, string, hashfunc=sha1):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        s, empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey: %s" %
                                    binascii.hexlify(empty))
        one, s = der.remove_integer(s)
        if one != 1:
            raise der.UnexpectedDER("expected '1' at start of DER privkey,"
                                    " got %d" % one)
        privkey_str, s = der.remove_octet_string(s)
        tag, curve_oid_str, s = der.remove_constructed(s)
        if tag != 0:
            raise der.UnexpectedDER("expected tag 0 in DER privkey,"
                                    " got %d" % tag)
        curve_oid, empty = der.remove_object(curve_oid_str)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey "
                                    "curve_oid: %s" % binascii.hexlify(empty))
        curve = find_curve(curve_oid)

        # we don't actually care about the following fields
        #
        #tag, pubkey_bitstring, s = der.remove_constructed(s)
        #if tag != 1:
        #    raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d"
        #                            % tag)
        #pubkey_str = der.remove_bitstring(pubkey_bitstring)
        #if empty != "":
        #    raise der.UnexpectedDER("trailing junk after DER privkey "
        #                            "pubkeystr: %s" % binascii.hexlify(empty))

        # our from_string method likes fixed-length privkey strings
        if len(privkey_str) < curve.baselen:
            privkey_str = b("\x00")*(curve.baselen-len(privkey_str)) + privkey_str
        return klass.from_string(privkey_str, curve, hashfunc)

    def to_string(self):
        secexp = self.privkey.secret_multiplier
        s = number_to_string(secexp, self.privkey.order)
        return s

    def to_pem(self):
        # TODO: "BEGIN ECPARAMETERS"
        return der.topem(self.to_der(), "EC PRIVATE KEY")

    def to_der(self):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        encoded_vk = b("\x00\x04") + self.get_verifying_key().to_string()
        return der.encode_sequence(der.encode_integer(1),
                                   der.encode_octet_string(self.to_string()),
                                   der.encode_constructed(0, self.curve.encoded_oid),
                                   der.encode_constructed(1, der.encode_bitstring(encoded_vk)),
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_verifying_key(self):
|
|
||||||
return self.verifying_key
|
|
||||||
|
|
||||||
def sign_deterministic(self, data, hashfunc=None, sigencode=sigencode_string):
|
|
||||||
hashfunc = hashfunc or self.default_hashfunc
|
|
||||||
digest = hashfunc(data).digest()
|
|
||||||
|
|
||||||
return self.sign_digest_deterministic(digest, hashfunc=hashfunc, sigencode=sigencode)
|
|
||||||
|
|
||||||
def sign_digest_deterministic(self, digest, hashfunc=None, sigencode=sigencode_string):
|
|
||||||
"""
|
|
||||||
Calculates 'k' from data itself, removing the need for strong
|
|
||||||
random generator and producing deterministic (reproducible) signatures.
|
|
||||||
See RFC 6979 for more details.
|
|
||||||
"""
|
|
||||||
secexp = self.privkey.secret_multiplier
|
|
||||||
k = rfc6979.generate_k(
|
|
||||||
self.curve.generator.order(), secexp, hashfunc, digest)
|
|
||||||
|
|
||||||
return self.sign_digest(digest, sigencode=sigencode, k=k)
|
|
||||||
|
|
||||||
def sign(self, data, entropy=None, hashfunc=None, sigencode=sigencode_string, k=None):
|
|
||||||
"""
|
|
||||||
hashfunc= should behave like hashlib.sha1 . The output length of the
|
|
||||||
hash (in bytes) must not be longer than the length of the curve order
|
|
||||||
(rounded up to the nearest byte), so using SHA256 with nist256p is
|
|
||||||
ok, but SHA256 with nist192p is not. (In the 2**-96ish unlikely event
|
|
||||||
of a hash output larger than the curve order, the hash will
|
|
||||||
effectively be wrapped mod n).
|
|
||||||
|
|
||||||
Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode,
|
|
||||||
or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256.
|
|
||||||
"""
|
|
||||||
|
|
||||||
hashfunc = hashfunc or self.default_hashfunc
|
|
||||||
h = hashfunc(data).digest()
|
|
||||||
return self.sign_digest(h, entropy, sigencode, k)
|
|
||||||
|
|
||||||
def sign_digest(self, digest, entropy=None, sigencode=sigencode_string, k=None):
|
|
||||||
if len(digest) > self.curve.baselen:
|
|
||||||
raise BadDigestError("this curve (%s) is too short "
|
|
||||||
"for your digest (%d)" % (self.curve.name,
|
|
||||||
8*len(digest)))
|
|
||||||
number = string_to_number(digest)
|
|
||||||
r, s = self.sign_number(number, entropy, k)
|
|
||||||
return sigencode(r, s, self.privkey.order)
|
|
||||||
|
|
||||||
def sign_number(self, number, entropy=None, k=None):
|
|
||||||
# returns a pair of numbers
|
|
||||||
order = self.privkey.order
|
|
||||||
# privkey.sign() may raise RuntimeError in the amazingly unlikely
|
|
||||||
# (2**-192) event that r=0 or s=0, because that would leak the key.
|
|
||||||
# We could re-try with a different 'k', but we couldn't test that
|
|
||||||
# code, so I choose to allow the signature to fail instead.
|
|
||||||
|
|
||||||
# If k is set, it is used directly. In other cases
|
|
||||||
# it is generated using entropy function
|
|
||||||
if k is not None:
|
|
||||||
_k = k
|
|
||||||
else:
|
|
||||||
_k = randrange(order, entropy)
|
|
||||||
|
|
||||||
assert 1 <= _k < order
|
|
||||||
sig = self.privkey.sign(number, _k)
|
|
||||||
return sig.r, sig.s
|
|
||||||
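
# A sketch of the two key-creation paths described in the comments above: a
# key derived from a fixed seed (the helper name comes from util.py, as used
# by the test suite), then RFC 6979 deterministic signing. Illustrative only:
def _signing_key_demo():
    from . import util
    secexp = util.randrange_from_seed__trytryagain(b("seed"), NIST192p.order)
    sk = SigningKey.from_secret_exponent(secexp, NIST192p)
    # RFC 6979: same key + same message => identical signature, no RNG needed
    sig1 = sk.sign_deterministic(b("data"))
    sig2 = sk.sign_deterministic(b("data"))
    assert sig1 == sig2
    assert sk.get_verifying_key().verify(sig1, b("data"))
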
@@ -1,613 +0,0 @@
#! /usr/bin/env python
#
# Provide some simple capabilities from number theory.
#
# Version of 2008.11.14.
#
# Written in 2005 and 2006 by Peter Pearson and placed in the public domain.
# Revision history:
#   2008.11.14: Use pow( base, exponent, modulus ) for modular_exp.
#               Make gcd and lcm accept arbitrarily many arguments.

from __future__ import division

from .six import print_, integer_types
from .six.moves import reduce

import math


class Error( Exception ):
  """Base class for exceptions in this module."""
  pass

class SquareRootError( Error ):
  pass

class NegativeExponentError( Error ):
  pass


def modular_exp( base, exponent, modulus ):
  "Raise base to exponent, reducing by modulus"
  if exponent < 0:
    raise NegativeExponentError( "Negative exponents (%d) not allowed" \
                                 % exponent )
  return pow( base, exponent, modulus )
#   result = 1L
#   x = exponent
#   b = base + 0L
#   while x > 0:
#     if x % 2 > 0: result = (result * b) % modulus
#     x = x // 2
#     b = ( b * b ) % modulus
#   return result


def polynomial_reduce_mod( poly, polymod, p ):
  """Reduce poly by polymod, integer arithmetic modulo p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  # Just to make this easy, require a monic polynomial:
  assert polymod[-1] == 1

  assert len( polymod ) > 1

  while len( poly ) >= len( polymod ):
    if poly[-1] != 0:
      for i in range( 2, len( polymod ) + 1 ):
        poly[-i] = ( poly[-i] - poly[-1] * polymod[-i] ) % p
    poly = poly[0:-1]

  return poly


def polynomial_multiply_mod( m1, m2, polymod, p ):
  """Polynomial multiplication modulo a polynomial over ints mod p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # This is just a seat-of-the-pants implementation.

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  # Initialize the product to zero:

  prod = ( len( m1 ) + len( m2 ) - 1 ) * [0]

  # Add together all the cross-terms:

  for i in range( len( m1 ) ):
    for j in range( len( m2 ) ):
      prod[i+j] = ( prod[i+j] + m1[i] * m2[j] ) % p

  return polynomial_reduce_mod( prod, polymod, p )


def polynomial_exp_mod( base, exponent, polymod, p ):
  """Polynomial exponentiation modulo a polynomial over ints mod p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # Based on the Handbook of Applied Cryptography, algorithm 2.227.

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  assert exponent < p

  if exponent == 0: return [ 1 ]

  G = base
  k = exponent
  if k%2 == 1: s = G
  else:        s = [ 1 ]

  while k > 1:
    k = k // 2
    G = polynomial_multiply_mod( G, G, polymod, p )
    if k%2 == 1: s = polynomial_multiply_mod( G, s, polymod, p )

  return s


def jacobi( a, n ):
  """Jacobi symbol"""

  # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149.

  # This function has been tested by comparison with a small
  # table printed in HAC, and by extensive use in calculating
  # modular square roots.

  assert n >= 3
  assert n%2 == 1
  a = a % n
  if a == 0: return 0
  if a == 1: return 1
  a1, e = a, 0
  while a1%2 == 0:
    a1, e = a1//2, e+1
  if e%2 == 0 or n%8 == 1 or n%8 == 7: s = 1
  else: s = -1
  if a1 == 1: return s
  if n%4 == 3 and a1%4 == 3: s = -s
  return s * jacobi( n % a1, a1 )
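
# Quick sanity sketch (illustrative, not called anywhere): for an odd prime
# p, jacobi(a, p) reduces to the Legendre symbol, which Euler's criterion
# computes as a**((p-1)//2) mod p:
def _jacobi_demo():
  p = 1009                          # prime, so Jacobi == Legendre
  for a in range( 1, 20 ):
    euler = modular_exp( a, (p - 1) // 2, p )
    assert jacobi( a, p ) == ( 1 if euler == 1 else -1 )
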

def square_root_mod_prime( a, p ):
  """Modular square root of a, mod p, p prime."""

  # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.

  # This module has been tested for all values in [0,p-1] for
  # every prime p from 3 to 1229.

  assert 0 <= a < p
  assert 1 < p

  if a == 0: return 0
  if p == 2: return a

  jac = jacobi( a, p )
  if jac == -1: raise SquareRootError( "%d has no square root modulo %d" \
                                       % ( a, p ) )

  if p % 4 == 3: return modular_exp( a, (p+1)//4, p )

  if p % 8 == 5:
    d = modular_exp( a, (p-1)//4, p )
    if d == 1: return modular_exp( a, (p+3)//8, p )
    if d == p-1: return ( 2 * a * modular_exp( 4*a, (p-5)//8, p ) ) % p
    raise RuntimeError("Shouldn't get here.")

  for b in range( 2, p ):
    if jacobi( b*b-4*a, p ) == -1:
      f = ( a, -b, 1 )
      ff = polynomial_exp_mod( ( 0, 1 ), (p+1)//2, f, p )
      assert ff[1] == 0
      return ff[0]
  raise RuntimeError("No b found.")
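
# Usage sketch: any quadratic residue round-trips, and non-residues raise
# SquareRootError (both behaviors are exercised exhaustively in __main__
# below):
def _sqrt_demo():
  p = 1229
  r = square_root_mod_prime( 25, p )
  assert ( r * r ) % p == 25
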

def inverse_mod( a, m ):
  """Inverse of a mod m."""

  if a < 0 or m <= a: a = a % m

  # From Ferguson and Schneier, roughly:

  c, d = a, m
  uc, vc, ud, vd = 1, 0, 0, 1
  while c != 0:
    q, c, d = divmod( d, c ) + ( c, )
    uc, vc, ud, vd = ud - q*uc, vd - q*vc, uc, vc

  # At this point, d is the GCD, and ud*a+vd*m = d.
  # If d == 1, this means that ud is an inverse.

  assert d == 1
  if ud > 0: return ud
  else: return ud + m
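
# Usage sketch: the extended-Euclid result satisfies
# a * inverse_mod(a, m) == 1 (mod m) whenever gcd(a, m) == 1:
def _inverse_mod_demo():
  a, m = 42, 2017                   # 2017 is prime, so gcd(42, 2017) == 1
  inv = inverse_mod( a, m )
  assert ( a * inv ) % m == 1
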

def gcd2(a, b):
  """Greatest common divisor using Euclid's algorithm."""
  while a:
    a, b = b%a, a
  return b


def gcd( *a ):
  """Greatest common divisor.

  Usage: gcd( [ 2, 4, 6 ] )
  or:    gcd( 2, 4, 6 )
  """

  if len( a ) > 1: return reduce( gcd2, a )
  if hasattr( a[0], "__iter__" ): return reduce( gcd2, a[0] )
  return a[0]


def lcm2(a,b):
  """Least common multiple of two integers."""

  return (a*b)//gcd(a,b)


def lcm( *a ):
  """Least common multiple.

  Usage: lcm( [ 3, 4, 5 ] )
  or:    lcm( 3, 4, 5 )
  """

  if len( a ) > 1: return reduce( lcm2, a )
  if hasattr( a[0], "__iter__" ): return reduce( lcm2, a[0] )
  return a[0]


def factorization( n ):
  """Decompose n into a list of (prime,exponent) pairs."""

  assert isinstance( n, integer_types )

  if n < 2: return []

  result = []
  d = 2

  # Test the small primes:

  for d in smallprimes:
    if d > n: break
    q, r = divmod( n, d )
    if r == 0:
      count = 1
      while d <= n:
        n = q
        q, r = divmod( n, d )
        if r != 0: break
        count = count + 1
      result.append( ( d, count ) )

  # If n is still greater than the last of our small primes,
  # it may require further work:

  if n > smallprimes[-1]:
    if is_prime( n ):           # If what's left is prime, it's easy:
      result.append( ( n, 1 ) )
    else:                       # Ugh. Search stupidly for a divisor:
      d = smallprimes[-1]
      while 1:
        d = d + 2               # Try the next divisor.
        q, r = divmod( n, d )
        if q < d: break         # n < d*d means we're done, n = 1 or prime.
        if r == 0:              # d divides n. How many times?
          count = 1
          n = q
          while d <= n:                 # As long as d might still divide n,
            q, r = divmod( n, d )       # see if it does.
            if r != 0: break
            n = q                       # It does. Reduce n, increase count.
            count = count + 1
          result.append( ( d, count ) )
      if n > 1: result.append( ( n, 1 ) )

  return result
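
# Usage sketch: 360 = 2**3 * 3**2 * 5, returned as (prime, exponent) pairs:
def _factorization_demo():
  assert factorization( 360 ) == [ ( 2, 3 ), ( 3, 2 ), ( 5, 1 ) ]
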

def phi( n ):
  """Return the Euler totient function of n."""

  assert isinstance( n, integer_types )

  if n < 3: return 1

  result = 1
  ff = factorization( n )
  for f in ff:
    e = f[1]
    if e > 1:
      result = result * f[0] ** (e-1) * ( f[0] - 1 )
    else:
      result = result * ( f[0] - 1 )
  return result
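
# Usage sketch: phi(12) == 4, since only 1, 5, 7 and 11 are coprime to 12:
def _phi_demo():
  assert phi( 12 ) == 4
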

def carmichael( n ):
  """Return Carmichael function of n.

  Carmichael(n) is the smallest integer x such that
  m**x = 1 mod n for all m relatively prime to n.
  """

  return carmichael_of_factorized( factorization( n ) )


def carmichael_of_factorized( f_list ):
  """Return the Carmichael function of a number that is
  represented as a list of (prime,exponent) pairs.
  """

  if len( f_list ) < 1: return 1

  result = carmichael_of_ppower( f_list[0] )
  for i in range( 1, len( f_list ) ):
    result = lcm( result, carmichael_of_ppower( f_list[i] ) )

  return result


def carmichael_of_ppower( pp ):
  """Carmichael function of the given power of the given prime.
  """

  p, a = pp
  if p == 2 and a > 2: return 2**(a-2)
  else: return (p-1) * p**(a-1)


def order_mod( x, m ):
  """Return the order of x in the multiplicative group mod m.
  """

  # Warning: this implementation is not very clever, and will
  # take a long time if m is very large.

  if m <= 1: return 0

  assert gcd( x, m ) == 1

  z = x
  result = 1
  while z != 1:
    z = ( z * x ) % m
    result = result + 1
  return result


def largest_factor_relatively_prime( a, b ):
  """Return the largest factor of a relatively prime to b.
  """

  while 1:
    d = gcd( a, b )
    if d <= 1: break
    b = d
    while 1:
      q, r = divmod( a, d )
      if r > 0:
        break
      a = q
  return a


def kinda_order_mod( x, m ):
  """Return the order of x in the multiplicative group mod m',
  where m' is the largest factor of m relatively prime to x.
  """

  return order_mod( x, largest_factor_relatively_prime( m, x ) )


def is_prime( n ):
  """Return True if n is prime, False otherwise.

  We use the Miller-Rabin test, as given in Menezes et al. p. 138.
  This test is not exact: there are composite values n for which
  it returns True.

  In testing the odd numbers from 10000001 to 19999999,
  about 66 composites got past the first test,
  5 got past the second test, and none got past the third.
  Since factors of 2, 3, 5, 7, and 11 were detected during
  preliminary screening, the number of numbers tested by
  Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7)
  = 4.57 million.
  """

  # (This is used to study the risk of false positives:)
  global miller_rabin_test_count

  miller_rabin_test_count = 0

  if n <= smallprimes[-1]:
    if n in smallprimes: return True
    else: return False

  if gcd( n, 2*3*5*7*11 ) != 1: return False

  # Choose a number of iterations sufficient to reduce the
  # probability of accepting a composite below 2**-80
  # (from Menezes et al. Table 4.4):

  t = 40
  n_bits = 1 + int( math.log( n, 2 ) )
  for k, tt in ( ( 100, 27 ),
                 ( 150, 18 ),
                 ( 200, 15 ),
                 ( 250, 12 ),
                 ( 300,  9 ),
                 ( 350,  8 ),
                 ( 400,  7 ),
                 ( 450,  6 ),
                 ( 550,  5 ),
                 ( 650,  4 ),
                 ( 850,  3 ),
                 ( 1300, 2 ),
                ):
    if n_bits < k: break
    t = tt

  # Run the test t times:

  s = 0
  r = n - 1
  while ( r % 2 ) == 0:
    s = s + 1
    r = r // 2
  for i in range( t ):
    a = smallprimes[ i ]
    y = modular_exp( a, r, n )
    if y != 1 and y != n-1:
      j = 1
      while j <= s - 1 and y != n - 1:
        y = modular_exp( y, 2, n )
        if y == 1:
          miller_rabin_test_count = i + 1
          return False
        j = j + 1
      if y != n-1:
        miller_rabin_test_count = i + 1
        return False
  return True
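
# Usage sketch: a prime just past the smallprimes table goes through
# Miller-Rabin, and a composite that survives the gcd screen is rejected:
def _is_prime_demo():
  assert is_prime( 1231 )           # prime, larger than smallprimes[-1]
  assert not is_prime( 29 * 43 )    # 1247: caught by Miller-Rabin, not the table
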

def next_prime( starting_value ):
  "Return the smallest prime larger than the starting value."

  if starting_value < 2: return 2
  result = ( starting_value + 1 ) | 1
  while not is_prime( result ): result = result + 2
  return result


smallprimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41,
               43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97,
               101, 103, 107, 109, 113, 127, 131, 137, 139, 149,
               151, 157, 163, 167, 173, 179, 181, 191, 193, 197,
               199, 211, 223, 227, 229, 233, 239, 241, 251, 257,
               263, 269, 271, 277, 281, 283, 293, 307, 311, 313,
               317, 331, 337, 347, 349, 353, 359, 367, 373, 379,
               383, 389, 397, 401, 409, 419, 421, 431, 433, 439,
               443, 449, 457, 461, 463, 467, 479, 487, 491, 499,
               503, 509, 521, 523, 541, 547, 557, 563, 569, 571,
               577, 587, 593, 599, 601, 607, 613, 617, 619, 631,
               641, 643, 647, 653, 659, 661, 673, 677, 683, 691,
               701, 709, 719, 727, 733, 739, 743, 751, 757, 761,
               769, 773, 787, 797, 809, 811, 821, 823, 827, 829,
               839, 853, 857, 859, 863, 877, 881, 883, 887, 907,
               911, 919, 929, 937, 941, 947, 953, 967, 971, 977,
               983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033,
               1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093,
               1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163,
               1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229]

miller_rabin_test_count = 0

def __main__():

  # Making sure locally defined exceptions work:
  # p = modular_exp( 2, -2, 3 )
  # p = square_root_mod_prime( 2, 3 )

  print_("Testing gcd...")
  assert gcd( 3*5*7, 3*5*11, 3*5*13 )     == 3*5
  assert gcd( [ 3*5*7, 3*5*11, 3*5*13 ] ) == 3*5
  assert gcd( 3 ) == 3

  print_("Testing lcm...")
  assert lcm( 3, 5*3, 7*3 )     == 3*5*7
  assert lcm( [ 3, 5*3, 7*3 ] ) == 3*5*7
  assert lcm( 3 ) == 3

  print_("Testing next_prime...")
  bigprimes = ( 999671,
                999683,
                999721,
                999727,
                999749,
                999763,
                999769,
                999773,
                999809,
                999853,
                999863,
                999883,
                999907,
                999917,
                999931,
                999953,
                999959,
                999961,
                999979,
                999983 )

  for i in range( len( bigprimes ) - 1 ):
    assert next_prime( bigprimes[i] ) == bigprimes[ i+1 ]

  error_tally = 0

  # Test the square_root_mod_prime function:

  for p in smallprimes:
    print_("Testing square_root_mod_prime for modulus p = %d." % p)
    squares = []

    for root in range( 0, 1+p//2 ):
      sq = ( root * root ) % p
      squares.append( sq )
      calculated = square_root_mod_prime( sq, p )
      if ( calculated * calculated ) % p != sq:
        error_tally = error_tally + 1
        print_("Failed to find %d as sqrt( %d ) mod %d. Said %d." % \
               ( root, sq, p, calculated ))

    for nonsquare in range( 0, p ):
      if nonsquare not in squares:
        try:
          calculated = square_root_mod_prime( nonsquare, p )
        except SquareRootError:
          pass
        else:
          error_tally = error_tally + 1
          print_("Failed to report no root for sqrt( %d ) mod %d." % \
                 ( nonsquare, p ))

  # Test the jacobi function:
  for m in range( 3, 400, 2 ):
    print_("Testing jacobi for modulus m = %d." % m)
    if is_prime( m ):
      squares = []
      for root in range( 1, m ):
        if jacobi( root * root, m ) != 1:
          error_tally = error_tally + 1
          print_("jacobi( %d * %d, %d ) != 1" % ( root, root, m ))
        squares.append( root * root % m )
      for i in range( 1, m ):
        if not i in squares:
          if jacobi( i, m ) != -1:
            error_tally = error_tally + 1
            print_("jacobi( %d, %d ) != -1" % ( i, m ))
    else:       # m is not prime.
      f = factorization( m )
      for a in range( 1, m ):
        c = 1
        for i in f:
          c = c * jacobi( a, i[0] ) ** i[1]
        if c != jacobi( a, m ):
          error_tally = error_tally + 1
          print_("%d != jacobi( %d, %d )" % ( c, a, m ))

  # Test the inverse_mod function:
  print_("Testing inverse_mod . . .")
  import random
  n_tests = 0
  for i in range( 100 ):
    m = random.randint( 20, 10000 )
    for j in range( 100 ):
      a = random.randint( 1, m-1 )
      if gcd( a, m ) == 1:
        n_tests = n_tests + 1
        inv = inverse_mod( a, m )
        if inv <= 0 or inv >= m or ( a * inv ) % m != 1:
          error_tally = error_tally + 1
          print_("%d = inverse_mod( %d, %d ) is wrong." % ( inv, a, m ))
  assert n_tests > 1000
  print_(n_tests, " tests of inverse_mod completed.")

  class FailedTest(Exception): pass
  print_(error_tally, "errors detected.")
  if error_tally != 0:
    raise FailedTest("%d errors detected" % error_tally)

if __name__ == '__main__':
  __main__()
@@ -1,103 +0,0 @@
'''
RFC 6979:
    Deterministic Usage of the Digital Signature Algorithm (DSA) and
    Elliptic Curve Digital Signature Algorithm (ECDSA)

    http://tools.ietf.org/html/rfc6979

Many thanks to Coda Hale for his implementation in Go language:
    https://github.com/codahale/rfc6979
'''

import hmac
from binascii import hexlify
from .util import number_to_string, number_to_string_crop
from .six import b

try:
    bin(0)
except NameError:
    binmap = {"0": "0000", "1": "0001", "2": "0010", "3": "0011",
              "4": "0100", "5": "0101", "6": "0110", "7": "0111",
              "8": "1000", "9": "1001", "a": "1010", "b": "1011",
              "c": "1100", "d": "1101", "e": "1110", "f": "1111"}
    def bin(value):  # for python2.5
        v = "".join(binmap[x] for x in "%x" % abs(value)).lstrip("0")
        if value < 0:
            return "-0b" + v
        return "0b" + v

def bit_length(num):
    # http://docs.python.org/dev/library/stdtypes.html#int.bit_length
    s = bin(num)         # binary representation:  bin(-37) --> '-0b100101'
    s = s.lstrip('-0b')  # remove leading zeros and minus sign
    return len(s)        # len('100101') --> 6

def bits2int(data, qlen):
    x = int(hexlify(data), 16)
    l = len(data) * 8

    if l > qlen:
        return x >> (l-qlen)
    return x

def bits2octets(data, order):
    z1 = bits2int(data, bit_length(order))
    z2 = z1 - order

    if z2 < 0:
        z2 = z1

    return number_to_string_crop(z2, order)
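
# Sketch of the RFC 6979 bit-twiddling helpers above (illustrative only):
# bits2int keeps just the leftmost qlen bits of the input, so the 16-bit
# string 0xff00 truncated to 8 bits is 0xff:
def _bits2int_demo():
    assert bits2int(b('\xff\x00'), 8) == 0xff
    assert bit_length(0b100101) == 6
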

# https://tools.ietf.org/html/rfc6979#section-3.2
def generate_k(order, secexp, hash_func, data):
    '''
        order - order of the DSA generator used in the signature
        secexp - secure exponent (private key) in numeric form
        hash_func - reference to the same hash function used for generating hash
        data - hash in binary form of the signing data
    '''

    qlen = bit_length(order)
    holen = hash_func().digest_size
    rolen = (qlen + 7) // 8  # floor division; plain "/" yields a float on Python 3
    bx = number_to_string(secexp, order) + bits2octets(data, order)

    # Step B
    v = b('\x01') * holen

    # Step C
    k = b('\x00') * holen

    # Step D

    k = hmac.new(k, v+b('\x00')+bx, hash_func).digest()

    # Step E
    v = hmac.new(k, v, hash_func).digest()

    # Step F
    k = hmac.new(k, v+b('\x01')+bx, hash_func).digest()

    # Step G
    v = hmac.new(k, v, hash_func).digest()

    # Step H
    while True:
        # Step H1
        t = b('')

        # Step H2
        while len(t) < rolen:
            v = hmac.new(k, v, hash_func).digest()
            t += v

        # Step H3
        secret = bits2int(t, qlen)

        if secret >= 1 and secret < order:
            return secret

        k = hmac.new(k, v+b('\x00'), hash_func).digest()
        v = hmac.new(k, v, hash_func).digest()
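
# Determinism sketch (illustrative, never called): generate_k depends only on
# (order, key, hash, digest), so repeated calls agree -- this is what makes
# RFC 6979 signatures reproducible. The constant below is assumed to be the
# well-known secp256k1 group order, as exercised by the test suite:
def _generate_k_demo():
    from hashlib import sha256
    order = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
    digest = sha256(b('sample')).digest()
    k1 = generate_k(order, 1, sha256, digest)
    k2 = generate_k(order, 1, sha256, digest)
    assert k1 == k2 and 1 <= k1 < order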
@@ -1,394 +0,0 @@
"""Utilities for writing code that runs on Python 2 and 3"""
|
|
||||||
|
|
||||||
# Copyright (c) 2010-2012 Benjamin Peterson
|
|
||||||
#
|
|
||||||
# Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
||||||
# this software and associated documentation files (the "Software"), to deal in
|
|
||||||
# the Software without restriction, including without limitation the rights to
|
|
||||||
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
|
||||||
# the Software, and to permit persons to whom the Software is furnished to do so,
|
|
||||||
# subject to the following conditions:
|
|
||||||
#
|
|
||||||
# The above copyright notice and this permission notice shall be included in all
|
|
||||||
# copies or substantial portions of the Software.
|
|
||||||
#
|
|
||||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
|
||||||
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
|
||||||
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
|
||||||
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
||||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
|
|
||||||
import operator
|
|
||||||
import sys
|
|
||||||
import types
|
|
||||||
|
|
||||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
|
||||||
__version__ = "1.2.0"
|
|
||||||
|
|
||||||
|
|
||||||
# True if we are running on Python 3.
|
|
||||||
PY3 = sys.version_info[0] == 3
|
|
||||||
|
|
||||||
if PY3:
|
|
||||||
string_types = str,
|
|
||||||
integer_types = int,
|
|
||||||
class_types = type,
|
|
||||||
text_type = str
|
|
||||||
binary_type = bytes
|
|
||||||
|
|
||||||
MAXSIZE = sys.maxsize
|
|
||||||
else:
|
|
||||||
string_types = basestring,
|
|
||||||
integer_types = (int, long)
|
|
||||||
class_types = (type, types.ClassType)
|
|
||||||
text_type = unicode
|
|
||||||
binary_type = str
|
|
||||||
|
|
||||||
if sys.platform.startswith("java"):
|
|
||||||
# Jython always uses 32 bits.
|
|
||||||
MAXSIZE = int((1 << 31) - 1)
|
|
||||||
else:
|
|
||||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
|
||||||
class X(object):
|
|
||||||
def __len__(self):
|
|
||||||
return 1 << 31
|
|
||||||
try:
|
|
||||||
len(X())
|
|
||||||
except OverflowError:
|
|
||||||
# 32-bit
|
|
||||||
MAXSIZE = int((1 << 31) - 1)
|
|
||||||
else:
|
|
||||||
# 64-bit
|
|
||||||
MAXSIZE = int((1 << 63) - 1)
|
|
||||||
del X
|
|
||||||
|
|
||||||
|
|
||||||
def _add_doc(func, doc):
|
|
||||||
"""Add documentation to a function."""
|
|
||||||
func.__doc__ = doc
|
|
||||||
|
|
||||||
|
|
||||||
def _import_module(name):
|
|
||||||
"""Import module, returning the module after the last dot."""
|
|
||||||
__import__(name)
|
|
||||||
return sys.modules[name]
|
|
||||||
|
|
||||||
|
|
||||||
class _LazyDescr(object):
|
|
||||||
|
|
||||||
def __init__(self, name):
|
|
||||||
self.name = name
|
|
||||||
|
|
||||||
def __get__(self, obj, tp):
|
|
||||||
result = self._resolve()
|
|
||||||
setattr(obj, self.name, result)
|
|
||||||
# This is a bit ugly, but it avoids running this again.
|
|
||||||
delattr(tp, self.name)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class MovedModule(_LazyDescr):
|
|
||||||
|
|
||||||
def __init__(self, name, old, new=None):
|
|
||||||
super(MovedModule, self).__init__(name)
|
|
||||||
if PY3:
|
|
||||||
if new is None:
|
|
||||||
new = name
|
|
||||||
self.mod = new
|
|
||||||
else:
|
|
||||||
self.mod = old
|
|
||||||
|
|
||||||
def _resolve(self):
|
|
||||||
return _import_module(self.mod)
|
|
||||||
|
|
||||||
|
|
||||||
class MovedAttribute(_LazyDescr):
|
|
||||||
|
|
||||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
|
||||||
super(MovedAttribute, self).__init__(name)
|
|
||||||
if PY3:
|
|
||||||
if new_mod is None:
|
|
||||||
new_mod = name
|
|
||||||
self.mod = new_mod
|
|
||||||
if new_attr is None:
|
|
||||||
if old_attr is None:
|
|
||||||
new_attr = name
|
|
||||||
else:
|
|
||||||
new_attr = old_attr
|
|
||||||
self.attr = new_attr
|
|
||||||
else:
|
|
||||||
self.mod = old_mod
|
|
||||||
if old_attr is None:
|
|
||||||
old_attr = name
|
|
||||||
self.attr = old_attr
|
|
||||||
|
|
||||||
def _resolve(self):
|
|
||||||
module = _import_module(self.mod)
|
|
||||||
return getattr(module, self.attr)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class _MovedItems(types.ModuleType):
|
|
||||||
"""Lazy loading of moved objects"""
|
|
||||||
|
|
||||||
|
|
||||||
_moved_attributes = [
|
|
||||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
|
||||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
|
||||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
|
||||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
|
||||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
|
||||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
|
||||||
MovedAttribute("StringIO", "StringIO", "io"),
|
|
||||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
|
||||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
|
||||||
|
|
||||||
MovedModule("builtins", "__builtin__"),
|
|
||||||
MovedModule("configparser", "ConfigParser"),
|
|
||||||
MovedModule("copyreg", "copy_reg"),
|
|
||||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
|
||||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
|
||||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
|
||||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
|
||||||
MovedModule("http_client", "httplib", "http.client"),
|
|
||||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
|
||||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
|
||||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
|
||||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
|
||||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
|
||||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
|
||||||
MovedModule("cPickle", "cPickle", "pickle"),
|
|
||||||
MovedModule("queue", "Queue"),
|
|
||||||
MovedModule("reprlib", "repr"),
|
|
||||||
MovedModule("socketserver", "SocketServer"),
|
|
||||||
MovedModule("tkinter", "Tkinter"),
|
|
||||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
|
||||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
|
||||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
|
||||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
|
||||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
|
||||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
|
||||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
|
||||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
|
||||||
"tkinter.colorchooser"),
|
|
||||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
|
||||||
"tkinter.commondialog"),
|
|
||||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
|
||||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
|
||||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
|
||||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
|
||||||
"tkinter.simpledialog"),
|
|
||||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
|
||||||
MovedModule("winreg", "_winreg"),
|
|
||||||
]
|
|
||||||
for attr in _moved_attributes:
|
|
||||||
setattr(_MovedItems, attr.name, attr)
|
|
||||||
del attr
|
|
||||||
|
|
||||||
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
|
|
||||||
|
|
||||||
|
|
||||||
def add_move(move):
|
|
||||||
"""Add an item to six.moves."""
|
|
||||||
setattr(_MovedItems, move.name, move)
|
|
||||||
|
|
||||||
|
|
||||||
def remove_move(name):
|
|
||||||
"""Remove item from six.moves."""
|
|
||||||
try:
|
|
||||||
delattr(_MovedItems, name)
|
|
||||||
except AttributeError:
|
|
||||||
try:
|
|
||||||
del moves.__dict__[name]
|
|
||||||
except KeyError:
|
|
||||||
raise AttributeError("no such move, %r" % (name,))
|
|
||||||
|
|
||||||
|
|
||||||
if PY3:
|
|
||||||
_meth_func = "__func__"
|
|
||||||
_meth_self = "__self__"
|
|
||||||
|
|
||||||
_func_code = "__code__"
|
|
||||||
_func_defaults = "__defaults__"
|
|
||||||
|
|
||||||
_iterkeys = "keys"
|
|
||||||
_itervalues = "values"
|
|
||||||
_iteritems = "items"
|
|
||||||
else:
|
|
||||||
_meth_func = "im_func"
|
|
||||||
_meth_self = "im_self"
|
|
||||||
|
|
||||||
_func_code = "func_code"
|
|
||||||
_func_defaults = "func_defaults"
|
|
||||||
|
|
||||||
_iterkeys = "iterkeys"
|
|
||||||
_itervalues = "itervalues"
|
|
||||||
_iteritems = "iteritems"
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
advance_iterator = next
|
|
||||||
except NameError:
|
|
||||||
def advance_iterator(it):
|
|
||||||
return it.next()
|
|
||||||
next = advance_iterator
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
callable = callable
|
|
||||||
except NameError:
|
|
||||||
def callable(obj):
|
|
||||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
|
||||||
|
|
||||||
|
|
||||||
if PY3:
|
|
||||||
def get_unbound_function(unbound):
|
|
||||||
return unbound
|
|
||||||
|
|
||||||
Iterator = object
|
|
||||||
else:
|
|
||||||
def get_unbound_function(unbound):
|
|
||||||
return unbound.im_func
|
|
||||||
|
|
||||||
class Iterator(object):
|
|
||||||
|
|
||||||
def next(self):
|
|
||||||
return type(self).__next__(self)
|
|
||||||
|
|
||||||
callable = callable
|
|
||||||
_add_doc(get_unbound_function,
|
|
||||||
"""Get the function out of a possibly unbound function""")
|
|
||||||
|
|
||||||
|
|
||||||
get_method_function = operator.attrgetter(_meth_func)
|
|
||||||
get_method_self = operator.attrgetter(_meth_self)
|
|
||||||
get_function_code = operator.attrgetter(_func_code)
|
|
||||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
|
||||||
|
|
||||||
|
|
||||||
def iterkeys(d):
|
|
||||||
"""Return an iterator over the keys of a dictionary."""
|
|
||||||
return iter(getattr(d, _iterkeys)())
|
|
||||||
|
|
||||||
def itervalues(d):
|
|
||||||
"""Return an iterator over the values of a dictionary."""
|
|
||||||
return iter(getattr(d, _itervalues)())
|
|
||||||
|
|
||||||
def iteritems(d):
|
|
||||||
"""Return an iterator over the (key, value) pairs of a dictionary."""
|
|
||||||
return iter(getattr(d, _iteritems)())
|
|
||||||
|
|
||||||
|
|
||||||
if PY3:
|
|
||||||
def b(s):
|
|
||||||
return s.encode("latin-1")
|
|
||||||
def u(s):
|
|
||||||
return s
|
|
||||||
if sys.version_info[1] <= 1:
|
|
||||||
def int2byte(i):
|
|
||||||
return bytes((i,))
|
|
||||||
else:
|
|
||||||
# This is about 2x faster than the implementation above on 3.2+
|
|
||||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
|
||||||
import io
|
|
||||||
StringIO = io.StringIO
|
|
||||||
BytesIO = io.BytesIO
|
|
||||||
else:
|
|
||||||
def b(s):
|
|
||||||
return s
|
|
||||||
def u(s):
|
|
||||||
if isinstance(s, unicode):
|
|
||||||
return s
|
|
||||||
return unicode(s, "unicode_escape")
|
|
||||||
int2byte = chr
|
|
||||||
import StringIO
|
|
||||||
StringIO = BytesIO = StringIO.StringIO
|
|
||||||
_add_doc(b, """Byte literal""")
|
|
||||||
_add_doc(u, """Text literal""")
|
|
||||||
|
|
||||||
|
|
||||||
if PY3:
|
|
||||||
import builtins
|
|
||||||
exec_ = getattr(builtins, "exec")
|
|
||||||
|
|
||||||
|
|
||||||
def reraise(tp, value, tb=None):
|
|
||||||
if value.__traceback__ is not tb:
|
|
||||||
raise value.with_traceback(tb)
|
|
||||||
raise value
|
|
||||||
|
|
||||||
|
|
||||||
print_ = getattr(builtins, "print")
|
|
||||||
del builtins
|
|
||||||
|
|
||||||
else:
|
|
||||||
def exec_(_code_, _globs_=None, _locs_=None):
|
|
||||||
"""Execute code in a namespace."""
|
|
||||||
if _globs_ is None:
|
|
||||||
frame = sys._getframe(1)
|
|
||||||
_globs_ = frame.f_globals
|
|
||||||
if _locs_ is None:
|
|
||||||
_locs_ = frame.f_locals
|
|
||||||
del frame
|
|
||||||
elif _locs_ is None:
|
|
||||||
_locs_ = _globs_
|
|
||||||
exec("""exec _code_ in _globs_, _locs_""")
|
|
||||||
|
|
||||||
|
|
||||||
exec_("""def reraise(tp, value, tb=None):
|
|
||||||
raise tp, value, tb
|
|
||||||
""")
|
|
||||||
|
|
||||||
|
|
||||||
def print_(*args, **kwargs):
|
|
||||||
"""The new-style print function."""
|
|
||||||
fp = kwargs.pop("file", sys.stdout)
|
|
||||||
if fp is None:
|
|
||||||
return
|
|
||||||
def write(data):
|
|
||||||
if not isinstance(data, basestring):
|
|
||||||
data = str(data)
|
|
||||||
fp.write(data)
|
|
||||||
want_unicode = False
|
|
||||||
sep = kwargs.pop("sep", None)
|
|
||||||
if sep is not None:
|
|
||||||
if isinstance(sep, unicode):
|
|
||||||
want_unicode = True
|
|
||||||
elif not isinstance(sep, str):
|
|
||||||
raise TypeError("sep must be None or a string")
|
|
||||||
end = kwargs.pop("end", None)
|
|
||||||
if end is not None:
|
|
||||||
if isinstance(end, unicode):
|
|
||||||
want_unicode = True
|
|
||||||
elif not isinstance(end, str):
|
|
||||||
raise TypeError("end must be None or a string")
|
|
||||||
if kwargs:
|
|
||||||
raise TypeError("invalid keyword arguments to print()")
|
|
||||||
if not want_unicode:
|
|
||||||
for arg in args:
|
|
||||||
if isinstance(arg, unicode):
|
|
||||||
want_unicode = True
|
|
||||||
break
|
|
||||||
if want_unicode:
|
|
||||||
newline = unicode("\n")
|
|
||||||
space = unicode(" ")
|
|
||||||
else:
|
|
||||||
newline = "\n"
|
|
||||||
space = " "
|
|
||||||
if sep is None:
|
|
||||||
sep = space
|
|
||||||
if end is None:
|
|
||||||
end = newline
|
|
||||||
for i, arg in enumerate(args):
|
|
||||||
if i:
|
|
||||||
write(sep)
|
|
||||||
write(arg)
|
|
||||||
write(end)
|
|
||||||
|
|
||||||
_add_doc(reraise, """Reraise an exception.""")
|
|
||||||
|
|
||||||
|
|
||||||
def with_metaclass(meta, base=object):
|
|
||||||
"""Create a base class with a metaclass."""
|
|
||||||
return meta("NewBase", (base,), {})
|
|
||||||
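
# Usage sketch for the helper above (illustrative only): the returned
# temporary base class injects the metaclass under both Python 2 and 3:
def _with_metaclass_demo():
    class Meta(type):
        pass
    class Base(object):
        pass
    cls = with_metaclass(Meta, Base)
    assert type(cls) is Meta and issubclass(cls, Base)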
@@ -1,663 +0,0 @@
from __future__ import with_statement, division
|
|
||||||
|
|
||||||
import unittest
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
from binascii import hexlify, unhexlify
|
|
||||||
from hashlib import sha1, sha256, sha512
|
|
||||||
|
|
||||||
from .six import b, print_, binary_type
|
|
||||||
from .keys import SigningKey, VerifyingKey
|
|
||||||
from .keys import BadSignatureError
|
|
||||||
from . import util
|
|
||||||
from .util import sigencode_der, sigencode_strings
|
|
||||||
from .util import sigdecode_der, sigdecode_strings
|
|
||||||
from .curves import Curve, UnknownCurveError
|
|
||||||
from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1
|
|
||||||
from .ellipticcurve import Point
|
|
||||||
from . import der
|
|
||||||
from . import rfc6979
|
|
||||||
|
|
||||||
class SubprocessError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def run_openssl(cmd):
|
|
||||||
OPENSSL = "openssl"
|
|
||||||
p = subprocess.Popen([OPENSSL] + cmd.split(),
|
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.STDOUT)
|
|
||||||
stdout, ignored = p.communicate()
|
|
||||||
if p.returncode != 0:
|
|
||||||
raise SubprocessError("cmd '%s %s' failed: rc=%s, stdout/err was %s" %
|
|
||||||
(OPENSSL, cmd, p.returncode, stdout))
|
|
||||||
return stdout.decode()
|
|
||||||
|
|
||||||
BENCH = False
|
|
||||||
|
|
||||||
class ECDSA(unittest.TestCase):
|
|
||||||
def test_basic(self):
|
|
||||||
priv = SigningKey.generate()
|
|
||||||
pub = priv.get_verifying_key()
|
|
||||||
|
|
||||||
data = b("blahblah")
|
|
||||||
sig = priv.sign(data)
|
|
||||||
|
|
||||||
self.assertTrue(pub.verify(sig, data))
|
|
||||||
self.assertRaises(BadSignatureError, pub.verify, sig, data+b("bad"))
|
|
||||||
|
|
||||||
pub2 = VerifyingKey.from_string(pub.to_string())
|
|
||||||
self.assertTrue(pub2.verify(sig, data))
|
|
||||||
|
|
||||||
def test_deterministic(self):
|
|
||||||
data = b("blahblah")
|
|
||||||
secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)
|
|
||||||
|
|
||||||
priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256)
|
|
||||||
pub = priv.get_verifying_key()
|
|
||||||
|
|
||||||
k = rfc6979.generate_k(
|
|
||||||
SECP256k1.generator.order(), secexp, sha256, sha256(data).digest())
|
|
||||||
|
|
||||||
sig1 = priv.sign(data, k=k)
|
|
||||||
self.assertTrue(pub.verify(sig1, data))
|
|
||||||
|
|
||||||
sig2 = priv.sign(data, k=k)
|
|
||||||
self.assertTrue(pub.verify(sig2, data))
|
|
||||||
|
|
||||||
sig3 = priv.sign_deterministic(data, sha256)
|
|
||||||
self.assertTrue(pub.verify(sig3, data))
|
|
||||||
|
|
||||||
self.assertEqual(sig1, sig2)
|
|
||||||
self.assertEqual(sig1, sig3)
|
|
||||||
|
|
||||||
def test_bad_usage(self):
|
|
||||||
# sk=SigningKey() is wrong
|
|
||||||
self.assertRaises(TypeError, SigningKey)
|
|
||||||
self.assertRaises(TypeError, VerifyingKey)
|
|
||||||
|
|
||||||
def test_lengths(self):
|
|
||||||
default = NIST192p
|
|
||||||
priv = SigningKey.generate()
|
|
||||||
pub = priv.get_verifying_key()
|
|
||||||
self.assertEqual(len(pub.to_string()), default.verifying_key_length)
|
|
||||||
sig = priv.sign(b("data"))
|
|
||||||
self.assertEqual(len(sig), default.signature_length)
|
|
||||||
if BENCH:
|
|
||||||
print_()
|
|
||||||
for curve in (NIST192p, NIST224p, NIST256p, NIST384p, NIST521p):
|
|
||||||
start = time.time()
|
|
||||||
priv = SigningKey.generate(curve=curve)
|
|
||||||
pub1 = priv.get_verifying_key()
|
|
||||||
keygen_time = time.time() - start
|
|
||||||
pub2 = VerifyingKey.from_string(pub1.to_string(), curve)
|
|
||||||
self.assertEqual(pub1.to_string(), pub2.to_string())
|
|
||||||
self.assertEqual(len(pub1.to_string()),
|
|
||||||
curve.verifying_key_length)
|
|
||||||
start = time.time()
|
|
||||||
sig = priv.sign(b("data"))
|
|
||||||
sign_time = time.time() - start
|
|
||||||
self.assertEqual(len(sig), curve.signature_length)
|
|
||||||
if BENCH:
|
|
||||||
start = time.time()
|
|
||||||
pub1.verify(sig, b("data"))
|
|
||||||
verify_time = time.time() - start
|
|
||||||
print_("%s: siglen=%d, keygen=%0.3fs, sign=%0.3f, verify=%0.3f" \
|
|
||||||
% (curve.name, curve.signature_length,
|
|
||||||
keygen_time, sign_time, verify_time))
|
|
||||||
|
|
||||||
def test_serialize(self):
|
|
||||||
seed = b("secret")
|
|
||||||
curve = NIST192p
|
|
||||||
secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order)
|
|
||||||
secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order)
|
|
||||||
self.assertEqual(secexp1, secexp2)
|
|
||||||
priv1 = SigningKey.from_secret_exponent(secexp1, curve)
|
|
||||||
priv2 = SigningKey.from_secret_exponent(secexp2, curve)
|
|
||||||
self.assertEqual(hexlify(priv1.to_string()),
|
|
||||||
hexlify(priv2.to_string()))
|
|
||||||
self.assertEqual(priv1.to_pem(), priv2.to_pem())
|
|
||||||
pub1 = priv1.get_verifying_key()
|
|
||||||
pub2 = priv2.get_verifying_key()
|
|
||||||
data = b("data")
|
|
||||||
sig1 = priv1.sign(data)
|
|
||||||
sig2 = priv2.sign(data)
|
|
||||||
self.assertTrue(pub1.verify(sig1, data))
|
|
||||||
self.assertTrue(pub2.verify(sig1, data))
|
|
||||||
self.assertTrue(pub1.verify(sig2, data))
|
|
||||||
self.assertTrue(pub2.verify(sig2, data))
|
|
||||||
self.assertEqual(hexlify(pub1.to_string()),
|
|
||||||
hexlify(pub2.to_string()))
|
|
||||||
|
|
||||||
def test_nonrandom(self):
|
|
||||||
s = b("all the entropy in the entire world, compressed into one line")
|
|
||||||
def not_much_entropy(numbytes):
|
|
||||||
return s[:numbytes]
|
|
||||||
# we control the entropy source, these two keys should be identical:
|
|
||||||
priv1 = SigningKey.generate(entropy=not_much_entropy)
|
|
||||||
priv2 = SigningKey.generate(entropy=not_much_entropy)
|
|
||||||
self.assertEqual(hexlify(priv1.get_verifying_key().to_string()),
|
|
||||||
hexlify(priv2.get_verifying_key().to_string()))
|
|
||||||
# likewise, signatures should be identical. Obviously you'd never
|
|
||||||
# want to do this with keys you care about, because the secrecy of
|
|
||||||
# the private key depends upon using different random numbers for
|
|
||||||
# each signature
|
|
||||||
sig1 = priv1.sign(b("data"), entropy=not_much_entropy)
|
|
||||||
sig2 = priv2.sign(b("data"), entropy=not_much_entropy)
|
|
||||||
self.assertEqual(hexlify(sig1), hexlify(sig2))
|
|
||||||
|
|
||||||
def assertTruePrivkeysEqual(self, priv1, priv2):
|
|
||||||
self.assertEqual(priv1.privkey.secret_multiplier,
|
|
||||||
priv2.privkey.secret_multiplier)
|
|
||||||
self.assertEqual(priv1.privkey.public_key.generator,
|
|
||||||
priv2.privkey.public_key.generator)
|
|
||||||
|
|
||||||
def failIfPrivkeysEqual(self, priv1, priv2):
|
|
||||||
self.failIfEqual(priv1.privkey.secret_multiplier,
|
|
||||||
priv2.privkey.secret_multiplier)
|
|
||||||
|
|
||||||
def test_privkey_creation(self):
|
|
||||||
s = b("all the entropy in the entire world, compressed into one line")
|
|
||||||
def not_much_entropy(numbytes):
|
|
||||||
return s[:numbytes]
|
|
||||||
priv1 = SigningKey.generate()
|
|
||||||
self.assertEqual(priv1.baselen, NIST192p.baselen)
|
|
||||||
|
|
||||||
priv1 = SigningKey.generate(curve=NIST224p)
|
|
||||||
self.assertEqual(priv1.baselen, NIST224p.baselen)
|
|
||||||
|
|
||||||
priv1 = SigningKey.generate(entropy=not_much_entropy)
|
|
||||||
self.assertEqual(priv1.baselen, NIST192p.baselen)
|
|
||||||
priv2 = SigningKey.generate(entropy=not_much_entropy)
|
|
||||||
self.assertEqual(priv2.baselen, NIST192p.baselen)
|
|
||||||
self.assertTruePrivkeysEqual(priv1, priv2)
|
|
||||||
|
|
||||||
priv1 = SigningKey.from_secret_exponent(secexp=3)
|
|
||||||
self.assertEqual(priv1.baselen, NIST192p.baselen)
|
|
||||||
priv2 = SigningKey.from_secret_exponent(secexp=3)
|
|
||||||
self.assertTruePrivkeysEqual(priv1, priv2)
|
|
||||||
|
|
||||||
priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p)
|
|
||||||
self.assertEqual(priv1.baselen, NIST224p.baselen)
|
|
||||||
|
|
||||||
def test_privkey_strings(self):
|
|
||||||
priv1 = SigningKey.generate()
|
|
||||||
s1 = priv1.to_string()
|
|
||||||
self.assertEqual(type(s1), binary_type)
|
|
||||||
self.assertEqual(len(s1), NIST192p.baselen)
|
|
||||||
priv2 = SigningKey.from_string(s1)
|
|
||||||
self.assertTruePrivkeysEqual(priv1, priv2)
|
|
||||||
|
|
||||||
s1 = priv1.to_pem()
|
|
||||||
self.assertEqual(type(s1), binary_type)
|
|
        self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
        self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
        priv2 = SigningKey.from_pem(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_der()
        self.assertEqual(type(s1), binary_type)
        priv2 = SigningKey.from_der(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        priv1 = SigningKey.generate(curve=NIST256p)
        s1 = priv1.to_pem()
        self.assertEqual(type(s1), binary_type)
        self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
        self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
        priv2 = SigningKey.from_pem(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_der()
        self.assertEqual(type(s1), binary_type)
        priv2 = SigningKey.from_der(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

    def assertTruePubkeysEqual(self, pub1, pub2):
        self.assertEqual(pub1.pubkey.point, pub2.pubkey.point)
        self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator)
        self.assertEqual(pub1.curve, pub2.curve)

    def test_pubkey_strings(self):
        priv1 = SigningKey.generate()
        pub1 = priv1.get_verifying_key()
        s1 = pub1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST192p.verifying_key_length)
        pub2 = VerifyingKey.from_string(s1)
        self.assertTruePubkeysEqual(pub1, pub2)

        priv1 = SigningKey.generate(curve=NIST256p)
        pub1 = priv1.get_verifying_key()
        s1 = pub1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST256p.verifying_key_length)
        pub2 = VerifyingKey.from_string(s1, curve=NIST256p)
        self.assertTruePubkeysEqual(pub1, pub2)

        pub1_der = pub1.to_der()
        self.assertEqual(type(pub1_der), binary_type)
        pub2 = VerifyingKey.from_der(pub1_der)
        self.assertTruePubkeysEqual(pub1, pub2)

        self.assertRaises(der.UnexpectedDER,
                          VerifyingKey.from_der, pub1_der+b("junk"))
        badpub = VerifyingKey.from_der(pub1_der)
        class FakeGenerator:
            def order(self): return 123456789
        badcurve = Curve("unknown", None, None, FakeGenerator(), (1,2,3,4,5,6))
        badpub.curve = badcurve
        badder = badpub.to_der()
        self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder)

        pem = pub1.to_pem()
        self.assertEqual(type(pem), binary_type)
        self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem)
        self.assertTrue(pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem)
        pub2 = VerifyingKey.from_pem(pem)
        self.assertTruePubkeysEqual(pub1, pub2)

    def test_signature_strings(self):
        priv1 = SigningKey.generate()
        pub1 = priv1.get_verifying_key()
        data = b("data")

        sig = priv1.sign(data)
        self.assertEqual(type(sig), binary_type)
        self.assertEqual(len(sig), NIST192p.signature_length)
        self.assertTrue(pub1.verify(sig, data))

        sig = priv1.sign(data, sigencode=sigencode_strings)
        self.assertEqual(type(sig), tuple)
        self.assertEqual(len(sig), 2)
        self.assertEqual(type(sig[0]), binary_type)
        self.assertEqual(type(sig[1]), binary_type)
        self.assertEqual(len(sig[0]), NIST192p.baselen)
        self.assertEqual(len(sig[1]), NIST192p.baselen)
        self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings))

        sig_der = priv1.sign(data, sigencode=sigencode_der)
        self.assertEqual(type(sig_der), binary_type)
        self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der))

    def test_hashfunc(self):
        sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
        data = b("security level is 128 bits")
        sig = sk.sign(data)
        vk = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
                                      curve=NIST256p, hashfunc=sha256)
        self.assertTrue(vk.verify(sig, data))

        sk2 = SigningKey.generate(curve=NIST256p)
        sig2 = sk2.sign(data, hashfunc=sha256)
        vk2 = VerifyingKey.from_string(sk2.get_verifying_key().to_string(),
                                       curve=NIST256p, hashfunc=sha256)
        self.assertTrue(vk2.verify(sig2, data))

        vk3 = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
                                       curve=NIST256p)
        self.assertTrue(vk3.verify(sig, data, hashfunc=sha256))

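# Illustrative sketch (not part of the original suite): the PEM round-trip
# that the tests above assert, as a standalone helper. It relies only on the
# same module-level imports the tests use (SigningKey, NIST256p).
def _example_pem_roundtrip():
    sk = SigningKey.generate(curve=NIST256p)   # fresh private key
    pem = sk.to_pem()                          # b"-----BEGIN EC PRIVATE KEY-----..."
    sk2 = SigningKey.from_pem(pem)             # parse it back
    assert sk.to_string() == sk2.to_string()   # the same secret scalar survives
    return sk2
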
class OpenSSL(unittest.TestCase):
    # test interoperability with OpenSSL tools. Note that openssl's ECDSA
    # sign/verify arguments changed between 0.9.8 and 1.0.0: the early
    # versions require "-ecdsa-with-SHA1", the later versions want just
    # "-SHA1" (or to leave out that argument entirely, which means the
    # signature will use some default digest algorithm, probably determined
    # by the key, probably always SHA1).
    #
    # openssl ecparam -name secp224r1 -genkey -out privkey.pem
    # openssl ec -in privkey.pem -text -noout # get the priv/pub keys
    # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt
    # openssl asn1parse -in data.sig -inform DER
    #  data.sig is 64 bytes, probably 56b plus ASN1 overhead
    # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $?
    # openssl ec -in privkey.pem -pubout -out pubkey.pem
    # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der

    def get_openssl_messagedigest_arg(self):
        v = run_openssl("version")
        # e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010",
        # or "OpenSSL 0.9.8o 01 Jun 2010"
        vs = v.split()[1].split(".")
        if vs >= ["1","0","0"]:
            return "-SHA1"
        else:
            return "-ecdsa-with-SHA1"

    # sk: 1:OpenSSL->python  2:python->OpenSSL
    # vk: 3:OpenSSL->python  4:python->OpenSSL
    # sig: 5:OpenSSL->python  6:python->OpenSSL

    def test_from_openssl_nist192p(self):
        return self.do_test_from_openssl(NIST192p)
    def test_from_openssl_nist224p(self):
        return self.do_test_from_openssl(NIST224p)
    def test_from_openssl_nist256p(self):
        return self.do_test_from_openssl(NIST256p)
    def test_from_openssl_nist384p(self):
        return self.do_test_from_openssl(NIST384p)
    def test_from_openssl_nist521p(self):
        return self.do_test_from_openssl(NIST521p)
    def test_from_openssl_secp256k1(self):
        return self.do_test_from_openssl(SECP256k1)

    def do_test_from_openssl(self, curve):
        curvename = curve.openssl_name
        assert curvename
        # OpenSSL: create sk, vk, sign.
        # Python: read vk(3), checksig(5), read sk(1), sign, check
        mdarg = self.get_openssl_messagedigest_arg()
        if os.path.isdir("t"):
            shutil.rmtree("t")
        os.mkdir("t")
        run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename)
        run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem")
        data = b("data")
        with open("t/data.txt","wb") as e: e.write(data)
        run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" % mdarg)
        with open("t/pubkey.pem","rb") as e: pubkey_pem = e.read()
        vk = VerifyingKey.from_pem(pubkey_pem) # 3
        with open("t/data.sig","rb") as e: sig_der = e.read()
        self.assertTrue(vk.verify(sig_der, data, # 5
                                  hashfunc=sha1, sigdecode=sigdecode_der))

        with open("t/privkey.pem") as e: fp = e.read()
        sk = SigningKey.from_pem(fp) # 1
        sig = sk.sign(data)
        self.assertTrue(vk.verify(sig, data))

    def test_to_openssl_nist192p(self):
        self.do_test_to_openssl(NIST192p)
    def test_to_openssl_nist224p(self):
        self.do_test_to_openssl(NIST224p)
    def test_to_openssl_nist256p(self):
        self.do_test_to_openssl(NIST256p)
    def test_to_openssl_nist384p(self):
        self.do_test_to_openssl(NIST384p)
    def test_to_openssl_nist521p(self):
        self.do_test_to_openssl(NIST521p)
    def test_to_openssl_secp256k1(self):
        self.do_test_to_openssl(SECP256k1)

    def do_test_to_openssl(self, curve):
        curvename = curve.openssl_name
        assert curvename
        # Python: create sk, vk, sign.
        # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check
        mdarg = self.get_openssl_messagedigest_arg()
        if os.path.isdir("t"):
            shutil.rmtree("t")
        os.mkdir("t")
        sk = SigningKey.generate(curve=curve)
        vk = sk.get_verifying_key()
        data = b("data")
        with open("t/pubkey.der","wb") as e: e.write(vk.to_der()) # 4
        with open("t/pubkey.pem","wb") as e: e.write(vk.to_pem()) # 4
        sig_der = sk.sign(data, hashfunc=sha1, sigencode=sigencode_der)

        with open("t/data.sig","wb") as e: e.write(sig_der) # 6
        with open("t/data.txt","wb") as e: e.write(data)
        with open("t/baddata.txt","wb") as e: e.write(data+b("corrupt"))

        self.assertRaises(SubprocessError, run_openssl,
                          "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" % mdarg)

        with open("t/privkey.pem","wb") as e: e.write(sk.to_pem()) # 2
        run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" % mdarg)

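# Illustrative sketch (not part of the original suite): the python->OpenSSL
# half of the round-trip above, reduced to its essentials. It assumes this
# module's run_openssl() helper and imports; the "t/" scratch dir and file
# names are arbitrary, and "-SHA1" assumes OpenSSL >= 1.0.0 (see
# get_openssl_messagedigest_arg above).
def _example_openssl_verify(sk, data):
    if not os.path.isdir("t"):
        os.mkdir("t")
    # write the public key, the message, and a DER signature where the CLI can see them
    with open("t/pubkey.pem", "wb") as f: f.write(sk.get_verifying_key().to_pem())
    with open("t/data.txt", "wb") as f: f.write(data)
    with open("t/data.sig", "wb") as f:
        f.write(sk.sign(data, hashfunc=sha1, sigencode=sigencode_der))
    # run_openssl() raises SubprocessError on a nonzero exit, i.e. a bad signature
    run_openssl("dgst -SHA1 -verify t/pubkey.pem -signature t/data.sig t/data.txt")
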
class DER(unittest.TestCase):
    def test_oids(self):
        oid_ecPublicKey = der.encode_oid(1, 2, 840, 10045, 2, 1)
        self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201"))
        self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021"))
        self.assertEqual(hexlify(NIST256p.encoded_oid),
                         b("06082a8648ce3d030107"))
        x = oid_ecPublicKey + b("more")
        x1, rest = der.remove_object(x)
        self.assertEqual(x1, (1, 2, 840, 10045, 2, 1))
        self.assertEqual(rest, b("more"))

    def test_integer(self):
        self.assertEqual(der.encode_integer(0), b("\x02\x01\x00"))
        self.assertEqual(der.encode_integer(1), b("\x02\x01\x01"))
        self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f"))
        self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80"))
        self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00"))
        #self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff"))

        def s(n): return der.remove_integer(der.encode_integer(n) + b("junk"))
        self.assertEqual(s(0), (0, b("junk")))
        self.assertEqual(s(1), (1, b("junk")))
        self.assertEqual(s(127), (127, b("junk")))
        self.assertEqual(s(128), (128, b("junk")))
        self.assertEqual(s(256), (256, b("junk")))
        self.assertEqual(s(1234567890123456789012345678901234567890),
                         (1234567890123456789012345678901234567890, b("junk")))

    def test_number(self):
        self.assertEqual(der.encode_number(0), b("\x00"))
        self.assertEqual(der.encode_number(127), b("\x7f"))
        self.assertEqual(der.encode_number(128), b("\x81\x00"))
        self.assertEqual(der.encode_number(3*128+7), b("\x83\x07"))
        #self.assertEqual(der.read_number("\x81\x9b"+"more"), (155, 2))
        #self.assertEqual(der.encode_number(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 3*128+7, 840, 10045): #, 155):
            x = der.encode_number(n) + b("more")
            n1, llen = der.read_number(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_length(self):
        self.assertEqual(der.encode_length(0), b("\x00"))
        self.assertEqual(der.encode_length(127), b("\x7f"))
        self.assertEqual(der.encode_length(128), b("\x81\x80"))
        self.assertEqual(der.encode_length(255), b("\x81\xff"))
        self.assertEqual(der.encode_length(256), b("\x82\x01\x00"))
        self.assertEqual(der.encode_length(3*256+7), b("\x82\x03\x07"))
        self.assertEqual(der.read_length(b("\x81\x9b")+b("more")), (155, 2))
        self.assertEqual(der.encode_length(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 255, 256, 3*256+7, 155):
            x = der.encode_length(n) + b("more")
            n1, llen = der.read_length(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_sequence(self):
        x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI")
        self.assertEqual(x, b("\x30\x06ABCDEFGHI"))
        x1, rest = der.remove_sequence(x)
        self.assertEqual(x1, b("ABCDEF"))
        self.assertEqual(rest, b("GHI"))

    def test_constructed(self):
        x = der.encode_constructed(0, NIST224p.encoded_oid)
        self.assertEqual(hexlify(x), b("a007") + b("06052b81040021"))
        x = der.encode_constructed(1, unhexlify(b("0102030a0b0c")))
        self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c"))

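# Illustrative sketch (not part of the original suite): the DER definite-length
# rule that test_length pins down. Lengths below 0x80 use a single byte (short
# form); otherwise one byte 0x80|N is followed by N big-endian length bytes.
def _example_encode_length(l):
    if l < 0x80:
        return bytes(bytearray([l]))               # e.g. 127 -> b"\x7f"
    out = bytearray()
    while l:
        out.insert(0, l & 0xff)                    # big-endian length bytes
        l >>= 8
    return bytes(bytearray([0x80 | len(out)])) + bytes(out)  # e.g. 155 -> b"\x81\x9b"
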
class Util(unittest.TestCase):
    def test_trytryagain(self):
        tta = util.randrange_from_seed__trytryagain
        for i in range(1000):
            seed = "seed-%d" % i
            for order in (2**8-2, 2**8-1, 2**8, 2**8+1, 2**8+2,
                          2**16-1, 2**16+1):
                n = tta(seed, order)
                self.assertTrue(1 <= n < order, (1, n, order))
        # this trytryagain *does* provide long-term stability
        self.assertEqual(("%x"%(tta("seed", NIST224p.order))).encode(),
                         b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"))

    def test_randrange(self):
        # util.randrange does not provide long-term stability: we might
        # change the algorithm in the future.
        for i in range(1000):
            entropy = util.PRNG("seed-%d" % i)
            for order in (2**8-2, 2**8-1, 2**8,
                          2**16-1, 2**16+1,
                          ):
                # that oddball 2**16+1 takes half our runtime
                n = util.randrange(order, entropy=entropy)
                self.assertTrue(1 <= n < order, (1, n, order))

    def OFF_test_prove_uniformity(self):
        order = 2**8-2
        counts = dict([(i, 0) for i in range(1, order)])
        assert 0 not in counts
        assert order not in counts
        for i in range(1000000):
            seed = "seed-%d" % i
            n = util.randrange_from_seed__trytryagain(seed, order)
            counts[n] += 1
        # this technique should use the full range
        self.assertTrue(counts[order-1])
        for i in range(1, order):
            print_("%3d: %s" % (i, "*"*(counts[i]//100)))

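# Illustrative sketch (not part of the original suite): the rejection
# sampling ("try-try-again") these tests exercise -- draw fixed-width random
# integers until one lands in [1, order). This version uses os.urandom; the
# seedable, bit-exact version lives in ecdsa.util.
def _example_rejection_sample(order):
    import binascii
    nbytes = (order.bit_length() + 7) // 8
    while True:
        candidate = int(binascii.hexlify(os.urandom(nbytes)), 16) + 1
        if 1 <= candidate < order:
            return candidate                       # uniform over [1, order)
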
class RFC6979(unittest.TestCase):
    # https://tools.ietf.org/html/rfc6979#appendix-A.1
    def _do(self, generator, secexp, hsh, hash_func, expected):
        actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh)
        self.assertEqual(expected, actual)

    def test_SECP256k1(self):
        '''The RFC doesn't contain test vectors for SECP256k1, which is used
        in bitcoin. This vector has been computed by the Golang reference
        implementation instead.'''
        self._do(
            generator=SECP256k1.generator,
            secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16),
            hsh=sha256(b("sample")).digest(),
            hash_func=sha256,
            expected=int("8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", 16))

    def test_SECP256k1_2(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=int("cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", 16),
            hsh=sha256(b("sample")).digest(),
            hash_func=sha256,
            expected=int("2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", 16))

    def test_SECP256k1_3(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15)

    def test_SECP256k1_4(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("All those moments will be lost in time, like tears in rain. Time to die...")).digest(),
            hash_func=sha256,
            expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3)

    def test_SECP256k1_5(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90)

    def test_SECP256k1_6(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0xf8b8af8ce3c7cca5e300d33939540c10d45ce001b8f252bfbc57ba0342904181,
            hsh=sha256(b("Alan Turing")).digest(),
            hash_func=sha256,
            expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1)

    def test_1(self):
        # Basic example from the RFC; it also exercises the 'try-try-again'
        # loop from Step H of RFC 6979
        self._do(
            generator=Point(None, 0, 0, int("4000000000000000000020108A2E0CC0D99F8A5EF", 16)),
            secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16),
            hsh=unhexlify(b("AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF")),
            hash_func=sha256,
            expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16))

    def test_2(self):
        self._do(
            generator=NIST192p.generator,
            secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh=sha1(b("sample")).digest(),
            hash_func=sha1,
            expected=int("37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16))

    def test_3(self):
        self._do(
            generator=NIST192p.generator,
            secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh=sha256(b("sample")).digest(),
            hash_func=sha256,
            expected=int("32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16))

    def test_4(self):
        self._do(
            generator=NIST192p.generator,
            secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh=sha512(b("sample")).digest(),
            hash_func=sha512,
            expected=int("A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16))

    def test_5(self):
        self._do(
            generator=NIST192p.generator,
            secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh=sha1(b("test")).digest(),
            hash_func=sha1,
            expected=int("D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16))

    def test_6(self):
        self._do(
            generator=NIST192p.generator,
            secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh=sha256(b("test")).digest(),
            hash_func=sha256,
            expected=int("5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16))

    def test_7(self):
        self._do(
            generator=NIST192p.generator,
            secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh=sha512(b("test")).digest(),
            hash_func=sha512,
            expected=int("0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16))

    def test_8(self):
        self._do(
            generator=NIST521p.generator,
            secexp=int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh=sha1(b("sample")).digest(),
            hash_func=sha1,
            expected=int("089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", 16))

    def test_9(self):
        self._do(
            generator=NIST521p.generator,
            secexp=int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh=sha256(b("sample")).digest(),
            hash_func=sha256,
            expected=int("0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", 16))

    def test_10(self):
        self._do(
            generator=NIST521p.generator,
            secexp=int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh=sha512(b("test")).digest(),
            hash_func=sha512,
            expected=int("16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", 16))

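# Illustrative sketch (not part of the original suite): RFC 6979 derives the
# ECDSA nonce k deterministically from the secret exponent and the message
# hash, which is what makes the fixed vectors above possible.
def _example_deterministic_k(secexp, message):
    k = rfc6979.generate_k(SECP256k1.generator.order(), secexp, sha256,
                           sha256(message).digest())
    # the same (secexp, message) pair always yields the same k, and distinct
    # messages get distinct nonces, so k is never dangerously reused
    return k
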
def __main__():
    unittest.main()
if __name__ == "__main__":
    __main__()
@@ -1,247 +0,0 @@
from __future__ import division

import os
import math
import binascii
from hashlib import sha256
from . import der
from .curves import orderlen
from .six import PY3, int2byte, b, next

# RFC5480:
#   The "unrestricted" algorithm identifier is:
#     id-ecPublicKey OBJECT IDENTIFIER ::= {
#       iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }

oid_ecPublicKey = (1, 2, 840, 10045, 2, 1)
encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey)

def randrange(order, entropy=None):
    """Return a random integer k such that 1 <= k < order, uniformly
    distributed across that range. For simplicity, this only behaves well if
    'order' is fairly close (but below) a power of 256. The try-try-again
    algorithm we use takes longer and longer time (on average) to complete as
    'order' falls, rising to a maximum of avg=512 loops for the worst-case
    (256**k)+1 . All of the standard curves behave well. There is a cutoff at
    10k loops (which raises RuntimeError) to prevent an infinite loop when
    something is really broken like the entropy function not working.

    Note that this function is not declared to be forwards-compatible: we may
    change the behavior in future releases. The entropy= argument (which
    should get a callable that behaves like os.urandom) can be used to
    achieve stability within a given release (for repeatable unit tests), but
    should not be used as a long-term-compatible key generation algorithm.
    """
    # we could handle arbitrary orders (even 256**k+1) better if we created
    # candidates bit-wise instead of byte-wise, which would reduce the
    # worst-case behavior to avg=2 loops, but that would be more complex. The
    # change would be to round the order up to a power of 256, subtract one
    # (to get 0xffff..), use that to get a byte-long mask for the top byte,
    # generate the len-1 entropy bytes, generate one extra byte and mask off
    # the top bits, then combine it with the rest. Requires jumping back and
    # forth between strings and integers a lot.

    if entropy is None:
        entropy = os.urandom
    assert order > 1
    bytes = orderlen(order)
    dont_try_forever = 10000 # gives about 2**-60 failures for worst case
    while dont_try_forever > 0:
        dont_try_forever -= 1
        candidate = string_to_number(entropy(bytes)) + 1
        if 1 <= candidate < order:
            return candidate
        continue
    raise RuntimeError("randrange() tried hard but gave up, either something"
                       " is very wrong or you got realllly unlucky. Order was"
                       " %x" % order)

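# Illustrative sketch (not part of the original file): exercising randrange()
# with a deterministic entropy callable, the pattern the unit tests rely on.
# PRNG is defined just below; any callable with os.urandom's signature works.
def _example_seeded_randrange(order, seed="example-seed"):
    entropy = PRNG(seed)            # deterministic byte stream (see below)
    k = randrange(order, entropy=entropy)
    assert 1 <= k < order
    return k
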
class PRNG:
    # this returns a callable which, when invoked with an integer N, will
    # return N pseudorandom bytes. Note: this is a short-term PRNG, meant
    # primarily for the needs of randrange_from_seed__trytryagain(), which
    # only needs to run it a few times per seed. It does not provide
    # protection against state compromise (forward security).
    def __init__(self, seed):
        self.generator = self.block_generator(seed)

    def __call__(self, numbytes):
        a = [next(self.generator) for i in range(numbytes)]

        if PY3:
            return bytes(a)
        else:
            return "".join(a)

    def block_generator(self, seed):
        counter = 0
        while True:
            for byte in sha256(("prng-%d-%s" % (counter, seed)).encode()).digest():
                yield byte
            counter += 1

def randrange_from_seed__overshoot_modulo(seed, order):
    # hash the data, then turn the digest into a number in [1,order).
    #
    # We use David-Sarah Hopwood's suggestion: turn it into a number that's
    # sufficiently larger than the group order, then modulo it down to fit.
    # This should give adequate (but not perfect) uniformity, and simple
    # code. There are other choices: try-try-again is the main one.
    base = PRNG(seed)(2*orderlen(order))
    number = (int(binascii.hexlify(base), 16) % (order-1)) + 1
    assert 1 <= number < order, (1, number, order)
    return number

def lsb_of_ones(numbits):
    return (1 << numbits) - 1

def bits_and_bytes(order):
    bits = int(math.log(order-1, 2)+1)
    bytes = bits // 8
    extrabits = bits % 8
    return bits, bytes, extrabits

# the following randrange_from_seed__METHOD() functions take an
# arbitrarily-sized secret seed and turn it into a number that obeys the same
# range limits as randrange() above. They are meant for deriving consistent
# signing keys from a secret rather than generating them randomly, for
# example a protocol in which three signing keys are derived from a master
# secret. You should use a uniformly-distributed unguessable seed with about
# curve.baselen bytes of entropy. To use one, do this:
#   seed = os.urandom(curve.baselen) # or other starting point
#   secexp = ecdsa.util.randrange_from_seed__trytryagain(seed, curve.order)
#   sk = SigningKey.from_secret_exponent(secexp, curve)

def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256):
    # hash the seed, then turn the digest into a number in [1,order), but
    # don't worry about trying to uniformly fill the range. This will lose,
    # on average, four bits of entropy.
    bits, bytes, extrabits = bits_and_bytes(order)
    if extrabits:
        bytes += 1
    base = hashmod(seed).digest()[:bytes]
    base = b("\x00")*(bytes-len(base)) + base
    number = 1+int(binascii.hexlify(base), 16)
    assert 1 <= number < order
    return number

def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256):
    # like randrange_from_seed__truncate_bytes, but only lose an average of
    # half a bit
    bits = int(math.log(order-1, 2)+1)
    maxbytes = (bits+7) // 8
    base = hashmod(seed).digest()[:maxbytes]
    base = b("\x00")*(maxbytes-len(base)) + base
    topbits = 8*maxbytes - bits
    if topbits:
        base = int2byte(ord(base[0:1]) & lsb_of_ones(topbits)) + base[1:]
    number = 1+int(binascii.hexlify(base), 16)
    assert 1 <= number < order
    return number

def randrange_from_seed__trytryagain(seed, order):
    # figure out exactly how many bits we need (rounded up to the nearest
    # bit), so we can reduce the chance of looping to less than 0.5 . This is
    # specified to feed from a byte-oriented PRNG, and discards the
    # high-order bits of the first byte as necessary to get the right number
    # of bits. The average number of loops will range from 1.0 (when
    # order=2**k-1) to 2.0 (when order=2**k+1).
    assert order > 1
    bits, bytes, extrabits = bits_and_bytes(order)
    generate = PRNG(seed)
    while True:
        extrabyte = b("")
        if extrabits:
            extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits))
        guess = string_to_number(extrabyte + generate(bytes)) + 1
        if 1 <= guess < order:
            return guess

def number_to_string(num, order):
    l = orderlen(order)
    fmt_str = "%0" + str(2*l) + "x"
    string = binascii.unhexlify((fmt_str % num).encode())
    assert len(string) == l, (len(string), l)
    return string

def number_to_string_crop(num, order):
    l = orderlen(order)
    fmt_str = "%0" + str(2*l) + "x"
    string = binascii.unhexlify((fmt_str % num).encode())
    return string[:l]

def string_to_number(string):
    return int(binascii.hexlify(string), 16)

def string_to_number_fixedlen(string, order):
    l = orderlen(order)
    assert len(string) == l, (len(string), l)
    return int(binascii.hexlify(string), 16)

# these methods are useful for the sigencode= argument to SK.sign() and the
# sigdecode= argument to VK.verify(), and control how the signature is packed
# or unpacked.

def sigencode_strings(r, s, order):
    r_str = number_to_string(r, order)
    s_str = number_to_string(s, order)
    return (r_str, s_str)

def sigencode_string(r, s, order):
    # for any given curve, the size of the signature numbers is
    # fixed, so just use simple concatenation
    r_str, s_str = sigencode_strings(r, s, order)
    return r_str + s_str

def sigencode_der(r, s, order):
    return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))

# canonical versions of sigencode methods
# these enforce low S values, by negating the value (modulo the order) if above order/2
# see CECKey::Sign() https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214
def sigencode_strings_canonize(r, s, order):
    if s > order // 2:
        s = order - s
    return sigencode_strings(r, s, order)

def sigencode_string_canonize(r, s, order):
    if s > order // 2:
        s = order - s
    return sigencode_string(r, s, order)

def sigencode_der_canonize(r, s, order):
    if s > order // 2:
        s = order - s
    return sigencode_der(r, s, order)

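# Illustrative sketch (not part of the original file): why canonization
# matters. (r, s) and (r, order - s) are both valid ECDSA signatures for the
# same message, so consumers that require a unique encoding (e.g. Bitcoin)
# demand the low-S form the *_canonize variants above produce.
def _example_low_s(r, s, order):
    low = sigencode_string_canonize(r, s, order)
    # canonizing an already-low s is a no-op, so the mapping is idempotent
    assert sigencode_string_canonize(r, min(s, order - s), order) == low
    return low
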
def sigdecode_string(signature, order):
    l = orderlen(order)
    assert len(signature) == 2*l, (len(signature), 2*l)
    r = string_to_number_fixedlen(signature[:l], order)
    s = string_to_number_fixedlen(signature[l:], order)
    return r, s

def sigdecode_strings(rs_strings, order):
    (r_str, s_str) = rs_strings
    l = orderlen(order)
    assert len(r_str) == l, (len(r_str), l)
    assert len(s_str) == l, (len(s_str), l)
    r = string_to_number_fixedlen(r_str, order)
    s = string_to_number_fixedlen(s_str, order)
    return r, s

def sigdecode_der(sig_der, order):
    # inverse of sigencode_der: SEQUENCE { INTEGER r, INTEGER s }
    rs_strings, empty = der.remove_sequence(sig_der)
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER sig: %s" %
                                binascii.hexlify(empty))
    r, rest = der.remove_integer(rs_strings)
    s, empty = der.remove_integer(rest)
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER numbers: %s" %
                                binascii.hexlify(empty))
    return r, s

@@ -1,105 +0,0 @@
import hashlib

b = 256
q = 2**255 - 19
l = 2**252 + 27742317777372353535851937790883648493

def H(m):
    return hashlib.sha512(m).digest()

def expmod(b,e,m):
    if e == 0: return 1
    t = expmod(b,e//2,m)**2 % m
    if e & 1: t = (t*b) % m
    return t

def inv(x):
    return expmod(x,q-2,q)

d = -121665 * inv(121666)
I = expmod(2,(q-1)//4,q)

def xrecover(y):
    xx = (y*y-1) * inv(d*y*y+1)
    x = expmod(xx,(q+3)//8,q)
    if (x*x - xx) % q != 0: x = (x*I) % q
    if x % 2 != 0: x = q-x
    return x

By = 4 * inv(5)
Bx = xrecover(By)
B = [Bx % q,By % q]

def edwards(P,Q):
    x1 = P[0]
    y1 = P[1]
    x2 = Q[0]
    y2 = Q[1]
    x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2)
    y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2)
    return [x3 % q,y3 % q]

def scalarmult(P,e):
    if e == 0: return [0,1]
    Q = scalarmult(P,e//2)
    Q = edwards(Q,Q)
    if e & 1: Q = edwards(Q,P)
    return Q

def encodeint(y):
    bits = [(y >> i) & 1 for i in range(b)]
    return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b//8)])

def encodepoint(P):
    x = P[0]
    y = P[1]
    bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
    return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b//8)])

def bit(h,i):
    return (ord(h[i//8]) >> (i%8)) & 1

def publickey(sk):
    h = H(sk)
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    A = scalarmult(B,a)
    return encodepoint(A)

def Hint(m):
    h = H(m)
    return sum(2**i * bit(h,i) for i in range(2*b))

def signature(m,sk,pk):
    h = H(sk)
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    r = Hint(''.join([h[i] for i in range(b//8,b//4)]) + m)
    R = scalarmult(B,r)
    S = (r + Hint(encodepoint(R) + pk + m) * a) % l
    return encodepoint(R) + encodeint(S)

def isoncurve(P):
    x = P[0]
    y = P[1]
    return (-x*x + y*y - 1 - d*x*x*y*y) % q == 0

def decodeint(s):
    return sum(2**i * bit(s,i) for i in range(0,b))

def decodepoint(s):
    y = sum(2**i * bit(s,i) for i in range(0,b-1))
    x = xrecover(y)
    if x & 1 != bit(s,b-1): x = q-x
    P = [x,y]
    if not isoncurve(P): raise Exception("decoding point that is not on curve")
    return P

def checkvalid(s,m,pk):
    if len(s) != b//4: raise Exception("signature length is wrong")
    if len(pk) != b//8: raise Exception("public-key length is wrong")
    R = decodepoint(s[0:b//8])
    A = decodepoint(pk)
    S = decodeint(s[b//8:b//4])
    h = Hint(encodepoint(R) + pk + m)
    if scalarmult(B,S) != edwards(R,scalarmult(A,h)):
        raise Exception("signature does not pass verification")

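# Illustrative sketch (not part of the original file): end-to-end use of the
# reference functions above (Python 2 string semantics, like the module
# itself). The 32-byte secret seed is arbitrary; the pure-Python scalarmult
# makes this slow, which is fine for a reference implementation.
def _example_ed25519_roundtrip():
    import os
    sk = os.urandom(b // 8)             # 32-byte secret seed
    pk = publickey(sk)                  # 32-byte public key
    sig = signature("message", sk, pk)  # 64 bytes: R || S
    checkvalid(sig, "message", pk)      # raises if the signature is bad
    return pk, sig
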
@@ -1,4 +0,0 @@
from .jsonpath import *
from .parser import parse

__version__ = '1.3.0'
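# Illustrative sketch (not part of the original file): the typical entry
# point for this package, using the parse() re-exported above.
def _example_parse_and_find():
    expr = parse('foo[*].baz')                       # compile once, reuse
    matches = expr.find({'foo': [{'baz': 1}, {'baz': 2}]})
    assert [m.value for m in matches] == [1, 2]
    return [str(m.full_path) for m in matches]       # e.g. ['foo.[0].baz', 'foo.[1].baz']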
@@ -1,510 +0,0 @@
from __future__ import unicode_literals, print_function, absolute_import, division, generators, nested_scopes
import logging
import six
from six.moves import xrange
from itertools import *

logger = logging.getLogger(__name__)

# Turn on/off the automatic creation of id attributes
# ... could be a kwarg pervasively but uses are rare and simple today
auto_id_field = None

class JSONPath(object):
    """
    The base class for JSONPath abstract syntax; those
    methods stubbed here are the interface to supported
    JSONPath semantics.
    """

    def find(self, data):
        """
        All `JSONPath` types support `find()`, which returns an iterable of `DatumInContext`s.
        They keep track of the path followed to the current location, so if the calling code
        has some opinion about that, it can be passed in here as a starting point.
        """
        raise NotImplementedError()

    def update(self, data, val):
        "Returns `data` with the specified path replaced by `val`"
        raise NotImplementedError()

    def child(self, child):
        """
        Equivalent to Child(self, next) but with some canonicalization
        """
        if isinstance(self, This) or isinstance(self, Root):
            return child
        elif isinstance(child, This):
            return self
        elif isinstance(child, Root):
            return child
        else:
            return Child(self, child)

    def make_datum(self, value):
        if isinstance(value, DatumInContext):
            return value
        else:
            return DatumInContext(value, path=Root(), context=None)

class DatumInContext(object):
    """
    Represents a datum along a path from a context.

    Essentially a zipper but with a structure represented by JsonPath,
    and where the context is more of a parent pointer than a proper
    representation of the context.

    For quick-and-dirty work, this proxies any non-special attributes
    to the underlying datum, but the actual datum can (and usually should)
    be retrieved via the `value` attribute.

    To place `datum` within another, use `datum.in_context(context=..., path=...)`
    which extends the path. If the datum already has a context, it places the entire
    context within that passed in, so an object can be built from the inside
    out.
    """
    @classmethod
    def wrap(cls, data):
        if isinstance(data, cls):
            return data
        else:
            return cls(data)

    def __init__(self, value, path=None, context=None):
        self.value = value
        self.path = path or This()
        self.context = None if context is None else DatumInContext.wrap(context)

    def in_context(self, context, path):
        context = DatumInContext.wrap(context)

        if self.context:
            return DatumInContext(value=self.value, path=self.path, context=context.in_context(path=path, context=context))
        else:
            return DatumInContext(value=self.value, path=path, context=context)

    @property
    def full_path(self):
        return self.path if self.context is None else self.context.full_path.child(self.path)

    @property
    def id_pseudopath(self):
        """
        Looks like a path, but with ids stuck in when available
        """
        try:
            pseudopath = Fields(str(self.value[auto_id_field]))
        except (TypeError, AttributeError, KeyError): # This may not be all the interesting exceptions
            pseudopath = self.path

        if self.context:
            return self.context.id_pseudopath.child(pseudopath)
        else:
            return pseudopath

    def __repr__(self):
        return '%s(value=%r, path=%r, context=%r)' % (self.__class__.__name__, self.value, self.path, self.context)

    def __eq__(self, other):
        return isinstance(other, DatumInContext) and other.value == self.value and other.path == self.path and self.context == other.context

class AutoIdForDatum(DatumInContext):
    """
    This behaves like a DatumInContext, but the value is
    always the path leading up to it, not including the "id",
    and with any "id" fields along the way replacing the prior
    segment of the path.

    For example, it will make "foo.bar.id" return a datum
    that behaves like DatumInContext(value="foo.bar", path="foo.bar.id").

    This is disabled by default; it can be turned on by
    setting the `auto_id_field` global to a value other
    than `None`.
    """

    def __init__(self, datum, id_field=None):
        """
        Invariant is that datum.path is the path from context to datum. The auto id
        will either be the id in the datum (if present) or the id of the context
        followed by the path to the datum.

        The path to this datum is always the path to the context, the path to the
        datum, and then the auto id field.
        """
        self.datum = datum
        self.id_field = id_field or auto_id_field

    @property
    def value(self):
        return str(self.datum.id_pseudopath)

    @property
    def path(self):
        return self.id_field

    @property
    def context(self):
        return self.datum

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.datum)

    def in_context(self, context, path):
        return AutoIdForDatum(self.datum.in_context(context=context, path=path))

    def __eq__(self, other):
        return isinstance(other, AutoIdForDatum) and other.datum == self.datum and self.id_field == other.id_field

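# Illustrative sketch (not part of the original file): driving the auto-id
# machinery directly through the AST classes defined here. With
# auto_id_field set, the synthetic 'id' child resolves to id_pseudopath.
def _example_auto_id():
    global auto_id_field
    auto_id_field = 'id'
    try:
        datum = Fields('bar').find(DatumInContext({'bar': {'id': 'b1', 'baz': 3}}))[0]
        # the 'id' field yields an AutoIdForDatum whose value is the real id
        return Fields('id').find(datum)[0].value   # -> 'b1'
    finally:
        auto_id_field = None                       # restore the default
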
class Root(JSONPath):
    """
    The JSONPath referring to the "root" object. Concrete syntax is '$'.
    The root is the topmost datum without any context attached.
    """

    def find(self, data):
        if not isinstance(data, DatumInContext):
            return [DatumInContext(data, path=Root(), context=None)]
        else:
            if data.context is None:
                return [DatumInContext(data.value, context=None, path=Root())]
            else:
                return Root().find(data.context)

    def update(self, data, val):
        return val

    def __str__(self):
        return '$'

    def __repr__(self):
        return 'Root()'

    def __eq__(self, other):
        return isinstance(other, Root)

class This(JSONPath):
    """
    The JSONPath referring to the current datum. Concrete syntax is '@'.
    """

    def find(self, datum):
        return [DatumInContext.wrap(datum)]

    def update(self, data, val):
        return val

    def __str__(self):
        return '`this`'

    def __repr__(self):
        return 'This()'

    def __eq__(self, other):
        return isinstance(other, This)

class Child(JSONPath):
    """
    JSONPath that first matches the left, then the right.
    Concrete syntax is <left> '.' <right>
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, datum):
        """
        Extra special case: auto ids do not have children,
        so cut it off right now rather than auto id the auto id
        """

        return [submatch
                for subdata in self.left.find(datum)
                if not isinstance(subdata, AutoIdForDatum)
                for submatch in self.right.find(subdata)]

    def __eq__(self, other):
        return isinstance(other, Child) and self.left == other.left and self.right == other.right

    def __str__(self):
        return '%s.%s' % (self.left, self.right)

    def __repr__(self):
        return '%s(%r, %r)' % (self.__class__.__name__, self.left, self.right)

class Parent(JSONPath):
    """
    JSONPath that matches the parent node of the current match.
    Will crash if no such parent exists.
    Available via named operator `parent`.
    """

    def find(self, datum):
        datum = DatumInContext.wrap(datum)
        return [datum.context]

    def __eq__(self, other):
        return isinstance(other, Parent)

    def __str__(self):
        return '`parent`'

    def __repr__(self):
        return 'Parent()'


class Where(JSONPath):
    """
    JSONPath that first matches the left, and then
    filters for only those nodes that have
    a match on the right.

    WARNING: Subject to change. May want to have "contains"
    or some other better word for it.
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, data):
        # keep only the left matches for which the right expression matches
        return [subdata for subdata in self.left.find(data) if self.right.find(subdata)]

    def __str__(self):
        return '%s where %s' % (self.left, self.right)

    def __eq__(self, other):
        return isinstance(other, Where) and other.left == self.left and other.right == self.right

class Descendants(JSONPath):
    """
    JSONPath that matches first the left expression then any descendant
    of it which matches the right expression.
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, datum):
        # <left> .. <right> ==> <left> . (<right> | *..<right> | [*]..<right>)
        #
        # With a wonky caveat: since Slice() has funky coercions
        # we cannot just delegate to that equivalence or we'll hit an
        # infinite loop. So right here we implement the coercion-free version.

        # Get all left matches into a list
        left_matches = self.left.find(datum)
        if not isinstance(left_matches, list):
            left_matches = [left_matches]

        def match_recursively(datum):
            right_matches = self.right.find(datum)

            # Manually do the * or [*] to avoid coercion and recurse just the right-hand pattern
            if isinstance(datum.value, list):
                recursive_matches = [submatch
                                     for i in range(0, len(datum.value))
                                     for submatch in match_recursively(DatumInContext(datum.value[i], context=datum, path=Index(i)))]

            elif isinstance(datum.value, dict):
                recursive_matches = [submatch
                                     for field in datum.value.keys()
                                     for submatch in match_recursively(DatumInContext(datum.value[field], context=datum, path=Fields(field)))]

            else:
                recursive_matches = []

            return right_matches + list(recursive_matches)

        # TODO: repeatable iterator instead of list?
        return [submatch
                for left_match in left_matches
                for submatch in match_recursively(left_match)]

    def is_singular(self):
        return False

    def __str__(self):
        return '%s..%s' % (self.left, self.right)

    def __eq__(self, other):
        return isinstance(other, Descendants) and self.left == other.left and self.right == other.right

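# Illustrative sketch (not part of the original file): Descendants implements
# the '..' operator, so Descendants(Root(), Fields('baz')) finds every 'baz'
# at any depth -- the AST form of the concrete syntax '$..baz'.
def _example_descendants():
    data = {'a': {'baz': 1}, 'b': [{'baz': 2}], 'baz': 3}
    matches = Descendants(Root(), Fields('baz')).find(data)
    return sorted(m.value for m in matches)   # -> [1, 2, 3]
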
class Union(JSONPath):
    """
    JSONPath that returns the union of the results of each match.
    This is pretty shoddily implemented for now. The nicest semantics
    in case of mismatched bits (list vs atomic) is to put
    them all in a list, but I haven't done that yet.

    WARNING: Any appearance of this being the _concatenation_ is
    coincidence. It may even be a bug! (or laziness)
    """
    def __init__(self, left, right):
        self.left = left
        self.right = right

    def is_singular(self):
        return False

    def find(self, data):
        return self.left.find(data) + self.right.find(data)

class Intersect(JSONPath):
    """
    JSONPath for bits that match *both* patterns.

    This can be accomplished a couple of ways. The most
    efficient is to actually build the intersected
    AST as in building a state machine for matching the
    intersection of regular languages. The next
    idea is to build a filtered data and match against
    that.
    """
    def __init__(self, left, right):
        self.left = left
        self.right = right

    def is_singular(self):
        return False

    def find(self, data):
        raise NotImplementedError()

class Fields(JSONPath):
    """
    JSONPath referring to some field of the current object.
    Concrete syntax is comma-separated field names.

    WARNING: If '*' is any of the field names, then they will
    all be returned.
    """

    def __init__(self, *fields):
        self.fields = fields

    def get_field_datum(self, datum, field):
        if field == auto_id_field:
            return AutoIdForDatum(datum)
        else:
            try:
                field_value = datum.value[field] # Do NOT use `val.get(field)` since that confuses None as a value and None due to `get`
                return DatumInContext(value=field_value, path=Fields(field), context=datum)
            except (TypeError, KeyError, AttributeError):
                return None

    def reified_fields(self, datum):
        if '*' not in self.fields:
            return self.fields
        else:
            try:
                fields = tuple(datum.value.keys())
                return fields if auto_id_field is None else fields + (auto_id_field,)
            except AttributeError:
                return ()

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        return [field_datum
                for field_datum in [self.get_field_datum(datum, field) for field in self.reified_fields(datum)]
                if field_datum is not None]

    def __str__(self):
        return ','.join(self.fields)

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, ','.join(map(repr, self.fields)))

    def __eq__(self, other):
        return isinstance(other, Fields) and tuple(self.fields) == tuple(other.fields)


class Index(JSONPath):
    """
    JSONPath that matches indices of the current datum, or none if not large enough.
    Concrete syntax is brackets.

    WARNING: If the datum is not long enough, it will not crash but will not match anything.
    NOTE: For the concrete syntax of `[*]`, the abstract syntax is a Slice() with no parameters (equiv to `[:]`)
    """

    def __init__(self, index):
        self.index = index

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        if len(datum.value) > self.index:
            return [DatumInContext(datum.value[self.index], path=self, context=datum)]
        else:
            return []

    def __eq__(self, other):
        return isinstance(other, Index) and self.index == other.index

    def __str__(self):
        return '[%i]' % self.index

class Slice(JSONPath):
    """
    JSONPath matching a slice of an array.

    Because of a mismatch between JSON and XML when schema-unaware,
    this always returns an iterable; if the incoming data
    was not a list, then it returns a one element list _containing_ that
    data.

    Consider these two docs, and their schema-unaware translation to JSON:

    <a><b>hello</b></a> ==> {"a": {"b": "hello"}}
    <a><b>hello</b><b>goodbye</b></a> ==> {"a": {"b": ["hello", "goodbye"]}}

    If there were a schema, it would be known that "b" should always be an
    array (unless the schema were wonky, but that is too much to fix here)
    so when querying with JSON if the one writing the JSON knows that it
    should be an array, they can write a slice operator and it will coerce
    a non-array value to an array.

    This may be a bit unfortunate because it would be nice to always have
    an iterator, but dictionaries and other objects may also be iterable,
    so this is the compromise.
    """
    def __init__(self, start=None, end=None, step=None):
        self.start = start
        self.end = end
        self.step = step

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        # Here's the hack. If it is a dictionary or some kind of constant,
        # put it in a single-element list
        if (isinstance(datum.value, dict) or isinstance(datum.value, six.integer_types) or isinstance(datum.value, six.string_types)):
            return self.find(DatumInContext([datum.value], path=datum.path, context=datum.context))

        # Some iterators do not support slicing but we can still
        # at least work for '*'
        if self.start is None and self.end is None and self.step is None:
            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in xrange(0, len(datum.value))]
        else:
            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in range(0, len(datum.value))[self.start:self.end:self.step]]

    def __str__(self):
        if self.start is None and self.end is None and self.step is None:
            return '[*]'
        else:
            return '[%s%s%s]' % (self.start or '',
                                 ':%d' % self.end if self.end else '',
                                 ':%d' % self.step if self.step else '')

    def __repr__(self):
        return '%s(start=%r,end=%r,step=%r)' % (self.__class__.__name__, self.start, self.end, self.step)

    def __eq__(self, other):
        return isinstance(other, Slice) and other.start == self.start and self.end == other.end and other.step == self.step
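
# Illustrative sketch (not part of the original file): the coercion described
# in the Slice docstring. A non-list value is wrapped in a one-element list,
# so '[*]' (a Slice with no parameters) matches it exactly once.
def _example_slice_coercion():
    assert [d.value for d in Slice().find([10, 20])] == [10, 20]
    assert [d.value for d in Slice().find('scalar')] == ['scalar']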
@@ -1,171 +0,0 @@
from __future__ import unicode_literals, print_function, absolute_import, division, generators, nested_scopes
import sys
import logging

import ply.lex

logger = logging.getLogger(__name__)

class JsonPathLexerError(Exception):
    pass

class JsonPathLexer(object):
    '''
    A lexical analyzer for JsonPath.
    '''

    def __init__(self, debug=False):
        self.debug = debug
        if self.__doc__ is None:
            raise JsonPathLexerError('Docstrings have been removed! By design of PLY, jsonpath-rw requires docstrings. You must not use PYTHONOPTIMIZE=2 or python -OO.')

    def tokenize(self, string):
        '''
        Maps a string to an iterator over tokens. In other words: [char] -> [token]
        '''

        new_lexer = ply.lex.lex(module=self, debug=self.debug, errorlog=logger)
        new_lexer.latest_newline = 0
        new_lexer.string_value = None
        new_lexer.input(string)

        while True:
            t = new_lexer.token()
            if t is None: break
            t.col = t.lexpos - new_lexer.latest_newline
            yield t

        if new_lexer.string_value is not None:
            raise JsonPathLexerError('Unexpected EOF in string literal or identifier')

    # ============== PLY Lexer specification ==================
    #
    # This probably should be private but:
    #   - the parser requires access to `tokens` (perhaps they should be defined in a third, shared dependency)
    #   - things like `literals` might be a legitimate part of the public interface.
    #
    # Anyhow, it is pythonic to give some rope to hang oneself with :-)

    literals = ['*', '.', '[', ']', '(', ')', '$', ',', ':', '|', '&']

    reserved_words = {'where': 'WHERE'}

    tokens = ['DOUBLEDOT', 'NUMBER', 'ID', 'NAMED_OPERATOR'] + list(reserved_words.values())

    states = [('singlequote', 'exclusive'),
              ('doublequote', 'exclusive'),
              ('backquote', 'exclusive')]

    # Normal lexing, rather easy
    t_DOUBLEDOT = r'\.\.'
    t_ignore = ' \t'

    def t_ID(self, t):
        r'[a-zA-Z_@][a-zA-Z0-9_@\-]*'
        t.type = self.reserved_words.get(t.value, 'ID')
        return t

    def t_NUMBER(self, t):
        r'-?\d+'
        t.value = int(t.value)
        return t


    # Single-quoted strings
    t_singlequote_ignore = ''
    def t_singlequote(self, t):
        r"'"
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('singlequote')

    def t_singlequote_content(self, t):
        r"[^'\\]+"
        t.lexer.string_value += t.value

    def t_singlequote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_singlequote_end(self, t):
        r"'"
        t.value = t.lexer.string_value
        t.type = 'ID'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_singlequote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing singlequoted field: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))


    # Double-quoted strings
    t_doublequote_ignore = ''
    def t_doublequote(self, t):
        r'"'
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('doublequote')

    def t_doublequote_content(self, t):
        r'[^"\\]+'
        t.lexer.string_value += t.value

    def t_doublequote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_doublequote_end(self, t):
        r'"'
        t.value = t.lexer.string_value
        t.type = 'ID'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_doublequote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing doublequoted field: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))


    # Back-quoted "magic" operators
    t_backquote_ignore = ''
    def t_backquote(self, t):
        r'`'
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('backquote')

    def t_backquote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_backquote_content(self, t):
        r"[^`\\]+"
        t.lexer.string_value += t.value

    def t_backquote_end(self, t):
        r'`'
        t.value = t.lexer.string_value
        t.type = 'NAMED_OPERATOR'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_backquote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing backquoted operator: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))


    # Counting lines, handling errors
    def t_newline(self, t):
        r'\n'
        t.lexer.lineno += 1
        t.lexer.latest_newline = t.lexpos

    def t_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))

if __name__ == '__main__':
    logging.basicConfig()
    lexer = JsonPathLexer(debug=True)
    for token in lexer.tokenize(sys.stdin.read()):
        print('%-20s%s' % (token.value, token.type))
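A quick way to see the token stream this lexer produces (a sketch, run against the class above; quoted fields come back as plain ID tokens, single-character operators as literal tokens):

    lexer = JsonPathLexer()
    for token in lexer.tokenize("foo.'bar baz'[0]"):
        print(token.type, repr(token.value))
    # ID 'foo', '.' '.', ID 'bar baz', '[' '[', NUMBER 0, ']' ']'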
@@ -1,187 +0,0 @@
from __future__ import print_function, absolute_import, division, generators, nested_scopes
import sys
import os.path
import logging

import ply.yacc

from jsonpath_rw.jsonpath import *
from jsonpath_rw.lexer import JsonPathLexer

logger = logging.getLogger(__name__)

def parse(string):
    return JsonPathParser().parse(string)

class JsonPathParser(object):
    '''
    An LALR-parser for JsonPath
    '''

    tokens = JsonPathLexer.tokens

    def __init__(self, debug=False, lexer_class=None):
        if self.__doc__ is None:
            raise Exception('Docstrings have been removed! By design of PLY, jsonpath-rw requires docstrings. You must not use PYTHONOPTIMIZE=2 or python -OO.')

        self.debug = debug
        self.lexer_class = lexer_class or JsonPathLexer  # Crufty but works around statefulness in PLY

    def parse(self, string, lexer=None):
        lexer = lexer or self.lexer_class()
        return self.parse_token_stream(lexer.tokenize(string))

    def parse_token_stream(self, token_iterator, start_symbol='jsonpath'):

        # Since PLY has some crufty aspects and dumps files, we try to keep them local
        # However, we need to derive the name of the output Python file :-/
        output_directory = os.path.dirname(__file__)
        try:
            module_name = os.path.splitext(os.path.split(__file__)[1])[0]
        except Exception:
            module_name = __name__

        parsing_table_module = '_'.join([module_name, start_symbol, 'parsetab'])

        # And we regenerate the parse table every time; it doesn't actually take that long!
        new_parser = ply.yacc.yacc(module=self,
                                   debug=self.debug,
                                   tabmodule=parsing_table_module,
                                   outputdir=output_directory,
                                   write_tables=0,
                                   start=start_symbol,
                                   errorlog=logger)

        return new_parser.parse(lexer=IteratorToTokenStream(token_iterator))

    # ===================== PLY Parser specification =====================

    precedence = [
        ('left', ','),
        ('left', 'DOUBLEDOT'),
        ('left', '.'),
        ('left', '|'),
        ('left', '&'),
        ('left', 'WHERE'),
    ]

    def p_error(self, t):
        raise Exception('Parse error at %s:%s near token %s (%s)' % (t.lineno, t.col, t.value, t.type))

    def p_jsonpath_binop(self, p):
        """jsonpath : jsonpath '.' jsonpath
                    | jsonpath DOUBLEDOT jsonpath
                    | jsonpath WHERE jsonpath
                    | jsonpath '|' jsonpath
                    | jsonpath '&' jsonpath"""
        op = p[2]

        if op == '.':
            p[0] = Child(p[1], p[3])
        elif op == '..':
            p[0] = Descendants(p[1], p[3])
        elif op == 'where':
            p[0] = Where(p[1], p[3])
        elif op == '|':
            p[0] = Union(p[1], p[3])
        elif op == '&':
            p[0] = Intersect(p[1], p[3])

    def p_jsonpath_fields(self, p):
        "jsonpath : fields_or_any"
        p[0] = Fields(*p[1])

    def p_jsonpath_named_operator(self, p):
        "jsonpath : NAMED_OPERATOR"
        if p[1] == 'this':
            p[0] = This()
        elif p[1] == 'parent':
            p[0] = Parent()
        else:
            raise Exception('Unknown named operator `%s` at %s:%s' % (p[1], p.lineno(1), p.lexpos(1)))

    def p_jsonpath_root(self, p):
        "jsonpath : '$'"
        p[0] = Root()

    def p_jsonpath_idx(self, p):
        "jsonpath : '[' idx ']'"
        p[0] = p[2]

    def p_jsonpath_slice(self, p):
        "jsonpath : '[' slice ']'"
        p[0] = p[2]

    def p_jsonpath_fieldbrackets(self, p):
        "jsonpath : '[' fields ']'"
        p[0] = Fields(*p[2])

    def p_jsonpath_child_fieldbrackets(self, p):
        "jsonpath : jsonpath '[' fields ']'"
        p[0] = Child(p[1], Fields(*p[3]))

    def p_jsonpath_child_idxbrackets(self, p):
        "jsonpath : jsonpath '[' idx ']'"
        p[0] = Child(p[1], p[3])

    def p_jsonpath_child_slicebrackets(self, p):
        "jsonpath : jsonpath '[' slice ']'"
        p[0] = Child(p[1], p[3])

    def p_jsonpath_parens(self, p):
        "jsonpath : '(' jsonpath ')'"
        p[0] = p[2]

    # Because fields in brackets cannot be '*' - that is reserved for array indices
    def p_fields_or_any(self, p):
        """fields_or_any : fields
                         | '*' """
        if p[1] == '*':
            p[0] = ['*']
        else:
            p[0] = p[1]

    def p_fields_id(self, p):
        "fields : ID"
        p[0] = [p[1]]

    def p_fields_comma(self, p):
        "fields : fields ',' fields"
        p[0] = p[1] + p[3]

    def p_idx(self, p):
        "idx : NUMBER"
        p[0] = Index(p[1])

    def p_slice_any(self, p):
        "slice : '*'"
        p[0] = Slice()

    def p_slice(self, p):  # Currently does not support `step`
        "slice : maybe_int ':' maybe_int"
        p[0] = Slice(start=p[1], end=p[3])

    def p_maybe_int(self, p):
        """maybe_int : NUMBER
                     | empty"""
        p[0] = p[1]

    def p_empty(self, p):
        'empty :'
        p[0] = None

class IteratorToTokenStream(object):
    def __init__(self, iterator):
        self.iterator = iterator

    def token(self):
        try:
            return next(self.iterator)
        except StopIteration:
            return None


if __name__ == '__main__':
    logging.basicConfig()
    parser = JsonPathParser(debug=True)
    print(parser.parse(sys.stdin.read()))
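The grammar above builds the AST nodes from jsonpath.py; a minimal sketch of using it (assuming the standard find() semantics of those nodes):

    expr = parse('foo[*].baz')     # Child(Child(Fields('foo'), Slice()), Fields('baz'))
    print([m.value for m in expr.find({'foo': [{'baz': 1}, {'baz': 2}]})])   # [1, 2]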
@@ -1,4 +0,0 @@
# PLY package
# Author: David Beazley (dave@dabeaz.com)

__all__ = ['lex','yacc']
@@ -1,898 +0,0 @@
# -----------------------------------------------------------------------------
# cpp.py
#
# Author: David Beazley (http://www.dabeaz.com)
# Copyright (C) 2007
# All rights reserved
#
# This module implements an ANSI-C style lexical preprocessor for PLY.
# -----------------------------------------------------------------------------
from __future__ import generators

# -----------------------------------------------------------------------------
# Default preprocessor lexer definitions. These tokens are enough to get
# a basic preprocessor working. Other modules may import these if they want
# -----------------------------------------------------------------------------

tokens = (
    'CPP_ID', 'CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT', 'CPP_POUND', 'CPP_DPOUND'
)

literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""

# Whitespace
def t_CPP_WS(t):
    r'\s+'
    t.lexer.lineno += t.value.count("\n")
    return t

t_CPP_POUND = r'\#'
t_CPP_DPOUND = r'\#\#'

# Identifier
t_CPP_ID = r'[A-Za-z_][\w_]*'

# Integer literal
def CPP_INTEGER(t):
    r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU]|[lL]|[uU][lL]|[lL][uU])?)'
    return t

t_CPP_INTEGER = CPP_INTEGER

# Floating literal
t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

# String literal
def t_CPP_STRING(t):
    r'\"([^\\\n]|(\\(.|\n)))*?\"'
    t.lexer.lineno += t.value.count("\n")
    return t

# Character constant 'c' or L'c'
def t_CPP_CHAR(t):
    r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
    t.lexer.lineno += t.value.count("\n")
    return t

# Comment
def t_CPP_COMMENT(t):
    r'(/\*(.|\n)*?\*/)|(//.*?\n)'
    t.lexer.lineno += t.value.count("\n")
    return t

def t_error(t):
    t.type = t.value[0]
    t.value = t.value[0]
    t.lexer.skip(1)
    return t

import re
import copy
import time
import os.path

# -----------------------------------------------------------------------------
# trigraph()
#
# Given an input string, this function replaces all trigraph sequences.
# The following mapping is used:
#
#     ??=    #
#     ??/    \
#     ??'    ^
#     ??(    [
#     ??)    ]
#     ??!    |
#     ??<    {
#     ??>    }
#     ??-    ~
# -----------------------------------------------------------------------------

_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
_trigraph_rep = {
    '=':'#',
    '/':'\\',
    "'":'^',
    '(':'[',
    ')':']',
    '!':'|',
    '<':'{',
    '>':'}',
    '-':'~'
}

def trigraph(input):
    return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]], input)
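A quick check of the mapping above:

    print(trigraph("??=define ARR(x) x??(0??)"))   # -> "#define ARR(x) x[0]"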
# ------------------------------------------------------------------
# Macro object
#
# This object holds information about preprocessor macros
#
#    .name      - Macro name (string)
#    .value     - Macro value (a list of tokens)
#    .arglist   - List of argument names
#    .variadic  - Boolean indicating whether or not variadic macro
#    .vararg    - Name of the variadic parameter
#
# When a macro is created, the macro replacement token sequence is
# pre-scanned and used to create patch lists that are later used
# during macro expansion
# ------------------------------------------------------------------

class Macro(object):
    def __init__(self, name, value, arglist=None, variadic=False):
        self.name = name
        self.value = value
        self.arglist = arglist
        self.variadic = variadic
        if variadic:
            self.vararg = arglist[-1]
        self.source = None
# ------------------------------------------------------------------
# Preprocessor object
#
# Object representing a preprocessor. Contains macro definitions,
# include directories, and other information
# ------------------------------------------------------------------

class Preprocessor(object):
    def __init__(self, lexer=None):
        if lexer is None:
            lexer = lex.lexer
        self.lexer = lexer
        self.macros = { }
        self.path = []
        self.temp_path = []

        # Probe the lexer for selected tokens
        self.lexprobe()

        tm = time.localtime()
        self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y", tm))
        self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S", tm))
        self.parser = None

    # -----------------------------------------------------------------------------
    # tokenize()
    #
    # Utility function. Given a string of text, tokenize into a list of tokens
    # -----------------------------------------------------------------------------

    def tokenize(self, text):
        tokens = []
        self.lexer.input(text)
        while True:
            tok = self.lexer.token()
            if not tok: break
            tokens.append(tok)
        return tokens

    # ---------------------------------------------------------------------
    # error()
    #
    # Report a preprocessor error/warning of some kind
    # ----------------------------------------------------------------------

    def error(self, file, line, msg):
        print("%s:%d %s" % (file, line, msg))

    # ----------------------------------------------------------------------
    # lexprobe()
    #
    # This method probes the preprocessor lexer object to discover
    # the token types of symbols that are important to the preprocessor.
    # If this works right, the preprocessor will simply "work"
    # with any suitable lexer regardless of how tokens have been named.
    # ----------------------------------------------------------------------

    def lexprobe(self):

        # Determine the token type for identifiers
        self.lexer.input("identifier")
        tok = self.lexer.token()
        if not tok or tok.value != "identifier":
            print("Couldn't determine identifier type")
        else:
            self.t_ID = tok.type

        # Determine the token type for integers
        self.lexer.input("12345")
        tok = self.lexer.token()
        if not tok or int(tok.value) != 12345:
            print("Couldn't determine integer type")
        else:
            self.t_INTEGER = tok.type
            self.t_INTEGER_TYPE = type(tok.value)

        # Determine the token type for strings enclosed in double quotes
        self.lexer.input("\"filename\"")
        tok = self.lexer.token()
        if not tok or tok.value != "\"filename\"":
            print("Couldn't determine string type")
        else:
            self.t_STRING = tok.type

        # Determine the token type for whitespace--if any
        self.lexer.input(" ")
        tok = self.lexer.token()
        if not tok or tok.value != " ":
            self.t_SPACE = None
        else:
            self.t_SPACE = tok.type

        # Determine the token type for newlines
        self.lexer.input("\n")
        tok = self.lexer.token()
        if not tok or tok.value != "\n":
            self.t_NEWLINE = None
            print("Couldn't determine token for newlines")
        else:
            self.t_NEWLINE = tok.type

        self.t_WS = (self.t_SPACE, self.t_NEWLINE)

        # Check for other characters used by the preprocessor
        chars = [ '<','>','#','##','\\','(',')',',','.']
        for c in chars:
            self.lexer.input(c)
            tok = self.lexer.token()
            if not tok or tok.value != c:
                print("Unable to lex '%s' required for preprocessor" % c)

    # ----------------------------------------------------------------------
    # add_path()
    #
    # Adds a search path to the preprocessor.
    # ----------------------------------------------------------------------

    def add_path(self, path):
        self.path.append(path)

    # ----------------------------------------------------------------------
    # group_lines()
    #
    # Given an input string, this function splits it into lines. Trailing whitespace
    # is removed. Any line ending with \ is grouped with the next line. This
    # function forms the lowest level of the preprocessor---grouping text into
    # a line-by-line format.
    # ----------------------------------------------------------------------

    def group_lines(self, input):
        lex = self.lexer.clone()
        lines = [x.rstrip() for x in input.splitlines()]
        for i in xrange(len(lines)):
            j = i+1
            while lines[i].endswith('\\') and (j < len(lines)):
                lines[i] = lines[i][:-1]+lines[j]
                lines[j] = ""
                j += 1

        input = "\n".join(lines)
        lex.input(input)
        lex.lineno = 1

        current_line = []
        while True:
            tok = lex.token()
            if not tok:
                break
            current_line.append(tok)
            if tok.type in self.t_WS and '\n' in tok.value:
                yield current_line
                current_line = []

        if current_line:
            yield current_line
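For example (a sketch; the preprocessor is wired to the module-level lexer spec above, exactly as in the __main__ block at the bottom of this file):

    import ply.lex as lex
    p = Preprocessor(lex.lex())
    for line in p.group_lines("int x = 1 + \\\n        2;\nint y;"):
        print("".join(tok.value for tok in line).rstrip())
    # -> two logical lines: "int x = 1 +         2;" (continuation folded) and "int y;"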
    # ----------------------------------------------------------------------
    # tokenstrip()
    #
    # Remove leading/trailing whitespace tokens from a token list
    # ----------------------------------------------------------------------

    def tokenstrip(self, tokens):
        i = 0
        while i < len(tokens) and tokens[i].type in self.t_WS:
            i += 1
        del tokens[:i]
        i = len(tokens)-1
        while i >= 0 and tokens[i].type in self.t_WS:
            i -= 1
        del tokens[i+1:]
        return tokens


    # ----------------------------------------------------------------------
    # collect_args()
    #
    # Collects comma separated arguments from a list of tokens. The arguments
    # must be enclosed in parenthesis. Returns a tuple (tokencount, args, positions)
    # where tokencount is the number of tokens consumed, args is a list of arguments,
    # and positions is a list of integers containing the starting index of each
    # argument. Each argument is represented by a list of tokens.
    #
    # When collecting arguments, leading and trailing whitespace is removed
    # from each argument.
    #
    # This function properly handles nested parenthesis and commas---these do not
    # define new arguments.
    # ----------------------------------------------------------------------

    def collect_args(self, tokenlist):
        args = []
        positions = []
        current_arg = []
        nesting = 1
        tokenlen = len(tokenlist)

        # Search for the opening '('.
        i = 0
        while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
            i += 1

        if (i < tokenlen) and (tokenlist[i].value == '('):
            positions.append(i+1)
        else:
            self.error(self.source, tokenlist[0].lineno, "Missing '(' in macro arguments")
            return 0, [], []

        i += 1

        while i < tokenlen:
            t = tokenlist[i]
            if t.value == '(':
                current_arg.append(t)
                nesting += 1
            elif t.value == ')':
                nesting -= 1
                if nesting == 0:
                    if current_arg:
                        args.append(self.tokenstrip(current_arg))
                        positions.append(i)
                    return i+1, args, positions
                current_arg.append(t)
            elif t.value == ',' and nesting == 1:
                args.append(self.tokenstrip(current_arg))
                positions.append(i+1)
                current_arg = []
            else:
                current_arg.append(t)
            i += 1

        # Missing end argument
        self.error(self.source, tokenlist[-1].lineno, "Missing ')' in macro arguments")
        return 0, [], []
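Concretely (reusing p from the sketch above): the comma inside f(...) sits at nesting level 2, so it does not split an argument:

    count, args, positions = p.collect_args(p.tokenize("(a, f(b, c), d)"))
    print([''.join(tok.value for tok in arg) for arg in args])   # ['a', 'f(b, c)', 'd']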
    # ----------------------------------------------------------------------
    # macro_prescan()
    #
    # Examine the macro value (token sequence) and identify patch points
    # This is used to speed up macro expansion later on---we'll know
    # right away where to apply patches to the value to form the expansion
    # ----------------------------------------------------------------------

    def macro_prescan(self, macro):
        macro.patch = []            # Standard macro arguments
        macro.str_patch = []        # String conversion expansion
        macro.var_comma_patch = []  # Variadic macro comma patch
        i = 0
        while i < len(macro.value):
            if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
                argnum = macro.arglist.index(macro.value[i].value)
                # Conversion of argument to a string
                if i > 0 and macro.value[i-1].value == '#':
                    macro.value[i] = copy.copy(macro.value[i])
                    macro.value[i].type = self.t_STRING
                    del macro.value[i-1]
                    macro.str_patch.append((argnum, i-1))
                    continue
                # Concatenation
                elif (i > 0 and macro.value[i-1].value == '##'):
                    macro.patch.append(('c', argnum, i-1))
                    del macro.value[i-1]
                    continue
                elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
                    macro.patch.append(('c', argnum, i))
                    i += 1
                    continue
                # Standard expansion
                else:
                    macro.patch.append(('e', argnum, i))
            elif macro.value[i].value == '##':
                if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
                        ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
                        (macro.value[i+1].value == macro.vararg):
                    macro.var_comma_patch.append(i-1)
            i += 1
        macro.patch.sort(key=lambda x: x[2], reverse=True)

    # ----------------------------------------------------------------------
    # macro_expand_args()
    #
    # Given a Macro and list of arguments (each a token list), this method
    # returns an expanded version of a macro. The return value is a token sequence
    # representing the replacement macro tokens
    # ----------------------------------------------------------------------

    def macro_expand_args(self, macro, args):
        # Make a copy of the macro token sequence
        rep = [copy.copy(_x) for _x in macro.value]

        # Make string expansion patches. These do not alter the length of the replacement sequence
        str_expansion = {}
        for argnum, i in macro.str_patch:
            if argnum not in str_expansion:
                str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
            rep[i] = copy.copy(rep[i])
            rep[i].value = str_expansion[argnum]

        # Make the variadic macro comma patch. If the variadic macro argument is empty,
        # we get rid of the comma that precedes it in the replacement sequence.
        comma_patch = False
        if macro.variadic and not args[-1]:
            for i in macro.var_comma_patch:
                rep[i] = None
                comma_patch = True

        # Make all other patches. The order of these matters. It is assumed that the patch list
        # has been sorted in reverse order of patch location since replacements will cause the
        # size of the replacement sequence to expand from the patch point.
        expanded = { }
        for ptype, argnum, i in macro.patch:
            # Concatenation. Argument is left unexpanded
            if ptype == 'c':
                rep[i:i+1] = args[argnum]
            # Normal expansion. Argument is macro expanded first
            elif ptype == 'e':
                if argnum not in expanded:
                    expanded[argnum] = self.expand_macros(args[argnum])
                rep[i:i+1] = expanded[argnum]

        # Get rid of removed comma if necessary
        if comma_patch:
            rep = [_i for _i in rep if _i]

        return rep


    # ----------------------------------------------------------------------
    # expand_macros()
    #
    # Given a list of tokens, this function performs macro expansion.
    # The expanded argument is a dictionary that contains macros already
    # expanded. This is used to prevent infinite recursion.
    # ----------------------------------------------------------------------

    def expand_macros(self, tokens, expanded=None):
        if expanded is None:
            expanded = {}
        i = 0
        while i < len(tokens):
            t = tokens[i]
            if t.type == self.t_ID:
                if t.value in self.macros and t.value not in expanded:
                    # Yes, we found a macro match
                    expanded[t.value] = True

                    m = self.macros[t.value]
                    if not m.arglist:
                        # A simple macro
                        ex = self.expand_macros([copy.copy(_x) for _x in m.value], expanded)
                        for e in ex:
                            e.lineno = t.lineno
                        tokens[i:i+1] = ex
                        i += len(ex)
                    else:
                        # A macro with arguments
                        j = i + 1
                        while j < len(tokens) and tokens[j].type in self.t_WS:
                            j += 1
                        if tokens[j].value == '(':
                            tokcount, args, positions = self.collect_args(tokens[j:])
                            if not m.variadic and len(args) != len(m.arglist):
                                self.error(self.source, t.lineno, "Macro %s requires %d arguments" % (t.value, len(m.arglist)))
                                i = j + tokcount
                            elif m.variadic and len(args) < len(m.arglist)-1:
                                if len(m.arglist) > 2:
                                    self.error(self.source, t.lineno, "Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
                                else:
                                    self.error(self.source, t.lineno, "Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
                                i = j + tokcount
                            else:
                                if m.variadic:
                                    if len(args) == len(m.arglist)-1:
                                        args.append([])
                                    else:
                                        args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
                                        del args[len(m.arglist):]

                                # Get macro replacement text
                                rep = self.macro_expand_args(m, args)
                                rep = self.expand_macros(rep, expanded)
                                for r in rep:
                                    r.lineno = t.lineno
                                tokens[i:j+tokcount] = rep
                                i += len(rep)
                    del expanded[t.value]
                    continue
                elif t.value == '__LINE__':
                    t.type = self.t_INTEGER
                    t.value = self.t_INTEGER_TYPE(t.lineno)

            i += 1
        return tokens
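Putting define(), collect_args() and the patch machinery together (continuing the same sketch; Python 2 era, like the rest of this file):

    p.define("SQUARE(x) ((x)*(x))")
    print("".join(t.value for t in p.expand_macros(p.tokenize("SQUARE(3)"))))   # ((3)*(3))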
    # ----------------------------------------------------------------------
    # evalexpr()
    #
    # Evaluate an expression token sequence for the purposes of evaluating
    # integral expressions.
    # ----------------------------------------------------------------------

    def evalexpr(self, tokens):
        # tokens = tokenize(line)
        # Search for defined macros
        i = 0
        while i < len(tokens):
            if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
                j = i + 1
                needparen = False
                result = "0L"
                while j < len(tokens):
                    if tokens[j].type in self.t_WS:
                        j += 1
                        continue
                    elif tokens[j].type == self.t_ID:
                        if tokens[j].value in self.macros:
                            result = "1L"
                        else:
                            result = "0L"
                        if not needparen: break
                    elif tokens[j].value == '(':
                        needparen = True
                    elif tokens[j].value == ')':
                        break
                    else:
                        self.error(self.source, tokens[i].lineno, "Malformed defined()")
                    j += 1
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE(result)
                del tokens[i+1:j+1]
            i += 1
        tokens = self.expand_macros(tokens)
        for i, t in enumerate(tokens):
            if t.type == self.t_ID:
                tokens[i] = copy.copy(t)
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE("0L")
            elif t.type == self.t_INTEGER:
                tokens[i] = copy.copy(t)
                # Strip off any trailing suffixes
                tokens[i].value = str(tokens[i].value)
                while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
                    tokens[i].value = tokens[i].value[:-1]

        expr = "".join([str(x.value) for x in tokens])
        expr = expr.replace("&&", " and ")
        expr = expr.replace("||", " or ")
        # Rewrite '!' as 'not', leaving '!=' alone (a plain replace would corrupt it)
        expr = re.sub(r'!(?!=)', ' not ', expr)
        try:
            result = eval(expr)
        except StandardError:
            self.error(self.source, tokens[0].lineno, "Couldn't evaluate expression")
            result = 0
        return result
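Continuing the sketch, a conditional expression evaluates like this:

    p.define("FOO 1")
    print(p.evalexpr(p.tokenize("defined(FOO) && !0")))   # -> True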
    # ----------------------------------------------------------------------
    # parsegen()
    #
    # Parse an input string.
    # ----------------------------------------------------------------------
    def parsegen(self, input, source=None):

        # Replace trigraph sequences
        t = trigraph(input)
        lines = self.group_lines(t)

        if not source:
            source = ""

        self.define("__FILE__ \"%s\"" % source)

        self.source = source
        chunk = []
        enable = True
        iftrigger = False
        ifstack = []

        for x in lines:
            for i, tok in enumerate(x):
                if tok.type not in self.t_WS: break
            if tok.value == '#':
                # Preprocessor directive

                for tok in x:
                    if tok.type in self.t_WS and '\n' in tok.value:
                        chunk.append(tok)

                dirtokens = self.tokenstrip(x[i+1:])
                if dirtokens:
                    name = dirtokens[0].value
                    args = self.tokenstrip(dirtokens[1:])
                else:
                    name = ""
                    args = []

                if name == 'define':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.define(args)
                elif name == 'include':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        oldfile = self.macros['__FILE__']
                        for tok in self.include(args):
                            yield tok
                        self.macros['__FILE__'] = oldfile
                        self.source = source
                elif name == 'undef':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.undef(args)
                elif name == 'ifdef':
                    ifstack.append((enable, iftrigger))
                    if enable:
                        if not args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'ifndef':
                    ifstack.append((enable, iftrigger))
                    if enable:
                        if args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'if':
                    ifstack.append((enable, iftrigger))
                    if enable:
                        result = self.evalexpr(args)
                        if not result:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'elif':
                    if ifstack:
                        if ifstack[-1][0]:            # We only pay attention if outer "if" allows this
                            if enable:                # If already true, we flip enable False
                                enable = False
                            elif not iftrigger:       # If False, but not triggered yet, we'll check expression
                                result = self.evalexpr(args)
                                if result:
                                    enable = True
                                    iftrigger = True
                    else:
                        self.error(self.source, dirtokens[0].lineno, "Misplaced #elif")

                elif name == 'else':
                    if ifstack:
                        if ifstack[-1][0]:
                            if enable:
                                enable = False
                            elif not iftrigger:
                                enable = True
                                iftrigger = True
                    else:
                        self.error(self.source, dirtokens[0].lineno, "Misplaced #else")

                elif name == 'endif':
                    if ifstack:
                        enable, iftrigger = ifstack.pop()
                    else:
                        self.error(self.source, dirtokens[0].lineno, "Misplaced #endif")
                else:
                    # Unknown preprocessor directive
                    pass

            else:
                # Normal text
                if enable:
                    chunk.extend(x)

        for tok in self.expand_macros(chunk):
            yield tok
        chunk = []
    # ----------------------------------------------------------------------
    # include()
    #
    # Implementation of file-inclusion
    # ----------------------------------------------------------------------

    def include(self, tokens):
        # Try to extract the filename and then process an include file
        if not tokens:
            return
        if tokens:
            if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
                tokens = self.expand_macros(tokens)

            if tokens[0].value == '<':
                # Include <...>
                i = 1
                while i < len(tokens):
                    if tokens[i].value == '>':
                        break
                    i += 1
                else:
                    print("Malformed #include <...>")
                    return
                filename = "".join([x.value for x in tokens[1:i]])
                path = self.path + [""] + self.temp_path
            elif tokens[0].type == self.t_STRING:
                filename = tokens[0].value[1:-1]
                path = self.temp_path + [""] + self.path
            else:
                print("Malformed #include statement")
                return
        for p in path:
            iname = os.path.join(p, filename)
            try:
                data = open(iname, "r").read()
                dname = os.path.dirname(iname)
                if dname:
                    self.temp_path.insert(0, dname)
                for tok in self.parsegen(data, filename):
                    yield tok
                if dname:
                    del self.temp_path[0]
                break
            except IOError:
                pass
        else:
            print("Couldn't find '%s'" % filename)
    # ----------------------------------------------------------------------
    # define()
    #
    # Define a new macro
    # ----------------------------------------------------------------------

    def define(self, tokens):
        if isinstance(tokens, (str, unicode)):
            tokens = self.tokenize(tokens)

        linetok = tokens
        try:
            name = linetok[0]
            if len(linetok) > 1:
                mtype = linetok[1]
            else:
                mtype = None
            if not mtype:
                m = Macro(name.value, [])
                self.macros[name.value] = m
            elif mtype.type in self.t_WS:
                # A normal macro
                m = Macro(name.value, self.tokenstrip(linetok[2:]))
                self.macros[name.value] = m
            elif mtype.value == '(':
                # A macro with arguments
                tokcount, args, positions = self.collect_args(linetok[1:])
                variadic = False
                for a in args:
                    if variadic:
                        print("No more arguments may follow a variadic argument")
                        break
                    astr = "".join([str(_i.value) for _i in a])
                    if astr == "...":
                        variadic = True
                        a[0].type = self.t_ID
                        a[0].value = '__VA_ARGS__'
                        del a[1:]
                        continue
                    elif astr[-3:] == "..." and a[0].type == self.t_ID:
                        variadic = True
                        del a[1:]
                        # If, for some reason, "." is part of the identifier, strip off the name for the purposes
                        # of macro expansion
                        if a[0].value[-3:] == '...':
                            a[0].value = a[0].value[:-3]
                        continue
                    if len(a) > 1 or a[0].type != self.t_ID:
                        print("Invalid macro argument")
                        break
                else:
                    mvalue = self.tokenstrip(linetok[1+tokcount:])
                    i = 0
                    while i < len(mvalue):
                        if i+1 < len(mvalue):
                            if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
                                del mvalue[i]
                                continue
                            elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
                                del mvalue[i+1]
                        i += 1
                    m = Macro(name.value, mvalue, [x[0].value for x in args], variadic)
                    self.macro_prescan(m)
                    self.macros[name.value] = m
            else:
                print("Bad macro definition")
        except LookupError:
            print("Bad macro definition")
    # ----------------------------------------------------------------------
    # undef()
    #
    # Undefine a macro
    # ----------------------------------------------------------------------

    def undef(self, tokens):
        id = tokens[0].value
        try:
            del self.macros[id]
        except LookupError:
            pass

    # ----------------------------------------------------------------------
    # parse()
    #
    # Parse input text.
    # ----------------------------------------------------------------------
    def parse(self, input, source=None, ignore={}):
        self.ignore = ignore
        self.parser = self.parsegen(input, source)

    # ----------------------------------------------------------------------
    # token()
    #
    # Method to return individual tokens
    # ----------------------------------------------------------------------
    def token(self):
        try:
            while True:
                tok = next(self.parser)
                if tok.type not in self.ignore: return tok
        except StopIteration:
            self.parser = None
            return None

if __name__ == '__main__':
    import ply.lex as lex
    lexer = lex.lex()

    # Run a preprocessor
    import sys
    f = open(sys.argv[1])
    input = f.read()

    p = Preprocessor(lexer)
    p.parse(input, sys.argv[1])
    while True:
        tok = p.token()
        if not tok: break
        print(p.source, tok)
@@ -1,133 +0,0 @@
# ----------------------------------------------------------------------
# ctokens.py
#
# Token specifications for symbols in ANSI C and C++. This file is
# meant to be used as a library in other tokenizers.
# ----------------------------------------------------------------------

# Reserved words

tokens = [
    # Literals (identifier, integer constant, float constant, string constant, char const)
    'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST',

    # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
    'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
    'LOR', 'LAND', 'LNOT',
    'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',

    # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
    'LSHIFTEQUAL', 'RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',

    # Increment/decrement (++,--)
    'PLUSPLUS', 'MINUSMINUS',

    # Structure dereference (->)
    'ARROW',

    # Ternary operator (?)
    'TERNARY',

    # Delimiters ( ) [ ] { } , . ; :
    'LPAREN', 'RPAREN',
    'LBRACKET', 'RBRACKET',
    'LBRACE', 'RBRACE',
    'COMMA', 'PERIOD', 'SEMI', 'COLON',

    # Ellipsis (...)
    'ELLIPSIS',

    # Comments (the rules below return them, so PLY requires them declared)
    'COMMENT', 'CPPCOMMENT',
]

# Operators
# (Rule names below must match the names in the token list above, or PLY rejects the module.)
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MOD = r'%'
t_OR = r'\|'
t_AND = r'&'
t_NOT = r'~'
t_XOR = r'\^'
t_LSHIFT = r'<<'
t_RSHIFT = r'>>'
t_LOR = r'\|\|'
t_LAND = r'&&'
t_LNOT = r'!'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_NE = r'!='

# Assignment operators

t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_MODEQUAL = r'%='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='
t_LSHIFTEQUAL = r'<<='
t_RSHIFTEQUAL = r'>>='
t_ANDEQUAL = r'&='
t_OREQUAL = r'\|='
t_XOREQUAL = r'\^='

# Increment/decrement
t_PLUSPLUS = r'\+\+'
t_MINUSMINUS = r'--'

# ->
t_ARROW = r'->'

# ?
t_TERNARY = r'\?'

# Delimiters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_COMMA = r','
t_PERIOD = r'\.'
t_SEMI = r';'
t_COLON = r':'
t_ELLIPSIS = r'\.\.\.'

# Identifiers
t_ID = r'[A-Za-z_][A-Za-z0-9_]*'

# Integer literal
t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

# Floating literal
t_FCONST = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

# String literal
t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

# Character constant 'c' or L'c'
t_CCONST = r'(L)?\'([^\\\n]|(\\.))*?\''

# Comment (C-Style)
def t_COMMENT(t):
    r'/\*(.|\n)*?\*/'
    t.lexer.lineno += t.value.count('\n')
    return t

# Comment (C++-Style)
def t_CPPCOMMENT(t):
    r'//.*\n'
    t.lexer.lineno += 1
    return t
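A minimal sketch of consuming this module as a library, which is its stated purpose (assuming it is importable as ctokens; the importing module supplies what ctokens does not define):

    import ply.lex as lex
    from ctokens import *        # brings in `tokens` and the t_* rules above

    t_ignore = ' \t'             # ctokens defines no whitespace or error handling

    def t_error(t):
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input("x += 42;")
    for tok in iter(lexer.token, None):
        print(tok.type, tok.value)   # ID x, PLUSEQUAL +=, ICONST 42, SEMI ;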
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,187 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import argparse
import importlib
import os

from ripple.ledger import LedgerNumber
from ripple.util import File
from ripple.util import Log
from ripple.util import PrettyPrint
from ripple.util import Range
from ripple.util.Function import Function

NAME = 'LedgerTool'
VERSION = '0.1'
NONE = '(none)'

_parser = argparse.ArgumentParser(
    prog=NAME,
    description='Retrieve and process Ripple ledgers.',
    epilog=LedgerNumber.HELP,
)

# Positional arguments.
_parser.add_argument(
    'command',
    nargs='*',
    help='Command to execute.'
)

# Flag arguments.
_parser.add_argument(
    '--binary',
    action='store_true',
    help='If true, searches are binary; by default, linear search is used.',
)

_parser.add_argument(
    '--cache',
    default='~/.local/share/ripple/ledger',
    help='The cache directory.',
)

_parser.add_argument(
    '--complete',
    action='store_true',
    help='If set, only match complete ledgers.',
)

_parser.add_argument(
    '--condition', '-c',
    help='The name of a condition function used to match ledgers.',
)

_parser.add_argument(
    '--config',
    help='The rippled configuration file name.',
)

_parser.add_argument(
    '--database', '-d',
    nargs='*',
    default=NONE,
    help='Specify a database.',
)

_parser.add_argument(
    '--display',
    help='Specify a function to display ledgers.',
)

_parser.add_argument(
    '--full', '-f',
    action='store_true',
    help='If true, request full ledgers.',
)

_parser.add_argument(
    '--indent', '-i',
    type=int,
    default=2,
    help='How many spaces to indent when displaying in JSON.',
)

_parser.add_argument(
    '--offline', '-o',
    action='store_true',
    help='If true, work entirely from cache; do not try to contact the server.',
)

_parser.add_argument(
    '--position', '-p',
    choices=['all', 'first', 'last'],
    default='last',
    help='Select which ledgers to display.',
)

_parser.add_argument(
    '--rippled', '-r',
    help='The filename of a rippled binary for retrieving ledgers.',
)

_parser.add_argument(
    '--server', '-s',
    help='IP address of a rippled JSON server.',
)

_parser.add_argument(
    '--utc', '-u',
    action='store_true',
    help='If true, display times in UTC rather than local time.',
)

_parser.add_argument(
    '--validations',
    default=3,
    help='The number of validations needed before considering a ledger valid.',
)

_parser.add_argument(
    '--version',
    action='version',
    version='%(prog)s ' + VERSION,
    help='Print the current version of %(prog)s.',
)

_parser.add_argument(
    '--verbose', '-v',
    action='store_true',
    help='If true, give status messages on stderr.',
)

_parser.add_argument(
    '--window', '-w',
    type=int,
    default=0,
    help='How many ledgers to display around the matching ledger.',
)

_parser.add_argument(
    '--yes', '-y',
    action='store_true',
    help='If true, don\'t ask for confirmation on large commands.',
)

# Read the arguments from the command line.
ARGS = _parser.parse_args()
ARGS.NONE = NONE

Log.VERBOSE = ARGS.verbose

# Now remove any items that look like ledger numbers from the command line.
_command = ARGS.command
_parts = (ARGS.command, ARGS.ledgers) = ([], [])

for c in _command:
    _parts[Range.is_range(c, *LedgerNumber.LEDGERS)].append(c)

ARGS.command = ARGS.command or ['print' if ARGS.ledgers else 'info']

ARGS.cache = File.normalize(ARGS.cache)

if not ARGS.ledgers:
    if ARGS.condition:
        Log.warn('--condition needs a range of ledgers')
    if ARGS.display:
        Log.warn('--display needs a range of ledgers')

ARGS.condition = Function(
    ARGS.condition or 'all_ledgers', 'ripple.ledger.conditions')
ARGS.display = Function(
    ARGS.display or 'ledger_number', 'ripple.ledger.displays')

if ARGS.window < 0:
    raise ValueError('Window cannot be negative: --window=%d' %
                     ARGS.window)

PrettyPrint.INDENT = (ARGS.indent * ' ')

_loaders = (ARGS.database != NONE) + bool(ARGS.rippled) + bool(ARGS.server)

if not _loaders:
    ARGS.rippled = 'rippled'

elif _loaders > 1:
    raise ValueError('At most one of --database, --rippled and --server '
                     'may be specified')
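The `_parts[...]` line above relies on boolean indexing: False selects index 0 (commands), True selects index 1 (ledgers). A standalone sketch of the same trick, with a hypothetical stand-in for Range.is_range:

    parts = (commands, ledgers) = ([], [])
    for word in ['info', '100-200', 'print']:
        looks_like_range = any(ch.isdigit() for ch in word)   # stand-in for Range.is_range
        parts[looks_like_range].append(word)
    print(commands, ledgers)   # ['info', 'print'] ['100-200']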
@@ -1,78 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json
import os
import subprocess

from ripple.ledger.Args import ARGS
from ripple.util import ConfigFile
from ripple.util import Database
from ripple.util import File
from ripple.util import Log
from ripple.util import Range

LEDGER_QUERY = """
SELECT
    L.*, count(1) validations
FROM
    (select LedgerHash, LedgerSeq from Ledgers ORDER BY LedgerSeq DESC) L
    JOIN Validations V
    ON (V.LedgerHash = L.LedgerHash)
GROUP BY L.LedgerHash
HAVING validations >= {validation_quorum}
ORDER BY 2;
"""

COMPLETE_QUERY = """
SELECT
    L.LedgerSeq, count(*) validations
FROM
    (select LedgerHash, LedgerSeq from Ledgers ORDER BY LedgerSeq) L
    JOIN Validations V
    ON (V.LedgerHash = L.LedgerHash)
GROUP BY L.LedgerHash
HAVING validations >= :validation_quorum
ORDER BY 2;
"""

_DATABASE_NAME = 'ledger.db'

USE_PLACEHOLDERS = False

class DatabaseReader(object):
    def __init__(self, config):
        assert ARGS.database != ARGS.NONE
        database = ARGS.database or config['database_path']
        if not database.endswith(_DATABASE_NAME):
            database = os.path.join(database, _DATABASE_NAME)
        if USE_PLACEHOLDERS:
            cursor = Database.fetchall(
                database, COMPLETE_QUERY, config)
        else:
            cursor = Database.fetchall(
                database, LEDGER_QUERY.format(**config), {})
        self.complete = [c[1] for c in cursor]

    def name_to_ledger_index(self, ledger_name, is_full=False):
        if not self.complete:
            return None
        if ledger_name == 'closed':
            return self.complete[-1]
        if ledger_name == 'current':
            return None
        if ledger_name == 'validated':
            return self.complete[-1]

    def get_ledger(self, name, is_full=False):
        cmd = ['ledger', str(name)]
        if is_full:
            cmd.append('full')
        response = self._command(*cmd)
        result = response.get('ledger')
        if result:
            return result
        error = response['error']
        etext = _ERROR_TEXT.get(error)
        if etext:
            error = '%s (%s)' % (etext, error)
        Log.fatal(_ERROR_TEXT.get(error, error))
@@ -1,18 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import Range

FIRST_EVER = 32570

LEDGERS = {
    'closed': 'the most recently closed ledger',
    'current': 'the current ledger',
    'first': 'the first complete ledger on this server',
    'last': 'the last complete ledger on this server',
    'validated': 'the most recently validated ledger',
}

HELP = """
Ledgers are either represented by a number, or one of the special ledgers;
""" + ',\n'.join('%s, %s' % (k, v) for k, v in sorted(LEDGERS.items()))
@@ -1,68 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json
import os
import subprocess

from ripple.ledger.Args import ARGS
from ripple.util import File
from ripple.util import Log
from ripple.util import Range

_ERROR_CODE_REASON = {
    62: 'No rippled server is running.',
}

_ERROR_TEXT = {
    'lgrNotFound': 'The ledger you requested was not found.',
    'noCurrent': 'The server has no current ledger.',
    'noNetwork': 'The server did not respond to your request.',
}

_DEFAULT_ERROR_ = "Couldn't connect to server."

class RippledReader(object):
    def __init__(self, config):
        fname = File.normalize(ARGS.rippled)
        if not os.path.exists(fname):
            raise Exception('No rippled found at %s.' % fname)
        self.cmd = [fname]
        if ARGS.config:
            self.cmd.extend(['--conf', File.normalize(ARGS.config)])
        self.info = self._command('server_info')['info']
        c = self.info.get('complete_ledgers')
        if c == 'empty':
            self.complete = []
        else:
            self.complete = sorted(Range.from_string(c))

    def name_to_ledger_index(self, ledger_name, is_full=False):
        return self.get_ledger(ledger_name, is_full)['ledger_index']

    def get_ledger(self, name, is_full=False):
        cmd = ['ledger', str(name)]
        if is_full:
            cmd.append('full')
        response = self._command(*cmd)
        result = response.get('ledger')
        if result:
            return result
        error = response['error']
        etext = _ERROR_TEXT.get(error)
        if etext:
            error = '%s (%s)' % (etext, error)
        Log.fatal(_ERROR_TEXT.get(error, error))

    def _command(self, *cmds):
        cmd = self.cmd + list(cmds)
        try:
            data = subprocess.check_output(cmd, stderr=subprocess.PIPE)
        except subprocess.CalledProcessError as e:
            raise Exception(_ERROR_CODE_REASON.get(
                e.returncode, _DEFAULT_ERROR_))

        part = json.loads(data)
        try:
            return part['result']
        except:
            raise ValueError(part.get('error', 'unknown error'))
@@ -1,51 +0,0 @@
# Constants from ripple/protocol/SField.h

# special types
STI_UNKNOWN = -2
STI_DONE = -1
STI_NOTPRESENT = 0

# types (common)
STI_UINT16 = 1
STI_UINT32 = 2
STI_UINT64 = 3
STI_HASH128 = 4
STI_HASH256 = 5
STI_AMOUNT = 6
STI_VL = 7
STI_ACCOUNT = 8
# 9-13 are reserved
STI_OBJECT = 14
STI_ARRAY = 15

# types (uncommon)
STI_UINT8 = 16
STI_HASH160 = 17
STI_PATHSET = 18
STI_VECTOR256 = 19

# high level types
# cannot be serialized inside other types
STI_TRANSACTION = 10001
STI_LEDGERENTRY = 10002
STI_VALIDATION = 10003
STI_METADATA = 10004

def field_code(sti, name):
    if sti < 16:
        if name < 16:
            bytes = [(sti << 4) + name]
        else:
            bytes = [sti << 4, name]
    elif name < 16:
        bytes = [name, sti]
    else:
        bytes = [0, sti, name]
    return ''.join(chr(i) for i in bytes)

# Selected constants from SField.cpp

sfSequence = field_code(STI_UINT32, 4)
sfPublicKey = field_code(STI_VL, 1)
sfSigningPubKey = field_code(STI_VL, 3)
sfSignature = field_code(STI_VL, 6)
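To make the packing above concrete, a quick illustrative check (mine, not part of the original file): a common type and a small field number share a single byte, with the type in the high nibble and the field number in the low nibble.

# Illustration only: with STI_UINT32 = 2, sfSequence packs into the byte 0x24.
assert field_code(STI_UINT32, 4) == chr(0x24)
# A field number of 16 or more spills into a second byte.
assert field_code(STI_VL, 16) == chr(STI_VL << 4) + chr(16)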
@@ -1,24 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import sys

from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util import Range
from ripple.util import Search

def search(server):
    """Yields a stream of ledger numbers that match the given condition."""
    condition = lambda number: ARGS.condition(server, number)
    ledgers = server.ledgers
    if ARGS.binary:
        try:
            position = Search.FIRST if ARGS.position == 'first' else Search.LAST
            yield Search.binary_search(
                ledgers[0], ledgers[-1], condition, position)
        except:
            Log.fatal('No ledgers matching condition "%s".' % ARGS.condition,
                      file=sys.stderr)
    else:
        for x in Search.linear_search(ledgers, condition):
            yield x
@@ -1,55 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json
import os

from ripple.ledger import DatabaseReader, RippledReader
from ripple.ledger.Args import ARGS
from ripple.util.FileCache import FileCache
from ripple.util import ConfigFile
from ripple.util import File
from ripple.util import Range

class Server(object):
    def __init__(self):
        cfg_file = File.normalize(ARGS.config or 'rippled.cfg')
        self.config = ConfigFile.read(open(cfg_file))
        if ARGS.database != ARGS.NONE:
            reader = DatabaseReader.DatabaseReader(self.config)
        else:
            reader = RippledReader.RippledReader(self.config)

        self.reader = reader
        self.complete = reader.complete

        names = {
            'closed': reader.name_to_ledger_index('closed'),
            'current': reader.name_to_ledger_index('current'),
            'validated': reader.name_to_ledger_index('validated'),
            'first': self.complete[0] if self.complete else None,
            'last': self.complete[-1] if self.complete else None,
        }
        self.__dict__.update(names)
        self.ledgers = sorted(Range.join_ranges(*ARGS.ledgers, **names))

        def make_cache(is_full):
            name = 'full' if is_full else 'summary'
            filepath = os.path.join(ARGS.cache, name)
            creator = lambda n: reader.get_ledger(n, is_full)
            return FileCache(filepath, creator)
        self._caches = [make_cache(False), make_cache(True)]

    def info(self):
        return self.reader.info

    def cache(self, is_full):
        return self._caches[is_full]

    def get_ledger(self, number, is_full=False):
        num = int(number)
        save_in_cache = num in self.complete
        can_create = (not ARGS.offline and
                      self.complete and
                      self.complete[0] <= num - 1)
        cache = self.cache(is_full)
        return cache.get_data(number, save_in_cache, can_create)
@@ -1,5 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

class ServerReader(object):
    def __init__(self, config):
        raise ValueError('Direct server connections are not yet implemented.')
@@ -1,34 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util import Range
from ripple.util.PrettyPrint import pretty_print

SAFE = True

HELP = """cache
List the ledgers in the local cache, or clear it with 'cache clear'."""

def cache(server, clear=False):
    cache = server.cache(ARGS.full)
    name = ['summary', 'full'][ARGS.full]
    files = cache.file_count()
    if not files:
        Log.error('No files in %s cache.' % name)

    elif clear:
        if not clear.strip() == 'clear':
            raise Exception("Don't understand 'clear %s'." % clear)
        if not ARGS.yes:
            yes = raw_input('OK to clear %s cache? (y/N) ' % name)
            if not yes.lower().startswith('y'):
                Log.out('Cancelled.')
                return
        cache.clear()
        Log.out('%s cache cleared - %d file%s deleted.' %
                (name.capitalize(), files, '' if files == 1 else 's'))

    else:
        caches = (int(c) for c in cache.cache_list())
        Log.out(Range.to_string(caches))
@@ -1,21 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util import Range
from ripple.util.PrettyPrint import pretty_print

SAFE = True

HELP = 'info - return server_info'

def info(server):
    Log.out('first =', server.first)
    Log.out('last =', server.last)
    Log.out('closed =', server.closed)
    Log.out('current =', server.current)
    Log.out('validated =', server.validated)
    Log.out('complete =', Range.to_string(server.complete))

    if ARGS.full:
        Log.out(pretty_print(server.info()))
@@ -1,15 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.ledger.Args import ARGS
from ripple.ledger import SearchLedgers

import json

SAFE = True

HELP = """print

Print the ledgers to stdout. The default command."""

def run_print(server):
    ARGS.display(print, server, SearchLedgers.search(server))
@@ -1,4 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

def all_ledgers(server, ledger_number):
    return True
@@ -1,89 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from functools import wraps

import jsonpath_rw

from ripple.ledger.Args import ARGS
from ripple.util import Dict
from ripple.util import Log
from ripple.util import Range
from ripple.util.Decimal import Decimal
from ripple.util.PrettyPrint import pretty_print, Streamer

TRANSACT_FIELDS = (
    'accepted',
    'close_time_human',
    'closed',
    'ledger_index',
    'total_coins',
    'transactions',
)

LEDGER_FIELDS = (
    'accepted',
    'accountState',
    'close_time_human',
    'closed',
    'ledger_index',
    'total_coins',
    'transactions',
)

def _dict_filter(d, keys):
    return dict((k, v) for (k, v) in d.items() if k in keys)

def ledger_number(print, server, numbers):
    print(Range.to_string(numbers))

def display(f):
    @wraps(f)
    def wrapper(printer, server, numbers, *args):
        streamer = Streamer(printer=printer)
        for number in numbers:
            ledger = server.get_ledger(number, ARGS.full)
            if ledger:
                streamer.add(number, f(ledger, *args))
        streamer.finish()
    return wrapper

def extractor(f):
    @wraps(f)
    def wrapper(printer, server, numbers, *paths):
        try:
            find = jsonpath_rw.parse('|'.join(paths)).find
        except:
            raise ValueError("Can't understand jsonpath '%s'." %
                             '|'.join(paths))
        def fn(ledger, *args):
            return f(find(ledger), *args)
        display(fn)(printer, server, numbers)
    return wrapper

@display
def ledger(ledger, full=False):
    if ARGS.full:
        if full:
            return ledger

        ledger = Dict.prune(ledger, 1, False)

    return _dict_filter(ledger, LEDGER_FIELDS)

@display
def prune(ledger, level=1):
    return Dict.prune(ledger, level, False)

@display
def transact(ledger):
    return _dict_filter(ledger, TRANSACT_FIELDS)

@extractor
def extract(finds):
    return dict((str(f.full_path), str(f.value)) for f in finds)

@extractor
def sum(finds):
    d = Decimal()
    for f in finds:
        d.accumulate(f.value)
    return [str(d), len(finds)]
@@ -1,94 +0,0 @@
#!/usr/bin/env python

from hashlib import sha256

#
# Human strings are base-58 with a
# version prefix and a checksum suffix.
#
# Copied from ripple/protocol/RippleAddress.h
#

VER_NONE = 1
VER_NODE_PUBLIC = 28
VER_NODE_PRIVATE = 32
VER_ACCOUNT_ID = 0
VER_ACCOUNT_PUBLIC = 35
VER_ACCOUNT_PRIVATE = 34
VER_FAMILY_GENERATOR = 41
VER_FAMILY_SEED = 33

ALPHABET = 'rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz'

VERSION_NAME = {
    VER_NONE: 'VER_NONE',
    VER_NODE_PUBLIC: 'VER_NODE_PUBLIC',
    VER_NODE_PRIVATE: 'VER_NODE_PRIVATE',
    VER_ACCOUNT_ID: 'VER_ACCOUNT_ID',
    VER_ACCOUNT_PUBLIC: 'VER_ACCOUNT_PUBLIC',
    VER_ACCOUNT_PRIVATE: 'VER_ACCOUNT_PRIVATE',
    VER_FAMILY_GENERATOR: 'VER_FAMILY_GENERATOR',
    VER_FAMILY_SEED: 'VER_FAMILY_SEED'
}

class Alphabet(object):
    def __init__(self, radix, digit_to_char, char_to_digit):
        self.radix = radix
        self.digit_to_char = digit_to_char
        self.char_to_digit = char_to_digit

    def transcode_from(self, s, source_alphabet):
        n, zero_count = source_alphabet._digits_to_number(s)
        digits = []
        while n > 0:
            n, digit = divmod(n, self.radix)
            digits.append(self.digit_to_char(digit))

        s = ''.join(digits)
        return self.digit_to_char(0) * zero_count + s[::-1]

    def _digits_to_number(self, digits):
        stripped = digits.lstrip(self.digit_to_char(0))
        n = 0
        for d in stripped:
            n *= self.radix
            n += self.char_to_digit(d)
        return n, len(digits) - len(stripped)

_INVERSE_INDEX = dict((c, i) for (i, c) in enumerate(ALPHABET))

# In base 58 encoding, the digits come from the ALPHABET string.
BASE58 = Alphabet(len(ALPHABET), ALPHABET.__getitem__, _INVERSE_INDEX.get)

# In base 256 encoding, each digit is just a character between 0 and 255.
BASE256 = Alphabet(256, chr, ord)

def encode(b):
    return BASE58.transcode_from(b, BASE256)

def decode(b):
    return BASE256.transcode_from(b, BASE58)

def checksum(b):
    """Returns a 4-byte checksum of a binary."""
    return sha256(sha256(b).digest()).digest()[:4]

def encode_version(ver, b):
    """Encodes a version encoding and a binary as a human string."""
    b = chr(ver) + b
    return encode(b + checksum(b))

def decode_version(s):
    """Decodes a human base-58 string into its version encoding and binary."""
    b = decode(s)
    body, check = b[:-4], b[-4:]
    assert check == checksum(body), ('Bad checksum for', s)
    return ord(body[0]), body[1:]

def version_name(ver):
    return VERSION_NAME.get(ver) or ('(unknown version %s)' % ver)

def check_version(version, expected):
    if version != expected:
        raise ValueError('Expected version %s but got %s' % (
            version_name(expected), version_name(version)))
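A minimal round-trip sketch of the helpers above (my example values, Python 2): encode_version prepends the version byte, appends the 4-byte double-SHA-256 checksum, and base-58 encodes the result, so decode_version is its exact inverse.

# Illustration only: a hypothetical 3-byte payload.
payload = '\x01\x02\x03'
human = encode_version(VER_NODE_PUBLIC, payload)
ver, body = decode_version(human)
assert (ver, body) == (VER_NODE_PUBLIC, payload)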
@@ -1,40 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from collections import defaultdict

class Cache(object):
    def __init__(self):
        self._value_to_index = {}
        self._index_to_value = []

    def value_to_index(self, value, **kwds):
        index = self._value_to_index.get(value, None)
        if index is None:
            index = len(self._index_to_value)
            self._index_to_value.append((value, kwds))
            self._value_to_index[value] = index
        return index

    def index_to_value(self, index):
        return self._index_to_value[index]

def NamedCache():
    return defaultdict(Cache)

def cache_by_key(d, keyfunc=None, exclude=None):
    cache = defaultdict(Cache)
    exclude = exclude or None
    keyfunc = keyfunc or (lambda x: x)

    # Note: as written, visit() walks a structure without interning anything
    # into the cache, and d is never visited, so this function is incomplete.
    def visit(item):
        if isinstance(item, list):
            for i, x in enumerate(item):
                item[i] = visit(x)

        elif isinstance(item, dict):
            for k, v in item.items():
                item[k] = visit(v)

        return item

    return cache
@@ -1,77 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

# Code taken from github/rec/grit.

import os
import sys

from collections import namedtuple

from ripple.ledger.Args import ARGS
from ripple.util import Log

Command = namedtuple('Command', 'function help safe')

def make_command(module):
    name = module.__name__.split('.')[-1].lower()
    return name, Command(getattr(module, name, None) or
                         getattr(module, 'run_' + name),
                         getattr(module, 'HELP'),
                         getattr(module, 'SAFE', False))

class CommandList(object):
    def __init__(self, *args, **kwds):
        self.registry = {}
        self.register(*args, **kwds)

    def register(self, *modules, **kwds):
        for module in modules:
            name, command = make_command(module)
            self.registry[name] = command

        for k, v in kwds.items():
            if not isinstance(v, (list, tuple)):
                v = [v]
            self.register_one(k, *v)

    def keys(self):
        return self.registry.keys()

    def register_one(self, name, function, help='', safe=False):
        assert name not in self.registry
        self.registry[name] = Command(function, help, safe)

    def _get(self, command):
        command = command.lower()
        c = self.registry.get(command)
        if c:
            return command, c
        commands = [c for c in self.registry if c.startswith(command)]
        if len(commands) == 1:
            command = commands[0]
            return command, self.registry[command]
        if not commands:
            raise ValueError('No such command: %s. Commands are %s.' %
                             (command, ', '.join(sorted(self.registry))))
        if len(commands) > 1:
            raise ValueError('Command %s was ambiguous: %s.' %
                             (command, ', '.join(commands)))

    def get(self, command):
        return self._get(command)[1]

    def run(self, command, *args):
        return self.get(command).function(*args)

    def run_safe(self, command, *args):
        name, cmd = self._get(command)
        if not (ARGS.yes or cmd.safe):
            confirm = raw_input('OK to execute "rl %s %s"? (y/N) ' %
                                (name, ' '.join(args)))
            if not confirm.lower().startswith('y'):
                Log.error('Cancelled.')
                return
        cmd.function(*args)

    def help(self, command):
        # HELP entries are strings, so return the attribute rather than
        # calling it.
        return self.get(command).help
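A small usage sketch (hypothetical commands, not the registered ledger modules above): registry lookups accept any unambiguous prefix of a command name.

# Illustration only.
commands = CommandList()
commands.register_one('info', lambda: None, help='show server info', safe=True)
commands.register_one('print', lambda: None, help='print ledgers')
assert commands.get('in').help == 'show server info'  # unambiguous prefix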
@@ -1,54 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json

"""Ripple has a proprietary format for its .cfg files, so we need a reader for
them."""

def read(lines):
    sections = []
    section = []
    for line in lines:
        line = line.strip()
        if (not line) or line[0] == '#':
            continue
        if line.startswith('['):
            if section:
                sections.append(section)
                section = []
        section.append(line)
    if section:
        sections.append(section)

    result = {}
    for section in sections:
        option = section.pop(0)
        assert section, ('No value for option "%s".' % option)
        assert option.startswith('[') and option.endswith(']'), (
            'No option name in block "%s"' % option)
        option = option[1:-1]
        assert option not in result, 'Duplicate option "%s".' % option

        subdict = {}
        items = []
        for part in section:
            if '=' in part:
                assert not items, 'Dictionary mixed with list.'
                k, v = part.split('=', 1)
                assert k not in subdict, 'Repeated dictionary entry ' + k
                subdict[k] = v
            else:
                assert not subdict, 'List mixed with dictionary.'
                if part.startswith('{'):
                    items.append(json.loads(part))
                else:
                    words = part.split()
                    if len(words) > 1:
                        items.append(words)
                    else:
                        items.append(part)
        if len(items) == 1:
            result[option] = items[0]
        else:
            result[option] = items or subdict
    return result
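A sketch of the reader in action on a fabricated snippet: single-value sections collapse to a scalar, and multi-word lines split into lists.

# Illustration only.
lines = """
[validation_quorum]
3

[ips_fixed]
127.0.0.1 51235
""".splitlines()
config = read(lines)
assert config['validation_quorum'] == '3'
assert config['ips_fixed'] == ['127.0.0.1', '51235']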
@@ -1,12 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import sqlite3

def fetchall(database, query, kwds):
    conn = sqlite3.connect(database)
    try:
        cursor = conn.execute(query, kwds)
        return cursor.fetchall()

    finally:
        conn.close()
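A tiny sanity check of fetchall above, using SQLite's in-memory database:

# Illustration only.
rows = fetchall(':memory:', 'SELECT 1 + 1', {})
assert rows == [(2,)]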
@@ -1,46 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

"""Fixed point numbers."""

POSITIONS = 10
POSITIONS_SHIFT = 10 ** POSITIONS

class Decimal(object):
    def __init__(self, desc='0'):
        if isinstance(desc, int):
            self.value = desc
            return
        if desc.startswith('-'):
            sign = -1
            desc = desc[1:]
        else:
            sign = 1
        parts = desc.split('.')
        if len(parts) == 1:
            parts.append('0')
        elif len(parts) > 2:
            raise Exception('Too many decimals in "%s"' % desc)
        number, decimal = parts
        # Fix the number of positions.
        decimal = (decimal + POSITIONS * '0')[:POSITIONS]
        self.value = sign * int(number + decimal)

    def accumulate(self, item):
        if not isinstance(item, Decimal):
            item = Decimal(item)
        self.value += item.value

    def __str__(self):
        if self.value >= 0:
            sign = ''
            value = self.value
        else:
            sign = '-'
            value = -self.value
        number = value // POSITIONS_SHIFT
        decimal = value % POSITIONS_SHIFT

        if decimal:
            # Zero-pad the fraction to full width so '0.05' does not collapse
            # to '0.5'.
            return '%s%s.%s' % (sign, number,
                                str(decimal).zfill(POSITIONS).rstrip('0'))
        else:
            return '%s%s' % (sign, number)
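A quick check of the fixed-point behavior above (my example values): accumulate accepts strings, and fractions keep their leading zeros.

# Illustration only.
d = Decimal('1.05')
d.accumulate('2.95')
assert str(d) == '4'
assert str(Decimal('-0.05')) == '-0.05'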
@@ -1,33 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

def count_all_subitems(x):
    """Count the subitems of a Python object, including the object itself."""
    if isinstance(x, list):
        return 1 + sum(count_all_subitems(i) for i in x)
    if isinstance(x, dict):
        return 1 + sum(count_all_subitems(i) for i in x.itervalues())
    return 1

def prune(item, level, count_recursively=True):
    def subitems(x):
        i = count_all_subitems(x) - 1 if count_recursively else len(x)
        return '1 subitem' if i == 1 else '%d subitems' % i

    assert level >= 0
    if not item:
        return item

    if isinstance(item, list):
        if level:
            return [prune(i, level - 1, count_recursively) for i in item]
        else:
            return '[list with %s]' % subitems(item)

    if isinstance(item, dict):
        if level:
            return dict((k, prune(v, level - 1, count_recursively))
                        for k, v in item.iteritems())
        else:
            return '{dict with %s}' % subitems(item)

    return item
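For illustration (Python 2, since the module uses iteritems): pruning at level 1 keeps the top-level keys and summarizes everything below them.

# Illustration only.
data = {'a': [1, 2, 3], 'b': {'c': 4}}
assert prune(data, 1) == {'a': '[list with 3 subitems]',
                          'b': '{dict with 1 subitem}'}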
@@ -1,7 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import os

def normalize(f):
    f = os.path.join(*f.split('/'))  # For Windows users.
    return os.path.abspath(os.path.expanduser(f))
@@ -1,56 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import gzip
import json
import os

_NONE = object()

class FileCache(object):
    """A two-level cache, which stores expensive results in memory and on disk.
    """
    def __init__(self, cache_directory, creator, open=gzip.open, suffix='.gz'):
        self.cache_directory = cache_directory
        self.creator = creator
        self.open = open
        self.suffix = suffix
        self.cached_data = {}
        if not os.path.exists(self.cache_directory):
            os.makedirs(self.cache_directory)

    def get_file_data(self, name):
        # The filename was never built here; construct it the same way
        # get_data does.
        filename = os.path.join(self.cache_directory, name) + self.suffix
        if os.path.exists(filename):
            return json.load(self.open(filename))

        result = self.creator(name)
        return result

    def get_data(self, name, save_in_cache, can_create, default=None):
        name = str(name)
        result = self.cached_data.get(name, _NONE)
        if result is _NONE:
            filename = os.path.join(self.cache_directory, name) + self.suffix
            if os.path.exists(filename):
                result = json.load(self.open(filename)) or _NONE
            if result is _NONE and can_create:
                result = self.creator(name)
                if save_in_cache:
                    json.dump(result, self.open(filename, 'w'))
        return default if result is _NONE else result

    def _files(self):
        return os.listdir(self.cache_directory)

    def cache_list(self):
        for f in self._files():
            if f.endswith(self.suffix):
                yield f[:-len(self.suffix)]

    def file_count(self):
        return len(self._files())

    def clear(self):
        """Clears both local files and memory."""
        self.cached_data = {}
        for f in self._files():
            os.remove(os.path.join(self.cache_directory, f))
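A hypothetical use of the cache above (the directory name is mine): the creator runs only on a miss, and the result can be persisted for the next run.

# Illustration only.
cache = FileCache('/tmp/ledger-cache', creator=lambda name: {'ledger': name})
data = cache.get_data(12345, save_in_cache=True, can_create=True)
assert data == {'ledger': '12345'}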
@@ -1,82 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

"""A function that can be specified at the command line, with an argument."""

import importlib
import re
import tokenize

from StringIO import StringIO

MATCHER = re.compile(r'([\w.]+)(.*)')

REMAPPINGS = {
    'false': False,
    'true': True,
    'null': None,
    'False': False,
    'True': True,
    'None': None,
}

def eval_arguments(args):
    args = args.strip()
    if not args or (args == '()'):
        return ()
    tokens = list(tokenize.generate_tokens(StringIO(args).readline))
    def remap():
        for type, name, _, _, _ in tokens:
            if type == tokenize.NAME and name not in REMAPPINGS:
                yield tokenize.STRING, '"%s"' % name
            else:
                yield type, name
    untok = tokenize.untokenize(remap())
    if untok[1:-1].strip():
        untok = untok[:-1] + ',)'  # Force a tuple.
    try:
        return eval(untok, REMAPPINGS)
    except Exception as e:
        raise ValueError('Couldn\'t evaluate expression "%s" (became "%s"), '
                         'error "%s"' % (args, untok, str(e)))

class Function(object):
    def __init__(self, desc='', default_path=''):
        self.desc = desc.strip()
        if not self.desc:
            # Make an empty function that does nothing.
            self.args = ()
            self.function = lambda *args, **kwds: None
            return

        m = MATCHER.match(desc)
        if not m:
            raise ValueError('"%s" is not a function' % desc)
        self.function, self.args = (g.strip() for g in m.groups())
        self.args = eval_arguments(self.args)

        if '.' not in self.function:
            if default_path and not default_path.endswith('.'):
                default_path += '.'
            self.function = default_path + self.function
        p, m = self.function.rsplit('.', 1)
        mod = importlib.import_module(p)
        # Errors in modules are swallowed here.
        # except:
        #     raise ValueError('Can\'t find Python module "%s"' % p)

        try:
            self.function = getattr(mod, m)
        except:
            raise ValueError('No function "%s" in module "%s"' % (m, p))

    def __str__(self):
        return self.desc

    def __call__(self, *args, **kwds):
        return self.function(*(args + self.args), **kwds)

    def __eq__(self, other):
        return self.function == other.function and self.args == other.args

    def __ne__(self, other):
        return not (self == other)
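One illustrative property of eval_arguments above (my example, not from the original tests): bare names outside REMAPPINGS are re-tokenized as strings, so callers can skip shell quoting.

# Illustration only.
assert eval_arguments('') == ()
assert eval_arguments('(1, two, false)') == (1, 'two', False)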
@@ -1,21 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import sys

VERBOSE = False

def out(*args, **kwds):
    # Pop the optional printer so it isn't passed through to print() itself.
    kwds.pop('print', print)(*args, file=sys.stdout, **kwds)

def info(*args, **kwds):
    if VERBOSE:
        out(*args, **kwds)

def warn(*args, **kwds):
    out('WARNING:', *args, **kwds)

def error(*args, **kwds):
    out('ERROR:', *args, **kwds)

def fatal(*args, **kwds):
    raise Exception('FATAL: ' + ' '.join(str(a) for a in args))
@@ -1,42 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from functools import wraps
import json

SEPARATORS = ',', ': '
INDENT = ' '

def pretty_print(item):
    return json.dumps(item,
                      sort_keys=True,
                      indent=len(INDENT),
                      separators=SEPARATORS)

class Streamer(object):
    def __init__(self, printer=print):
        # No automatic spacing or carriage returns.
        self.printer = lambda *args: printer(*args, end='', sep='')
        self.first_key = True

    def add(self, key, value):
        if self.first_key:
            self.first_key = False
            self.printer('{')
        else:
            self.printer(',')

        self.printer('\n', INDENT, '"', str(key), '": ')

        pp = pretty_print(value).splitlines()
        if len(pp) > 1:
            for i, line in enumerate(pp):
                if i > 0:
                    self.printer('\n', INDENT)
                self.printer(line)
        else:
            self.printer(pp[0])

    def finish(self):
        if not self.first_key:
            self.first_key = True
            self.printer('\n}')
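A sketch of Streamer above: each add() emits its key and value immediately, so a large JSON object prints incrementally instead of being buffered whole.

# Illustration only.
streamer = Streamer()
streamer.add('first', {'a': 1})
streamer.add('second', [1, 2])
streamer.finish()  # closes the streamed JSON object with '\n}'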
@@ -1,53 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

"""
Convert a discontiguous range of integers to and from a human-friendly form.

Real world example is the server_info.complete_ledgers:
    8252899-8403772,8403824,8403827-8403830,8403834-8403876
"""

def from_string(desc, **aliases):
    if not desc:
        return []
    result = set()
    for d in desc.split(','):
        nums = [int(aliases.get(x) or x) for x in d.split('-')]
        if len(nums) == 1:
            result.add(nums[0])
        elif len(nums) == 2:
            result.update(range(nums[0], nums[1] + 1))
    return result

def to_string(r):
    groups = []
    next_group = []
    for i, x in enumerate(sorted(r)):
        if next_group and (x - next_group[-1]) > 1:
            groups.append(next_group)
            next_group = []
        next_group.append(x)
    if next_group:
        groups.append(next_group)

    def display(g):
        if len(g) == 1:
            return str(g[0])
        else:
            return '%s-%s' % (g[0], g[-1])

    return ','.join(display(g) for g in groups)

def is_range(desc, *names):
    try:
        from_string(desc, **dict((n, 1) for n in names))
        return True
    except ValueError:
        return False

def join_ranges(*ranges, **aliases):
    result = set()
    for r in ranges:
        result.update(from_string(r, **aliases))
    return result
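The docstring's real-world example round-trips through the helpers above, and aliases let symbolic names stand in for numbers:

# Illustration only.
desc = '8252899-8403772,8403824,8403827-8403830,8403834-8403876'
assert to_string(from_string(desc)) == desc
assert sorted(from_string('first-3', first=1)) == [1, 2, 3]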
@@ -1,46 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

FIRST, LAST = range(2)

def binary_search(begin, end, condition, location=FIRST):
    """Search for an i in the interval [begin, end] where condition(i) is true.
    If location is FIRST, return the first such i.
    If location is LAST, return the last such i.
    If there is no such i, then throw an exception.
    """
    b = condition(begin)
    e = condition(end)
    if b and e:
        return begin if location == FIRST else end

    if not (b or e):
        raise ValueError('%d/%d' % (begin, end))

    if b and location is FIRST:
        return begin

    if e and location is LAST:
        return end

    width = end - begin + 1
    if width == 1:
        if not b:
            raise ValueError('%d/%d' % (begin, end))
        return begin
    if width == 2:
        return begin if b else end

    mid = (begin + end) // 2
    m = condition(mid)

    if m == b:
        return binary_search(mid, end, condition, location)
    else:
        return binary_search(begin, mid, condition, location)

def linear_search(items, condition):
    """Yields each item in items for which condition(item) is true."""
    for i in items:
        if condition(i):
            yield i
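binary_search above assumes condition flips at most once across the interval; a small numeric check of both directions (my example):

# Illustration only: the first square >= 1000 in [0, 100] is 32*32 = 1024.
assert binary_search(0, 100, lambda i: i * i >= 1000, FIRST) == 32
assert binary_search(0, 100, lambda i: i * i < 1000, LAST) == 31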
@@ -1,164 +0,0 @@
#!/usr/bin/env python

from __future__ import print_function

import base64, hashlib, os, random, struct, sys
import ed25519
import ecdsa
from ripple.util import Base58
from ripple.ledger import SField

ED25519_BYTE = chr(0xed)
WRAP_COLUMNS = 60

USAGE = """\
Usage:
    create
        Create a new master public/secret key pair.

    check <key>
        Check an existing key for validity.

    sign <sequence> <validator-public> <master-secret>
        Create a new signed manifest with the given sequence
        number, validator public key, and master secret key.
"""

def prepend_length_byte(b):
    assert len(b) <= 192, 'Too long'
    return chr(len(b)) + b

def to_int32(i):
    return struct.pack('>I', i)

#-----------------------------------------------------------

def make_seed(urandom=os.urandom):
    # This is not used.
    return urandom(16)

def make_ed25519_keypair(urandom=os.urandom):
    private_key = urandom(32)
    return private_key, ed25519.publickey(private_key)

def make_ecdsa_keypair():
    # This is not used.
    private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
    # Can't be unit tested easily - need a mock for ecdsa.
    vk = private_key.get_verifying_key()
    sig = private_key.sign('message')
    assert vk.verify(sig, 'message')
    return private_key, vk

def make_seed_from_passphrase(passphrase):
    # For convenience, like say testing against rippled, we can hash a
    # passphrase to get the seed. validation_create (Josh may have killed it
    # by now) takes an optional arg, which can be a base58 encoded seed, or a
    # passphrase.
    return hashlib.sha512(passphrase).digest()[:16]

def make_manifest(public_key, validator_public_key, seq):
    return ''.join([
        SField.sfSequence,
        to_int32(seq),
        SField.sfPublicKey,       # Master public key.
        prepend_length_byte(public_key),
        SField.sfSigningPubKey,   # Ephemeral public key.
        prepend_length_byte(validator_public_key)])

def sign_manifest(manifest, private_key, public_key):
    sig = ed25519.signature('MAN\0' + manifest, private_key, public_key)
    return manifest + SField.sfSignature + prepend_length_byte(sig)

def wrap(s, cols=WRAP_COLUMNS):
    if s:
        size = max((len(s) + cols - 1) / cols, 1)
        w = len(s) / size
        s = '\n'.join(s[i:i + w] for i in range(0, len(s), w))
    return s

def create_ed_keys(urandom=os.urandom):
    private_key, public_key = make_ed25519_keypair(urandom)
    public_key_human = Base58.encode_version(
        Base58.VER_NODE_PUBLIC, ED25519_BYTE + public_key)
    private_key_human = Base58.encode_version(
        Base58.VER_NODE_PRIVATE, private_key)
    return public_key_human, private_key_human

def check_validator_public(v, validator_public_key):
    Base58.check_version(v, Base58.VER_NODE_PUBLIC)
    if len(validator_public_key) != 33:
        raise ValueError('Validator key should be length 33, is %s' %
                         len(validator_public_key))
    b = ord(validator_public_key[0])
    if b not in (2, 3):
        raise ValueError('First validator key byte must be 2 or 3, is %d' % b)

def check_master_secret(v, private_key):
    Base58.check_version(v, Base58.VER_NODE_PRIVATE)
    if len(private_key) != 32:
        raise ValueError('Length of master secret should be 32, is %s' %
                         len(private_key))


def get_signature(seq, validator_public_key_human, private_key_human):
    v, validator_public_key = Base58.decode_version(validator_public_key_human)
    check_validator_public(v, validator_public_key)

    v, private_key = Base58.decode_version(private_key_human)
    check_master_secret(v, private_key)

    pk = ed25519.publickey(private_key)
    apk = ED25519_BYTE + pk
    m = make_manifest(apk, validator_public_key, seq)
    m1 = sign_manifest(m, private_key, pk)
    return base64.b64encode(m1)


# Testable versions of functions.
def perform_create(urandom=os.urandom, print=print):
    public, private = create_ed_keys(urandom)
    print('[validator_keys]', public, '', '[master_secret]', private, sep='\n')

def perform_check(s, print=print):
    version, b = Base58.decode_version(s)
    print('version = ' + Base58.version_name(version))
    print('decoded length = ' + str(len(b)))
    assert Base58.encode_version(version, b) == s

def perform_sign(
        seq, validator_public_key_human, private_key_human, print=print):
    print('[validation_manifest]')
    print(wrap(get_signature(
        int(seq), validator_public_key_human, private_key_human)))

# Externally visible versions of functions.
def create():
    perform_create()

def check(s):
    perform_check(s)

def sign(seq, validator_public_key_human, private_key_human):
    perform_sign(seq, validator_public_key_human, private_key_human)


def usage(*errors):
    if errors:
        print(*errors)
    print(USAGE)
    return not errors

_COMMANDS = dict((f.__name__, f) for f in (create, check, sign))

def run_command(args):
    if not args:
        return usage()
    name = args[0]
    command = _COMMANDS.get(name)
    if not command:
        return usage('No such command:', name)
    try:
        command(*args[1:])
    except TypeError:
        return usage('Wrong number of arguments for:', name)
    return True
@@ -1,21 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import datetime

# Format for human-readable dates in rippled.
_DATE_FORMAT = '%Y-%b-%d'
_TIME_FORMAT = '%H:%M:%S'
_DATETIME_FORMAT = '%s %s' % (_DATE_FORMAT, _TIME_FORMAT)

_FORMATS = _DATE_FORMAT, _TIME_FORMAT, _DATETIME_FORMAT

def parse_datetime(desc):
    for fmt in _FORMATS:
        try:
            return datetime.datetime.strptime(desc, fmt)
        except ValueError:
            pass
    raise ValueError("Can't understand date '%s'." % desc)

def format_datetime(dt):
    return dt.strftime(_DATETIME_FORMAT)
@@ -1,682 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
"""
|
|
||||||
Test for setting ephemeral keys for the validator manifest.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import (
|
|
||||||
absolute_import, division, print_function, unicode_literals
|
|
||||||
)
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import contextlib
|
|
||||||
from contextlib import contextmanager
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import platform
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
|
|
||||||
DELAY_WHILE_PROCESS_STARTS_UP = 1.5
|
|
||||||
ARGS = None
|
|
||||||
|
|
||||||
NOT_FOUND = -1 # not in log
|
|
||||||
ACCEPTED_NEW = 0 # added new manifest
|
|
||||||
ACCEPTED_UPDATE = 1 # replaced old manifest with new
|
|
||||||
UNTRUSTED = 2 # don't trust master key
|
|
||||||
STALE = 3 # seq is too old
|
|
||||||
REVOKED = 4 # revoked validator key
|
|
||||||
INVALID = 5 # invalid signature
|
|
||||||
|
|
||||||
MANIFEST_ACTION_STR_TO_ID = {
|
|
||||||
'NotFound': NOT_FOUND, # not found in log
|
|
||||||
'AcceptedNew': ACCEPTED_NEW,
|
|
||||||
'AcceptedUpdate': ACCEPTED_UPDATE,
|
|
||||||
'Untrusted': UNTRUSTED,
|
|
||||||
'Stale': STALE,
|
|
||||||
'Revoked': REVOKED,
|
|
||||||
'Invalid': INVALID,
|
|
||||||
}
|
|
||||||
|
|
||||||
MANIFEST_ACTION_ID_TO_STR = {
|
|
||||||
v: k for k, v in MANIFEST_ACTION_STR_TO_ID.items()
|
|
||||||
}
|
|
||||||
|
|
||||||
CONF_TEMPLATE = """
|
|
||||||
[server]
|
|
||||||
port_rpc
|
|
||||||
port_peer
|
|
||||||
port_wss_admin
|
|
||||||
|
|
||||||
[port_rpc]
|
|
||||||
port = {rpc_port}
|
|
||||||
ip = 127.0.0.1
|
|
||||||
admin = 127.0.0.1
|
|
||||||
protocol = https
|
|
||||||
|
|
||||||
[port_peer]
|
|
||||||
port = {peer_port}
|
|
||||||
ip = 0.0.0.0
|
|
||||||
protocol = peer
|
|
||||||
|
|
||||||
[port_wss_admin]
|
|
||||||
port = {wss_port}
|
|
||||||
ip = 127.0.0.1
|
|
||||||
admin = 127.0.0.1
|
|
||||||
protocol = wss
|
|
||||||
|
|
||||||
[node_size]
|
|
||||||
medium
|
|
||||||
|
|
||||||
[node_db]
|
|
||||||
type={node_db_type}
|
|
||||||
path={node_db_path}
|
|
||||||
open_files=2000
|
|
||||||
filter_bits=12
|
|
||||||
cache_mb=256
|
|
||||||
file_size_mb=8
|
|
||||||
file_size_mult=2
|
|
||||||
online_delete=256
|
|
||||||
advisory_delete=0
|
|
||||||
|
|
||||||
[database_path]
|
|
||||||
{db_path}
|
|
||||||
|
|
||||||
[debug_logfile]
|
|
||||||
{debug_logfile}
|
|
||||||
|
|
||||||
[sntp_servers]
|
|
||||||
time.windows.com
|
|
||||||
time.apple.com
|
|
||||||
time.nist.gov
|
|
||||||
pool.ntp.org
|
|
||||||
|
|
||||||
[ips]
|
|
||||||
r.ripple.com 51235
|
|
||||||
|
|
||||||
[ips_fixed]
|
|
||||||
{sibling_ip} {sibling_port}
|
|
||||||
|
|
||||||
[validators]
|
|
||||||
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
|
|
||||||
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
|
|
||||||
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
|
|
||||||
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
|
|
||||||
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5
|
|
||||||
|
|
||||||
[validation_quorum]
|
|
||||||
3
|
|
||||||
|
|
||||||
[validation_seed]
|
|
||||||
{validation_seed}
|
|
||||||
#vaidation_public_key: {validation_public_key}
|
|
||||||
|
|
||||||
# Other rippled's trusting this validator need this key
|
|
||||||
[validator_keys]
|
|
||||||
{all_validator_keys}
|
|
||||||
|
|
||||||
[peer_private]
|
|
||||||
1
|
|
||||||
|
|
||||||
[overlay]
|
|
||||||
expire = 1
|
|
||||||
auto_connect = 1
|
|
||||||
|
|
||||||
[validation_manifest]
|
|
||||||
{validation_manifest}
|
|
||||||
|
|
||||||
[rpc_startup]
|
|
||||||
{{ "command": "log_level", "severity": "debug" }}
|
|
||||||
|
|
||||||
[ssl_verify]
|
|
||||||
0
|
|
||||||
"""
|
|
||||||
# End config template
|
|
||||||
|
|
||||||
|
|
||||||
def static_vars(**kwargs):
|
|
||||||
def decorate(func):
|
|
||||||
for k in kwargs:
|
|
||||||
setattr(func, k, kwargs[k])
|
|
||||||
return func
|
|
||||||
return decorate
|
|
||||||
|
|
||||||
|
|
||||||
@static_vars(rpc=5005, peer=51235, wss=6006)
|
|
||||||
def checkout_port_nums():
|
|
||||||
"""Returns a tuple of port nums for rpc, peer, and wss_admin"""
|
|
||||||
checkout_port_nums.rpc += 1
|
|
||||||
checkout_port_nums.peer += 1
|
|
||||||
checkout_port_nums.wss += 1
|
|
||||||
return (
|
|
||||||
checkout_port_nums.rpc,
|
|
||||||
checkout_port_nums.peer,
|
|
||||||
checkout_port_nums.wss
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def is_windows():
|
|
||||||
return platform.system() == 'Windows'
|
|
||||||
|
|
||||||
|
|
||||||
def manifest_create():
|
|
||||||
"""returns dict with keys: 'validator_keys', 'master_secret'"""
|
|
||||||
to_run = ['python', ARGS.ripple_home + '/bin/python/Manifest.py', 'create']
|
|
||||||
r = subprocess.check_output(to_run)
|
|
||||||
result = {}
|
|
||||||
k = None
|
|
||||||
for l in r.splitlines():
|
|
||||||
l = l.strip()
|
|
||||||
if not l:
|
|
||||||
continue
|
|
||||||
elif l == '[validator_keys]':
|
|
||||||
k = l[1:-1]
|
|
||||||
elif l == '[master_secret]':
|
|
||||||
k = l[1:-1]
|
|
||||||
elif l.startswith('['):
|
|
||||||
raise ValueError(
|
|
||||||
'Unexpected key: {} from `manifest create`'.format(l))
|
|
||||||
else:
|
|
||||||
if not k:
|
|
||||||
raise ValueError('Value with no key')
|
|
||||||
result[k] = l
|
|
||||||
k = None
|
|
||||||
|
|
||||||
if k in result:
|
|
||||||
raise ValueError('Repeat key from `manifest create`: ' + k)
|
|
||||||
if len(result) != 2:
|
|
||||||
raise ValueError(
|
|
||||||
'Expected 2 keys from `manifest create` but got {} keys instead ({})'.
|
|
||||||
format(len(result), result))
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def sign_manifest(seq, validation_pk, master_secret):
|
|
||||||
"""returns the signed manifest as a string"""
|
|
||||||
to_run = ['python', ARGS.ripple_home + '/bin/python/Manifest.py', 'sign',
|
|
||||||
str(seq), validation_pk, master_secret]
|
|
||||||
try:
|
|
||||||
r = subprocess.check_output(to_run)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print('Error in sign_manifest: ', e.output)
|
|
||||||
raise e
|
|
||||||
result = []
|
|
||||||
for l in r.splitlines():
|
|
||||||
l.strip()
|
|
||||||
if not l or l == '[validation_manifest]':
|
|
||||||
continue
|
|
||||||
result.append(l)
|
|
||||||
return '\n'.join(result)
|
|
||||||
|
|
||||||
|
|
||||||
def get_ripple_exe():
|
|
||||||
"""Find the rippled executable"""
|
|
||||||
prefix = ARGS.ripple_home + '/build/'
|
|
||||||
exe = ['rippled', 'RippleD.exe']
|
|
||||||
to_test = [prefix + t + '.debug/' + e
|
|
||||||
for t in ['clang', 'gcc', 'msvc'] for e in exe]
|
|
||||||
for e in exe:
|
|
||||||
to_test.append(prefix + '/' + e)
|
|
||||||
for t in to_test:
|
|
||||||
if os.path.isfile(t):
|
|
||||||
return t
|
|
||||||
|
|
||||||
|
|
||||||
class RippledServer(object):
|
|
||||||
def __init__(self, exe, config_file, server_out):
|
|
||||||
self.config_file = config_file
|
|
||||||
self.exe = exe
|
|
||||||
self.process = None
|
|
||||||
self.server_out = server_out
|
|
||||||
self.reinit(config_file)
|
|
||||||
|
|
||||||
def reinit(self, config_file):
|
|
||||||
self.config_file = config_file
|
|
||||||
self.to_run = [self.exe, '--verbose', '--conf', self.config_file]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def config_root(self):
|
|
||||||
return os.path.dirname(self.config_file)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def master_secret_file(self):
|
|
||||||
return self.config_root + '/master_secret.txt'
|
|
||||||
|
|
||||||
def startup(self):
|
|
||||||
if ARGS.verbose:
|
|
||||||
print('starting rippled:' + self.config_file)
|
|
||||||
fout = open(self.server_out, 'w')
|
|
||||||
self.process = subprocess.Popen(
|
|
||||||
self.to_run, stdout=fout, stderr=subprocess.STDOUT)
|
|
||||||
|
|
||||||
def shutdown(self):
|
|
||||||
if not self.process:
|
|
||||||
return
|
|
||||||
fout = open(os.devnull, 'w')
|
|
||||||
subprocess.Popen(
|
|
||||||
self.to_run + ['stop'], stdout=fout, stderr=subprocess.STDOUT)
|
|
||||||
self.process.wait()
|
|
||||||
self.process = None
|
|
||||||
|
|
||||||
def rotate_logfile(self):
|
|
||||||
if self.server_out == os.devnull:
|
|
||||||
return
|
|
||||||
for i in range(100):
|
|
||||||
backup_name = '{}.{}'.format(self.server_out, i)
|
|
||||||
if not os.path.exists(backup_name):
|
|
||||||
os.rename(self.server_out, backup_name)
|
|
||||||
return
|
|
||||||
raise ValueError('Could not rotate logfile: {}'.
|
|
||||||
format(self.server_out))
|
|
||||||
|
|
||||||
def validation_create(self):
|
|
||||||
"""returns dict with keys:
|
|
||||||
'validation_key', 'validation_public_key', 'validation_seed'
|
|
||||||
"""
|
|
||||||
to_run = [self.exe, '-q', '--conf', self.config_file,
|
|
||||||
'--', 'validation_create']
|
|
||||||
try:
|
|
||||||
return json.loads(subprocess.check_output(to_run))['result']
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print('Error in validation_create: ', e.output)
|
|
||||||
raise e
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def rippled_server(config_file, server_out=os.devnull):
|
|
||||||
"""Start a ripple server"""
|
|
||||||
try:
|
|
||||||
server = None
|
|
||||||
server = RippledServer(ARGS.ripple_exe, config_file, server_out)
|
|
||||||
server.startup()
|
|
||||||
yield server
|
|
||||||
finally:
|
|
||||||
if server:
|
|
||||||
server.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def pause_server(server, config_file):
|
|
||||||
"""Shutdown and then restart a ripple server"""
|
|
||||||
try:
|
|
||||||
server.shutdown()
|
|
||||||
server.rotate_logfile()
|
|
||||||
yield server
|
|
||||||
finally:
|
|
||||||
server.reinit(config_file)
|
|
||||||
server.startup()
|
|
||||||
|
|
||||||
|
|
||||||
def parse_date(d, t):
|
|
||||||
"""Return the timestamp of a line, or none if the line has no timestamp"""
|
|
||||||
try:
|
|
||||||
return time.strptime(d+' '+t, '%Y-%B-%d %H:%M:%S')
|
|
||||||
except:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def to_dict(l):
|
|
||||||
"""Given a line of the form Key0: Value0;Key2: Valuue2; Return a dict"""
|
|
||||||
fields = l.split(';')
|
|
||||||
result = {}
|
|
||||||
for f in fields:
|
|
||||||
if f:
|
|
||||||
v = f.split(':')
|
|
||||||
assert len(v) == 2
|
|
||||||
result[v[0].strip()] = v[1].strip()
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def check_ephemeral_key(validator_key,
|
|
||||||
log_file,
|
|
||||||
seq,
|
|
||||||
change_time):
|
|
||||||
"""
|
|
||||||
Detect when a server is informed of a validator's ephemeral key change.
|
|
||||||
`change_time` and `seq` may be None, in which case they are ignored.
|
|
||||||
"""
|
|
||||||
manifest_prefix = 'Manifest:'
|
|
||||||
# a manifest line has the form Manifest: action; Key: value;
|
|
||||||
# Key can be Pk (public key), Seq, OldSeq,
|
|
||||||
for l in open(log_file):
|
|
||||||
sa = l.split()
|
|
||||||
if len(sa) < 5 or sa[4] != manifest_prefix:
|
|
||||||
continue
|
|
||||||
|
|
||||||
d = to_dict(' '.join(sa[4:]))
|
|
||||||
# check the seq number and validator_key
|
|
||||||
if d['Pk'] != validator_key:
|
|
||||||
continue
|
|
||||||
if seq is not None and int(d['Seq']) != seq:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if change_time:
|
|
||||||
t = parse_date(sa[0], sa[1])
|
|
||||||
if not t or t < change_time:
|
|
||||||
continue
|
|
||||||
action = d['Manifest']
|
|
||||||
return MANIFEST_ACTION_STR_TO_ID[action]
|
|
||||||
return NOT_FOUND
|
|
||||||
|
|
||||||
|
|
||||||
def check_ephemeral_keys(validator_key,
                         log_files,
                         seq,
                         change_time=None,
                         timeout_s=60):
    result = [NOT_FOUND for i in range(len(log_files))]
    if timeout_s < 10:
        sleep_time = 1
    elif timeout_s < 60:
        sleep_time = 5
    else:
        sleep_time = 10
    n = timeout_s//sleep_time
    if n == 0:
        n = 1
    start_time = time.time()
    for _ in range(n):
        for i, lf in enumerate(log_files):
            if result[i] != NOT_FOUND:
                continue
            result[i] = check_ephemeral_key(validator_key,
                                            lf,
                                            seq,
                                            change_time)
            if result[i] != NOT_FOUND:
                if all(r != NOT_FOUND for r in result):
                    return result
                else:
                    server_dir = os.path.basename(
                        os.path.dirname(log_files[i]))
                    if ARGS.verbose:
                        print('Check for {}: {}'.format(
                            server_dir, MANIFEST_ACTION_ID_TO_STR[result[i]]))
        tsf = time.time() - start_time
        if tsf > 20 and ARGS.verbose:
            print('Waiting for key to propagate: ', tsf)
        time.sleep(sleep_time)
    return result
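# check_ephemeral_keys above polls every log file until each reports something
# other than NOT_FOUND or the timeout lapses; the polling interval scales with
# timeout_s (1s under 10s, 5s under 60s, 10s otherwise).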
def get_validator_key(config_file):
    in_validator_keys = False
    for l in open(config_file):
        sl = l.strip()
        if not in_validator_keys and sl == '[validator_keys]':
            in_validator_keys = True
            continue
        if in_validator_keys:
            if sl.startswith('['):
                raise ValueError('ThisServer validator key not found')
            if sl.startswith('#'):
                continue
            s = sl.split()
            if len(s) == 2 and s[1] == 'ThisServer':
                return s[0]
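# get_validator_key above scans a stanza shaped like the following (the keys
# are made-up placeholders) and returns the one labeled ThisServer:
#   [validator_keys]
#   n9Kxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ThisServer
#   n9Lyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy NextInRing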
def new_config_ephemeral_key(
        server, seq, rm_dbs=False, master_secret_file=None):
    """Generate a new ephemeral key, add to config, restart server"""
    config_root = server.config_root
    config_file = config_root + '/rippled.cfg'
    db_dir = config_root + '/db'
    if not master_secret_file:
        master_secret_file = server.master_secret_file
    with open(master_secret_file) as f:
        master_secret = f.read()
    v = server.validation_create()
    signed = sign_manifest(seq, v['validation_public_key'], master_secret)
    with pause_server(server, config_file):
        if rm_dbs and os.path.exists(db_dir):
            shutil.rmtree(db_dir)
            os.makedirs(db_dir)
        # replace the validation_manifest section with `signed`
        bak = config_file + '.bak'
        if is_windows() and os.path.isfile(bak):
            os.remove(bak)
        os.rename(config_file, bak)
        in_manifest = False
        with open(bak, 'r') as src:
            with open(config_file, 'w') as out:
                for l in src:
                    sl = l.strip()
                    if not in_manifest and sl == '[validation_manifest]':
                        in_manifest = True
                    elif in_manifest:
                        if sl.startswith('[') or sl.startswith('#'):
                            in_manifest = False
                            out.write(signed)
                            out.write('\n\n')
                        else:
                            continue
                    out.write(l)
    return (bak, config_file)
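# After the rewrite above, the config file holds a stanza of roughly this
# shape (the body is whatever string sign_manifest produced; shown here only
# as a placeholder):
#   [validation_manifest]
#   <signed manifest blob>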
def parse_args():
    parser = argparse.ArgumentParser(
        description=('Create config files for n validators')
    )

    parser.add_argument(
        '--ripple_home', '-r',
        default=os.sep.join(os.path.realpath(__file__).split(os.sep)[:-5]),
        help=('Root directory of the ripple repo'), )
    parser.add_argument('--num_validators', '-n',
                        default=2,
                        help=('Number of validators'), )
    parser.add_argument('--conf', '-c', help=('rippled config file'), )
    parser.add_argument('--out', '-o',
                        default='test_output',
                        help=('config root directory'), )
    parser.add_argument(
        '--existing', '-e',
        action='store_true',
        help=('use existing config files'), )
    parser.add_argument(
        '--generate', '-g',
        action='store_true',
        help=('generate conf files only'), )
    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        help=('verbose status reporting'), )
    parser.add_argument(
        '--quiet', '-q',
        action='store_true',
        help=('quiet status reporting'), )

    return parser.parse_args()
def get_configs(manifest_seq):
    global ARGS
    ARGS.ripple_home = os.path.expanduser(ARGS.ripple_home)

    n = int(ARGS.num_validators)
    if n < 2:
        raise ValueError(
            'Need at least 2 rippled servers. Specified: {}'.format(n))
    config_root = ARGS.out
    ARGS.ripple_exe = get_ripple_exe()
    if not ARGS.ripple_exe:
        raise ValueError('No Exe Found')

    if ARGS.existing:
        return [
            os.path.abspath('{}/validator_{}/rippled.cfg'.format(config_root, i))
            for i in range(n)
        ]

    initial_config = ARGS.conf

    manifests = [manifest_create() for i in range(n)]
    port_nums = [checkout_port_nums() for i in range(n)]
    with rippled_server(initial_config) as server:
        time.sleep(DELAY_WHILE_PROCESS_STARTS_UP)
        validations = [server.validation_create() for i in range(n)]

    signed_manifests = [sign_manifest(manifest_seq,
                                      v['validation_public_key'],
                                      m['master_secret'])
                        for m, v in zip(manifests, validations)]
    node_db_type = 'RocksDB' if not is_windows() else 'NuDB'
    node_db_filename = node_db_type.lower()

    config_files = []
    for i, (m, v, s) in enumerate(zip(manifests, validations, signed_manifests)):
        sibling_index = (i - 1) % len(manifests)
        all_validator_keys = '\n'.join([
            m['validator_keys'] + ' ThisServer',
            manifests[sibling_index]['validator_keys'] + ' NextInRing'])
        this_validator_dir = os.path.abspath(
            '{}/validator_{}'.format(config_root, i))
        db_path = this_validator_dir + '/db'
        node_db_path = db_path + '/' + node_db_filename
        log_path = this_validator_dir + '/log'
        debug_logfile = log_path + '/debug.log'
        rpc_port, peer_port, wss_port = port_nums[i]
        sibling_ip = '127.0.0.1'
        sibling_port = port_nums[sibling_index][1]
        d = {
            'validation_manifest': s,
            'all_validator_keys': all_validator_keys,
            'node_db_type': node_db_type,
            'node_db_path': node_db_path,
            'db_path': db_path,
            'debug_logfile': debug_logfile,
            'rpc_port': rpc_port,
            'peer_port': peer_port,
            'wss_port': wss_port,
            'sibling_ip': sibling_ip,
            'sibling_port': sibling_port,
        }
        d.update(m)
        d.update(v)

        for p in [this_validator_dir, db_path, log_path]:
            if not os.path.exists(p):
                os.makedirs(p)

        config_files.append('{}/rippled.cfg'.format(this_validator_dir))
        with open(config_files[-1], 'w') as f:
            f.write(CONF_TEMPLATE.format(**d))

        with open('{}/master_secret.txt'.format(this_validator_dir), 'w') as f:
            f.write(m['master_secret'])

    return config_files
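# get_configs above wires the validators into a ring: validator i lists its
# own key as ThisServer and the key of validator (i - 1) % n as NextInRing,
# and points its peer connection at that sibling's port, so each server
# trusts exactly one other validator.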
def update_ephemeral_key(
        server, new_seq, log_files,
        expected=None, rm_dbs=False, master_secret_file=None,
        restore_original_conf=False, timeout_s=300):
    if not expected:
        expected = {}

    change_time = time.gmtime()
    back_conf, new_conf = new_config_ephemeral_key(
        server,
        new_seq,
        rm_dbs,
        master_secret_file
    )
    validator_key = get_validator_key(server.config_file)
    start_time = time.time()
    ck = check_ephemeral_keys(validator_key,
                              log_files,
                              seq=new_seq,
                              change_time=change_time,
                              timeout_s=timeout_s)
    if ARGS.verbose:
        print('Check finished: {} secs.'.format(int(time.time() - start_time)))
    all_success = True
    for i, r in enumerate(ck):
        e = expected.get(i, UNTRUSTED)
        server_dir = os.path.basename(os.path.dirname(log_files[i]))
        status = 'OK' if e == r else 'FAIL'
        print('{}: Server: {} Expected: {} Got: {}'.
              format(status, server_dir,
                     MANIFEST_ACTION_ID_TO_STR[e], MANIFEST_ACTION_ID_TO_STR[r]))
        all_success = all_success and (e == r)
    if restore_original_conf:
        if is_windows() and os.path.isfile(new_conf):
            os.remove(new_conf)
        os.rename(back_conf, new_conf)
    return all_success
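# update_ephemeral_key usage sketch, mirroring the calls in run_main below:
#   expected = {0: ACCEPTED_UPDATE}
#   ok = update_ephemeral_key(servers[0], manifest_seq, log_files, expected)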
def run_main():
    global ARGS
    ARGS = parse_args()
    manifest_seq = 1
    config_files = get_configs(manifest_seq)
    if ARGS.generate:
        return
    if len(config_files) <= 1:
        print('Script requires at least 2 servers. Actual #: {}'.
              format(len(config_files)))
        return
    # contextlib.nested is Python 2 only; Python 3 would need
    # contextlib.ExitStack instead.
    with contextlib.nested(*(rippled_server(c, os.path.dirname(c)+'/log.txt')
                             for c in config_files)) as servers:
        log_files = [os.path.dirname(cf)+'/log.txt' for cf in config_files[1:]]
        validator_key = get_validator_key(config_files[0])
        start_time = time.time()
        ck = check_ephemeral_keys(validator_key,
                                  [log_files[0]],
                                  seq=None,
                                  timeout_s=60)
        if ARGS.verbose:
            print('Check finished: {} secs.'.format(
                int(time.time() - start_time)))
        if any(r == NOT_FOUND for r in ck):
            print('FAIL: Initial key did not propagate to all servers')
            return

        manifest_seq += 2
        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = ACCEPTED_UPDATE
        if not ARGS.quiet:
            print('Testing key update')
        kr = update_ephemeral_key(servers[0], manifest_seq, log_files, expected)
        if not kr:
            print('\nFail: Key Update Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = STALE
        if not ARGS.quiet:
            print('Testing stale key')
        kr = update_ephemeral_key(
            servers[0], manifest_seq-1, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Stale Key Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = STALE
        if not ARGS.quiet:
            print('Testing stale key 2')
        kr = update_ephemeral_key(
            servers[0], manifest_seq, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Stale Key Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = REVOKED
        if not ARGS.quiet:
            print('Testing revoked key')
        kr = update_ephemeral_key(
            servers[0], 0xffffffff, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Revoked Key Test. Exiting')
            return
        print('\nOK: All tests passed')


if __name__ == '__main__':
    run_main()
@@ -1,47 +0,0 @@
from __future__ import absolute_import, division, print_function

from ripple.util import Base58

from unittest import TestCase

BINARY = 'nN9kfUnKTf7PpgLG'

class test_Base58(TestCase):
    def run_test(self, before, after):
        self.assertEquals(Base58.decode(before), after)
        self.assertEquals(Base58.encode(after), before)

    def test_trivial(self):
        self.run_test('', '')

    def test_zeroes(self):
        for before, after in (('', ''), ('abc', 'I\x8b')):
            for i in range(1, 257):
                self.run_test('r' * i + before, '\0' * i + after)

    def test_single_digits(self):
        for i, c in enumerate(Base58.ALPHABET):
            self.run_test(c, chr(i))

    def test_various(self):
        # Test three random numbers.
        self.run_test('88Mw', '\x88L\xed')
        self.run_test(
            'nN9kfUnKTf7PpgLG', '\x03\xdc\x9co\xdea\xefn\xd3\xb8\xe2\xc1')
        self.run_test(
            'zzWWb4C5p6kNrVa4fEBoZpZKd3XQLXch7QJbLCuLdoS1CWr8qdAZHEmwMiJy8Hwp',
            'xN\x82\xfcQ\x1f\xb3~\xdf\xc7\xb37#\xc6~A\xe9\xf6-\x1f\xcb"\xfab'
            '(\'\xccv\x9e\x85\xc3\xd1\x19\x941{\x8et\xfbS}\x86.k\x07\xb5\xb3')

    def test_check(self):
        self.assertEquals(Base58.checksum(BINARY), '\xaa\xaar\x9d')

    def test_encode(self):
        self.assertEquals(
            Base58.encode_version(Base58.VER_ACCOUNT_PUBLIC, BINARY),
            'sB49XwJgmdEZDo8LmYwki7FYkiaN7')

    def test_decode(self):
        ver, b = Base58.decode_version('sB49XwJgmdEZDo8LmYwki7FYkiaN7')
        self.assertEquals(ver, Base58.VER_ACCOUNT_PUBLIC)
        self.assertEquals(b, BINARY)
@@ -1,12 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util.Cache import NamedCache

from unittest import TestCase

class test_Cache(TestCase):
    def setUp(self):
        self.cache = NamedCache()

    def test_trivial(self):
        pass
@@ -1,163 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import ConfigFile

from unittest import TestCase

class test_ConfigFile(TestCase):
    def test_trivial(self):
        self.assertEquals(ConfigFile.read(''), {})

    def test_full(self):
        self.assertEquals(ConfigFile.read(FULL.splitlines()), RESULT)

RESULT = {
    'websocket_port': '6206',
    'database_path': '/development/alpha/db',
    'sntp_servers':
    ['time.windows.com', 'time.apple.com', 'time.nist.gov', 'pool.ntp.org'],
    'validation_seed': 'sh1T8T9yGuV7Jb6DPhqSzdU2s5LcV',
    'node_size': 'medium',
    'rpc_startup': {
        'command': 'log_level',
        'severity': 'debug'},
    'ips': ['r.ripple.com', '51235'],
    'node_db': {
        'file_size_mult': '2',
        'file_size_mb': '8',
        'cache_mb': '256',
        'path': '/development/alpha/db/rocksdb',
        'open_files': '2000',
        'type': 'RocksDB',
        'filter_bits': '12'},
    'peer_port': '53235',
    'ledger_history': 'full',
    'rpc_ip': '127.0.0.1',
    'websocket_public_ip': '0.0.0.0',
    'rpc_allow_remote': '0',
    'validators':
    [['n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7', 'RL1'],
     ['n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj', 'RL2'],
     ['n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C', 'RL3'],
     ['n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS', 'RL4'],
     ['n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA', 'RL5']],
    'debug_logfile': '/development/alpha/debug.log',
    'websocket_public_port': '5206',
    'peer_ip': '0.0.0.0',
    'rpc_port': '5205',
    'validation_quorum': '3',
    'websocket_ip': '127.0.0.1'}

FULL = """
[ledger_history]
full

# Allow other peers to connect to this server.
#
[peer_ip]
0.0.0.0

[peer_port]
53235

# Allow untrusted clients to connect to this server.
#
[websocket_public_ip]
0.0.0.0

[websocket_public_port]
5206

# Provide trusted websocket ADMIN access to the localhost.
#
[websocket_ip]
127.0.0.1

[websocket_port]
6206

# Provide trusted json-rpc ADMIN access to the localhost.
#
[rpc_ip]
127.0.0.1

[rpc_port]
5205

[rpc_allow_remote]
0

[node_size]
medium

# This is primary persistent datastore for rippled. This includes transaction
# metadata, account states, and ledger headers. Helpful information can be
# found here: https://ripple.com/wiki/NodeBackEnd
[node_db]
type=RocksDB
path=/development/alpha/db/rocksdb
open_files=2000
filter_bits=12
cache_mb=256
file_size_mb=8
file_size_mult=2

[database_path]
/development/alpha/db

# This needs to be an absolute directory reference, not a relative one.
# Modify this value as required.
[debug_logfile]
/development/alpha/debug.log

[sntp_servers]
time.windows.com
time.apple.com
time.nist.gov
pool.ntp.org

# Where to find some other servers speaking the Ripple protocol.
#
[ips]
r.ripple.com 51235

# The latest validators can be obtained from
# https://ripple.com/ripple.txt
#
[validators]
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5

# Ditto.
[validation_quorum]
3

[validation_seed]
sh1T8T9yGuV7Jb6DPhqSzdU2s5LcV

# Turn down default logging to save disk space in the long run.
# Valid values here are trace, debug, info, warning, error, and fatal
[rpc_startup]
{ "command": "log_level", "severity": "debug" }

# Configure SSL for WebSockets. Not enabled by default because not everybody
# has an SSL cert on their server, but if you uncomment the following lines and
# set the path to the SSL certificate and private key the WebSockets protocol
# will be protected by SSL/TLS.
#[websocket_secure]
#1

#[websocket_ssl_cert]
#/etc/ssl/certs/server.crt

#[websocket_ssl_key]
#/etc/ssl/private/server.key

# Defaults to 0 ("no") so that you can use self-signed SSL certificates for
# development, or internally.
#[ssl_verify]
#0
""".strip()
@@ -1,20 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util.Decimal import Decimal

from unittest import TestCase

class test_Decimal(TestCase):
    def test_construct(self):
        self.assertEquals(str(Decimal('')), '0')
        self.assertEquals(str(Decimal('0')), '0')
        self.assertEquals(str(Decimal('0.2')), '0.2')
        self.assertEquals(str(Decimal('-0.2')), '-0.2')
        self.assertEquals(str(Decimal('3.1416')), '3.1416')

    def test_accumulate(self):
        d = Decimal()
        d.accumulate('0.5')
        d.accumulate('3.1416')
        d.accumulate('-23.34234')
        self.assertEquals(str(d), '-19.70074')
@@ -1,56 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import Dict

from unittest import TestCase

class test_Dict(TestCase):
    def test_count_all_subitems(self):
        self.assertEquals(Dict.count_all_subitems({}), 1)
        self.assertEquals(Dict.count_all_subitems({'a': {}}), 2)
        self.assertEquals(Dict.count_all_subitems([1]), 2)
        self.assertEquals(Dict.count_all_subitems([1, 2]), 3)
        self.assertEquals(Dict.count_all_subitems([1, {2: 3}]), 4)
        self.assertEquals(Dict.count_all_subitems([1, {2: [3]}]), 5)
        self.assertEquals(Dict.count_all_subitems([1, {2: [3, 4]}]), 6)

    def test_prune(self):
        self.assertEquals(Dict.prune({}, 0), {})
        self.assertEquals(Dict.prune({}, 1), {})

        self.assertEquals(Dict.prune({1: 2}, 0), '{dict with 1 subitem}')
        self.assertEquals(Dict.prune({1: 2}, 1), {1: 2})
        self.assertEquals(Dict.prune({1: 2}, 2), {1: 2})

        self.assertEquals(Dict.prune([1, 2, 3], 0), '[list with 3 subitems]')
        self.assertEquals(Dict.prune([1, 2, 3], 1), [1, 2, 3])

        self.assertEquals(Dict.prune([{1: [2, 3]}], 0),
                          '[list with 4 subitems]')
        self.assertEquals(Dict.prune([{1: [2, 3]}], 1),
                          ['{dict with 3 subitems}'])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 2),
                          [{1: u'[list with 2 subitems]'}])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 3),
                          [{1: [2, 3]}])

    def test_prune_nosub(self):
        self.assertEquals(Dict.prune({}, 0, False), {})
        self.assertEquals(Dict.prune({}, 1, False), {})

        self.assertEquals(Dict.prune({1: 2}, 0, False), '{dict with 1 subitem}')
        self.assertEquals(Dict.prune({1: 2}, 1, False), {1: 2})
        self.assertEquals(Dict.prune({1: 2}, 2, False), {1: 2})

        self.assertEquals(Dict.prune([1, 2, 3], 0, False),
                          '[list with 3 subitems]')
        self.assertEquals(Dict.prune([1, 2, 3], 1, False), [1, 2, 3])

        self.assertEquals(Dict.prune([{1: [2, 3]}], 0, False),
                          '[list with 1 subitem]')
        self.assertEquals(Dict.prune([{1: [2, 3]}], 1, False),
                          ['{dict with 1 subitem}'])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 2, False),
                          [{1: u'[list with 2 subitems]'}])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 3, False),
                          [{1: [2, 3]}])
@@ -1,37 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util.Function import Function, MATCHER

from unittest import TestCase

def FN(*args, **kwds):
    return args, kwds

class test_Function(TestCase):
    def match_test(self, item, *results):
        self.assertEquals(MATCHER.match(item).groups(), results)

    def test_simple(self):
        self.match_test('function', 'function', '')
        self.match_test('f(x)', 'f', '(x)')

    def test_empty_function(self):
        self.assertEquals(Function()(), None)

    def test_empty_args(self):
        f = Function('ripple.util.test_Function.FN()')
        self.assertEquals(f(), ((), {}))

    def test_function(self):
        f = Function('ripple.util.test_Function.FN(True, {1: 2}, None)')
        self.assertEquals(f(), ((True, {1: 2}, None), {}))
        self.assertEquals(f('hello', foo='bar'),
                          (('hello', True, {1: 2}, None), {'foo': 'bar'}))
        self.assertEquals(
            f, Function('ripple.util.test_Function.FN(true, {1: 2}, null)'))

    def test_quoting(self):
        f = Function('ripple.util.test_Function.FN(testing)')
        self.assertEquals(f(), (('testing',), {}))
        f = Function('ripple.util.test_Function.FN(testing, true, false, null)')
        self.assertEquals(f(), (('testing', True, False, None), {}))
@@ -1,56 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import PrettyPrint

from unittest import TestCase

class test_PrettyPrint(TestCase):
    def setUp(self):
        self._results = []
        # Rebinds self.printer from the method below to a Streamer that
        # writes through the (already bound) method.
        self.printer = PrettyPrint.Streamer(printer=self.printer)

    def printer(self, *args, **kwds):
        self._results.extend(args)

    def run_test(self, expected, *args):
        for i in range(0, len(args), 2):
            self.printer.add(args[i], args[i + 1])
        self.printer.finish()
        self.assertEquals(''.join(self._results), expected)

    def test_simple_printer(self):
        self.run_test(
            '{\n "foo": "bar"\n}',
            'foo', 'bar')

    def test_multiple_lines(self):
        self.run_test(
            '{\n "foo": "bar",\n "baz": 5\n}',
            'foo', 'bar', 'baz', 5)

    # Renamed from a second test_multiple_lines, which shadowed the test
    # above and kept it from ever running.
    def test_nested_dict(self):
        self.run_test(
            """
{
 "foo": {
  "bar": 1,
  "baz": true
 },
 "bang": "bing"
}
""".strip(), 'foo', {'bar': 1, 'baz': True}, 'bang', 'bing')

    def test_multiple_lines_with_list(self):
        self.run_test(
            """
{
 "foo": [
  "bar",
  1
 ],
 "baz": [
  23,
  42
 ]
}
""".strip(), 'foo', ['bar', 1], 'baz', [23, 42])
@@ -1,28 +0,0 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import Range

from unittest import TestCase

class test_Range(TestCase):
    def round_trip(self, s, *items):
        self.assertEquals(Range.from_string(s), set(items))
        self.assertEquals(Range.to_string(items), s)

    def test_complete(self):
        self.round_trip('10,19', 10, 19)
        self.round_trip('10', 10)
        self.round_trip('10-12', 10, 11, 12)
        self.round_trip('10,19,42-45', 10, 19, 42, 43, 44, 45)

    def test_names(self):
        self.assertEquals(
            Range.from_string('first,last,current', first=1, last=3, current=5),
            set([1, 3, 5]))

    def test_is_range(self):
        self.assertTrue(Range.is_range(''))
        self.assertTrue(Range.is_range('10'))
        self.assertTrue(Range.is_range('10,12'))
        self.assertFalse(Range.is_range('10,12,fred'))
        self.assertTrue(Range.is_range('10,12,fred', 'fred'))
Some files were not shown because too many files have changed in this diff.