Compare commits: 0.27.4...0.30.0-rc1 (747 commits)
747 commits in this range, from caccee1d98 through 0a3e1af04c. Only the bare SHA1 of each commit was captured in this view; the author, date, and message columns are not shown.
9  .gitignore  (vendored)
@@ -24,6 +24,11 @@ bin/rippled
 Debug/*.*
 Release/*.*
+
+# Ignore coverage files.
+*.gcno
+*.gcda
+*.gcov
 
 # Ignore locally installed node_modules
 /node_modules
 
@@ -31,6 +36,7 @@ Release/*.*
 tmp
 
 # Ignore database directory.
+db/
 db/*.db
 db/*.db-*
 
@@ -76,3 +82,6 @@ My Amplifier XE Results - RippleD
 
 # Build Log
 rippled-build.log
+
+# Profiling data
+gmon.out
128  .travis.yml
@@ -1,60 +1,84 @@
+sudo: false
 language: cpp
-compiler:
-- clang
-- gcc
+env:
+global:
+# Maintenance note: to move to a new version
+# of boost, update both BOOST_ROOT and BOOST_URL.
+# Note that for simplicity, BOOST_ROOT's final
+# namepart must match the folder name internal
+# to boost's .tar.gz.
+- BOOST_ROOT=$HOME/boost_1_59_0
+- BOOST_URL='http://downloads.sourceforge.net/project/boost/boost/1.59.0/boost_1_59_0.tar.gz?r=http%3A%2F%2Fsourceforge.net%2Fprojects%2Fboost%2Ffiles%2Fboost%2F1.59.0%2Fboost_1_59_0.tar.gz%2Fdownload&ts=1441761349&use_mirror=skylineservers'
+- RIPPLED_OLD_GCC_ABI=1
+
+
+packages: &gcc5_pkgs
+- gcc-5
+- g++-5
+- python-software-properties
+- protobuf-compiler
+- libprotobuf-dev
+- libssl-dev
+- libstdc++6
+- binutils-gold
+# Provides a backtrace if the unittests crash
+- gdb
+
+packages: &gcc48_pkgs
+- gcc-4.8
+- g++-4.8
+- python-software-properties
+- protobuf-compiler
+- libprotobuf-dev
+- libssl-dev
+- libstdc++6
+- binutils-gold
+# Provides a backtrace if the unittests crash
+- gdb
+
+matrix:
+include:
+- compiler: gcc
+env: GCC_VER=5 TARGET=debug.nounity
+addons: &ao_gcc5
+apt:
+sources: ['ubuntu-toolchain-r-test']
+packages: *gcc5_pkgs
+
+- compiler: gcc
+env: GCC_VER=5 TARGET=coverage
+addons: *ao_gcc5
+
+- compiler: clang
+env: GCC_VER=4.8 TARGET=debug
+addons: &ao_gcc48
+apt:
+sources: ['ubuntu-toolchain-r-test']
+packages: *gcc48_pkgs
+
+- compiler: clang
+env: GCC_VER=4.8 TARGET=debug.nounity
+addons: *ao_gcc48
+
+- compiler: gcc
+env: GCC_VER=4.8 TARGET=debug
+addons: *ao_gcc48
+
+- compiler: gcc
+env: GCC_VER=4.8 TARGET=debug.nounity
+addons: *ao_gcc48
+
+cache:
+directories:
+- $BOOST_ROOT
+
 before_install:
-- sudo apt-get update -qq
-- sudo apt-get install -qq python-software-properties
-- sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
-- sudo add-apt-repository -y ppa:afrank/boost
-- sudo apt-get update -qq
-- sudo apt-get install -qq g++-4.8
-- sudo apt-get install -qq libboost1.57-all-dev
-- sudo apt-get install -qq mlocate
-- sudo updatedb
-- sudo locate libboost | grep /lib | grep -e ".a$"
-- sudo apt-get install -qq protobuf-compiler libprotobuf-dev libssl-dev exuberant-ctags
-# We need gcc >= 4.8 for some c++11 features
-- sudo apt-get install -qq gcc-4.8
-- sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 40 --slave /usr/bin/g++ g++ /usr/bin/g++-4.8
-- sudo update-alternatives --set gcc /usr/bin/gcc-4.8
-# Stuff is gold. Nuff said ;)
-- sudo apt-get -y install binutils-gold
-# We can get a backtrace if the guy crashes
-- sudo apt-get -y install gdb
-# What versions are we ACTUALLY running?
-- g++ -v
-- clang -v
-# Avoid `spurious errors` caused by ~/.npm permission issues
-# Does it already exist? Who owns? What permissions?
-- ls -lah ~/.npm || mkdir ~/.npm
-# Make sure we own it
-- sudo chown -R $USER ~/.npm
+- bin/ci/ubuntu/install-dependencies.sh
 
 script:
-# Set so any failing command will abort the build
-- set -e
-# $CC will be either `clang` or `gcc` (If only we could do -j12 ;)
-- scons $CC.debug
-# We can be sure we're using the build/$CC.debug variant (-f so never err)
-- rm -f build/rippled
-- export RIPPLED_PATH="$PWD/build/$CC.debug/rippled"
-# See what we've actually built
-- ldd $RIPPLED_PATH
-# Run unittests (under gdb)
-- | # create gdb script
-echo "set env MALLOC_CHECK_=3" > script.gdb
-echo "run" >> script.gdb
-echo "backtrace full" >> script.gdb
-# gdb --help
-- cat script.gdb | gdb --ex 'set print thread-events off' --return-child-result --args $RIPPLED_PATH --unittest
-- npm install
-# Use build/(gcc|clang).debug/rippled
-- |
-echo "exports.default_server_config = {\"rippled_path\" : \"$RIPPLED_PATH\"};" > test/config.js
-
-# Run integration tests
-- npm test
+- bin/ci/ubuntu/build-and-test.sh
 notifications:
 email:
 false
@@ -21,7 +21,7 @@ pkgver() {
 
 build() {
 cd "$srcdir/$pkgname"
-scons build/rippled
+scons
 }
 
 check() {
23  Builds/Docker/Dockerfile-testnet  (new file)
@@ -0,0 +1,23 @@
FROM ubuntu
MAINTAINER Torrie Fischer <torrie@ripple.com>

RUN apt-get update -qq &&\
apt-get install -qq software-properties-common &&\
apt-add-repository -y ppa:ubuntu-toolchain-r/test &&\
apt-add-repository -y ppa:afrank/boost &&\
apt-get update -qq

RUN apt-get purge -qq libboost1.48-dev &&\
apt-get install -qq libprotobuf8 libboost1.57-all-dev

RUN mkdir -p /srv/rippled/data

VOLUME /srv/rippled/data/

ENTRYPOINT ["/srv/rippled/bin/rippled"]
CMD ["--conf", "/srv/rippled/data/rippled.cfg"]
EXPOSE 51235/udp
EXPOSE 5005/tcp

ADD ./rippled.cfg /srv/rippled/data/rippled.cfg
ADD ./rippled /srv/rippled/bin/
13  Builds/Docker/build-ci.sh  (new executable file)
@@ -0,0 +1,13 @@
set -e

mkdir -p build/docker/
cp doc/rippled-example.cfg build/clang.debug/rippled build/docker/
cp Builds/Docker/Dockerfile-testnet build/docker/Dockerfile
mv build/docker/rippled-example.cfg build/docker/rippled.cfg
strip build/docker/rippled
docker build -t ripple/rippled:$CIRCLE_SHA1 build/docker/
docker tag ripple/rippled:$CIRCLE_SHA1 ripple/rippled:latest

if [ -z "$CIRCLE_PR_NUMBER" ]; then
docker tag ripple/rippled:$CIRCLE_SHA1 ripple/rippled:$CIRCLE_BRANCH
fi
16  Builds/Docker/push-to-hub.sh  (new executable file)
@@ -0,0 +1,16 @@
set -e

if [ -z "$DOCKER_EMAIL" -o -z "$DOCKER_USERNAME" -o -z "$DOCKER_PASSWORD" ];then
echo "Docker credentials are not set. Can't login to docker, no containers will be pushed."
exit 0
fi

if [ -n "$CIRCLE_PR_NUMBER" ]; then
echo "Not pushing results of a pull request build."
exit 0
fi

docker login -e $DOCKER_EMAIL -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
docker push ripple/rippled:$CIRCLE_SHA1
docker push ripple/rippled:$CIRCLE_BRANCH
docker push ripple/rippled:latest
31  Builds/Eclipse/README.md  (new file)
@@ -0,0 +1,31 @@
**Requirements**

1. Java Runtime Environment (JRE)
2. Eclipse with CDT (tested on Luna):
http://www.eclipse.org/downloads/packages/eclipse-ide-cc-developers/lunasr2
3. Eclipse SCons plugin: http://sconsolidator.com/
**WARNING**: by default the SCons plugin uses 16 threads. Go to
*Window->Preferences->SCons->Build Settings* in Eclipse and make it
use only 4-8 jobs(threads) or whatever you feel confortable with. It will
positively freeze your system if you run with 16 threads/jobs.

**Getting Started**

After setting up Eclipse just do a File->New->Other...
Select: C/C++ / New SCons project from existing source
Point the importer to the folder where the SConstruct resides (the root
folder of your git workspace normally)

**Build**

Just hit Project->Build All in Eclipse to get started. And remember to not
let it run 16 threads!

**Debug**

Start a new Eclipse debug configuration and set binary to run to build/rippled
(assuming you have built it).
BIN  Builds/Eclipse/debug.png  (new file, 18 KiB)
BIN  Builds/Eclipse/scons.png  (new file, 17 KiB)
191  Builds/Test.py  (new executable file)
@@ -0,0 +1,191 @@
#!/usr/bin/env python

# This file is part of rippled: https://github.com/ripple/rippled
# Copyright (c) 2012 - 2015 Ripple Labs Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""
Invocation:

  ./Builds/Test.py - builds and tests all configurations

The build must succeed without shell aliases for this to work.

To pass flags to scons, put them at the very end of the command line, after
the -- flag - like this:

  ./Builds/Test.py -- -j4   # Pass -j4 to scons.


Common problems:

1) Boost not found. Solution: export BOOST_ROOT=[path to boost folder]

2) OpenSSL not found. Solution: export OPENSSL_ROOT=[path to OpenSSL folder]

3) scons is an alias. Solution: Create a script named "scons" somewhere in
   your $PATH (eg. ~/bin/scons will often work).

      #!/bin/sh
      python /C/Python27/Scripts/scons.py "${@}"

"""
from __future__ import absolute_import, division, print_function, unicode_literals

import argparse
import itertools
import os
import platform
import re
import subprocess
import sys

IS_WINDOWS = platform.system().lower() == 'windows'

if IS_WINDOWS:
    BINARY_RE = re.compile(r'build\\([^\\]+)\\rippled.exe')
else:
    BINARY_RE = re.compile(r'build/([^/]+)/rippled')

ALL_TARGETS = ['debug', 'release']

parser = argparse.ArgumentParser(
    description='Test.py - run ripple tests'
)

parser.add_argument(
    '--all', '-a',
    action='store_true',
    help='Build all configurations.',
)

parser.add_argument(
    '--keep_going', '-k',
    action='store_true',
    help='Keep going after one configuration has failed.',
)

parser.add_argument(
    '--silent', '-s',
    action='store_true',
    help='Silence all messages except errors',
)

parser.add_argument(
    '--verbose', '-v',
    action='store_true',
    help=('Report more information about which commands are executed and the '
          'results.'),
)

parser.add_argument(
    '--test', '-t',
    default='',
    help='Add a prefix for unit tests',
)

parser.add_argument(
    'scons_args',
    default=(),
    nargs='*'
)

ARGS = parser.parse_args()

def shell(*cmd, **kwds):
    "Execute a shell command and return the output."
    silent = kwds.pop('silent', ARGS.silent)
    verbose = not silent and kwds.pop('verbose', ARGS.verbose)
    if verbose:
        print('$', ' '.join(cmd))
    kwds['shell'] = IS_WINDOWS

    process = subprocess.Popen(
        cmd,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        **kwds)
    lines = []
    count = 0
    for line in process.stdout:
        lines.append(line)
        if verbose:
            print(line, end='')
        elif not silent:
            count += 1
            if count >= 80:
                print()
                count = 0
            else:
                print('.', end='')

    if not verbose and count:
        print()
    process.wait()
    return process.returncode, lines

if __name__ == '__main__':
    args = list(ARGS.scons_args)
    if ARGS.all:
        for a in ALL_TARGETS:
            if a not in args:
                args.append(a)
    print('Building:', *(args or ['(default)']))

    # Build everything.
    resultcode, lines = shell('scons', *args)
    if resultcode:
        print('Build FAILED:')
        if not ARGS.verbose:
            print(*lines, sep='')
        exit(1)

    # Now extract the executable names and corresponding targets.
    failed = []
    _, lines = shell('scons', '-n', '--tree=derived', *args, silent=True)
    for line in lines:
        match = BINARY_RE.search(line)
        if match:
            executable, target = match.group(0, 1)

            print('Unit tests for', target)
            testflag = '--unittest'
            if ARGS.test:
                testflag += ('=' + ARGS.test)

            resultcode, lines = shell(executable, testflag)
            if resultcode:
                print('ERROR:', *lines, sep='')
                failed.append([target, 'unittest'])
                if not ARGS.keep_going:
                    break
            ARGS.verbose and print(*lines, sep='')

            print('npm tests for', target)
            resultcode, lines = shell('npm', 'test', '--rippled=' + executable)
            if resultcode:
                print('ERROR:\n', *lines, sep='')
                failed.append([target, 'npm'])
                if not ARGS.keep_going:
                    break
            else:
                ARGS.verbose and print(*lines, sep='')

    if failed:
        print('FAILED:', *(':'.join(f) for f in failed))
        exit(1)
    else:
        print('Success')
@@ -1,28 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio Express 2013 for Windows Desktop
-VisualStudioVersion = 12.0.31101.0
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}"
-EndProject
-Global
-GlobalSection(SolutionConfigurationPlatforms) = preSolution
-Debug|Win32 = Debug|Win32
-Debug|x64 = Debug|x64
-Release|Win32 = Release|Win32
-Release|x64 = Release|x64
-EndGlobalSection
-GlobalSection(ProjectConfigurationPlatforms) = postSolution
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Debug|Win32.ActiveCfg = debug|x64
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Debug|Win32.Build.0 = debug|x64
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Debug|x64.ActiveCfg = debug|x64
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Debug|x64.Build.0 = debug|x64
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Release|Win32.ActiveCfg = release|x64
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Release|Win32.Build.0 = release|x64
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Release|x64.ActiveCfg = release|x64
-{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Release|x64.Build.0 = release|x64
-EndGlobalSection
-GlobalSection(SolutionProperties) = preSolution
-HideSolutionNode = FALSE
-EndGlobalSection
-EndGlobal
339  Builds/VisualStudio2015/README.md  (new file)
@@ -0,0 +1,339 @@
# Visual Studio 2015 Build Instructions

## Important

We do not recommend Windows for rippled production use at this time. Currently, the Ubuntu
platform has received the highest level of quality assurance, testing, and support.

## Prerequisites

To clone the source code repository, create branches for inspection or modification,
build rippled under Visual Studio, and run the unit tests you will need these
software components:

* [Visual Studio 2015](https://www.visualstudio.com/)
* [Git for Windows](http://git-scm.com/)
* [Google Protocol Buffers Compiler](https://code.google.com/p/protobuf/source/checkout)
* [OpenSSL Library](README.md#install-openssl)
* [ActivePerl](https://www.activestate.com/activeperl/downloads)
(Recommended to build OpenSSL.)
* [Boost 1.59 library](http://www.boost.org/users/download/)
* [Node.js](http://nodejs.org/download/)

Any version of Visual Studio 2015 may be used, including the Visual Studio Community
Edition which is available under a free license.

## Install Software

### Install Visual Studio 2015

If not already installed on your system, download your choice of installers from the
[Visual Studio 2015 Download](https://www.visualstudio.com/downloads/download-visual-studio-vs)
page, run the installer, and follow the directions.
The **Visual Studio 2015 Community** edition is available for free, while paid editions
may be used for an free initial trial period.

### Install Git for Windows

Git is a distributed revision control system. The Windows version also provides the
bash shell and many Windows versions of Unix commands. While there are other
varieties of Git (such as TortoiseGit, which has a native Windows interface and
integrates with the Explorer shell), we recommend installing
[Git for Windows](https://git-scm.com/) since
it provides a Unix-like command line environment useful for running shell scripts.
Use of the bash shell under Windows is mandatory for running the unit tests.

* NOTE: To gain full featured access to the
[git-subtree](https://blogs.atlassian.com/2013/05/alternatives-to-git-submodule-git-subtree/)
functionality used in the rippled repository we suggest Git version 1.8.3.2 or later.

### Install Google Protocol Buffers Compiler

Building rippled requires **protoc.exe** version 2.5.1 or later. At your option you
may build it yourself from the sources in the
[Google Protocol Buffers](https://github.com/google/protobuf) repository,
or you may download a
[protoc.exe](https://ripple.github.io/Downloads/protoc/2.5.1/protoc.exe)
([alternate link](https://github.com/ripple/Downloads/raw/gh-pages/protoc/2.5.1/protoc.exe))
precompiled Windows executable from the
[Ripple Organization](https://github.com/ripple).

Either way, once you have the required version of **protoc.exe**, copy it into
a folder in your command line `%PATH%`.

* **NOTE:** If you use an older version of the compiler, the build will fail with
errors related to a mismatch of the version of protocol buffer headers versus
the compiler.

### Install ActivePerl

If not already installed on your system, download your choice of installers from the
[Activeperl Download](https://www.activestate.com/activeperl/downloads)
page, run the installer, and follow the directions.

## Configure Dependencies

### Install OpenSSL

1. Download OpenSSL *v1.0.2d or higher* source https://www.openssl.org/source/
2. Unpack the source archive into a temporary folder.
3. Open `cmd.exe`. Change the the folder where you unpacked OpenSSL.
4. Build the 64-bit libraries: (
[Reference 1](http://developer.covenanteyes.com/building-openssl-for-visual-studio/),
[Reference 2](http://www.p-nand-q.com/programming/windows/building_openssl_with_visual_studio_2013.html))

```powershell
"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\vcvars32.bat"
"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64\vcvars64.bat"

perl Configure VC-WIN64A --prefix=C:\lib\openssl-VC-64
ms\do_win64a
nmake -f ms\nt.mak
nmake -f ms\nt.mak install

copy tmp32\lib.pdb C:\lib\openssl-VC-64\lib\
```
5. Optionally, delete the temporary folder.

* NOTE: Since rippled links statically to OpenSSL, it does not matter where the OpenSSL
.DLL files are placed, or what version they are. rippled does not use or require any
external .DLL files to run other than the standard operating system ones.

### Build Boost

After downloading boost and unpacking it, open a **Developer Command Prompt** for
Visual Studio, change to the directory containing boost, then bootstrap the build tools:

```powershell
cd C:\lib\boost_1_59_0
bootstrap
```

The rippled application is linked statically to the standard runtimes and external
dependencies on Windows, to ensure that the behavior of the executable is not
affected by changes in outside files. Therefore, it is necessary to build the
required boost static libraries using this command:

```powershell
bjam --toolset=msvc-14.0 --build-type=complete variant=debug,release link=static runtime-link=static address-model=64
```

Building the boost libraries may take considerable time. When the build process
is completed, take note of both the reported compiler include paths and linker
library paths as they will be required later.

* NOTE: If older versions of Visual Studio are also installed, the build may fail.
If this happens, make sure that only Visual Studio 2015 is installed. Due to
defects in the uninstallation procedures of these Microsoft products, it may
be necessary to start with a fresh install of the operating system with only
the necessary development environment components installed to have a successful build.

### Clone the rippled repository

If you are familiar with cloning github repositories, just follow your normal process
and clone `git@github.com:ripple/rippled.git`. Otherwise follow this section for instructions.

1. If you don't have a github account, sign up for one at
[github.com](https://github.com/).
2. Make sure you have Github ssh keys. For help see
[generating-ssh-keys](https://help.github.com/articles/generating-ssh-keys).

Open the "Git Bash" shell that was installed with "Git for Windows" in the
step above. Navigate to the directory where you want to clone rippled (git
bash uses `/c` for windows's `C:` and forward slash where windows uses
backslash, so `C:\Users\joe\projs` would be `/c/Users/joe/projs` in git bash).
Now clone the repository and optionally switch to the *master* branch.
Type the following at the bash prompt:

```powershell
git clone git@github.com:ripple/rippled.git
cd rippled
git checkout master
```

* If you receive an error about not having the "correct access rights"
make sure you have Github ssh keys, as described above.

### Configure Library Paths

Open the solution file located at **Builds/Visual Studio 2015/ripple.sln**
and select the "View->Other Windows->Property Manager" to bring up the Property Manager.
Expand the *debug | x64* section (or similar section on 32-windows) and
double click the *Microsoft.Cpp.x64.user* property sheet to bring up the
*Property Pages* dialog (these sections will be called *Win32* instead of
*x64* on 32-bit windows). These are global properties applied to all
64-bit build targets:

Go to *C/C++, General, Additional Include Directories* and add the
location of the boost installation:

Then, go to *Linker, General, Additional Library Directories* and add
the location of the compiled boost libraries reported at the completion
of building the boost libraries:

Follow the same procedure for adding the `Additional Include Directories`
and `Additional Library Directories` required for OpenSSL. In our example
these directories are **C:\lib\openssl-VC-64\include** and
**C:\lib\openssl-VC-64\lib** respectively.

# Setup Environment

## Create a working directory for rippled.cfg

The rippled server uses the [Rippled.cfg](https://wiki.ripple.com/Rippled.cfg)
file to read its configuration parameters. This section describes setting up
a directory to hold the config file. The next sections describe how to tell
the rippled server where that file is.

1. Create a directory to hold the configuration file. In this example, the
ripple config directory was created in `C:\Users\joe\ripple\config`.
2. Copy the example config file located in `doc\rippled-example.cfg` to the
new directory and rename it "rippled.cfg".
3. Read the rippled.cfg file and edit as appropriate.

## Change the Visual Studio Projects Debugging Properties

1. If not already open, open the solution file located at **Builds/Visual Studio 2015/Ripple.sln**
2. Select the correct solution platform in the solution platform dropdown (either *x64*
or *Win32* depending on machine type).
3. Select the "Project->Properties" menu item to bring up RippleD's Properties Pages
4. In "Configuration Properties" select "Debugging".
5. In the upper-left Configurations drop down, select "All Configurations".
6. In "Debugger to Launch" select "Local Windows Debugger".

### Tell rippled where to find the configuration file.

The `--conf` command-line switch to tell rippled where to find this file.
In the "Command Arguments" field in the properties dialog (that you opened
in the above section), add: `--conf="C:/Users/joe/ripple/config/rippled.cfg"`
(of course replacing that path with the path you set up above).

### Set the _NO_DEBUG_HEAP Environment Variable

Rippled can run very slowly in the debugger when using the Windows Debug Heap.
Set the `_NO_DEBUG_HEAP` environment variable to one to disable the debug heap.
In the "Environment" field (that you opened in the above section), add:
`_NO_DEBUG_HEAP=1`

# Build

After these steps are complete, rippled should be ready to build. Simply
set rippled as the startup project by right clicking on it in the
Visual Studio Solution Explorer, choose **Set as Startup Project**,
and then choose the **Build->Build Solution** menu item.

# Unit Tests (Recommended)

The external rippled unit tests are written in Javascript using Node.js,
and utilize the mocha unit test framework. To run the unit tests, it
will be necessary to perform the following steps:

## Install Node.js

[Install Node.js](http://nodejs.org/download/). We recommend the Windows
installer (**.msi** file) as it takes care of updating the *PATH* environment
variable so that scripts can find the command. On Windows systems,
**Node.js** comes with **npm**. A separate installation of **npm**
is not necessary.

## Create node_modules

Open a windows console. From the root of your local rippled repository
directory, invoke **npm** to bring in the necessary components:

```
npm install
```

If you get an error that looks like

```
Error: ENOENT, stat 'C:\Users\username\AppData\Roaming\npm'
```

simply create the indicated folder and try again.

## Create a test config.js

From a *bash* shell (installed with Git for Windows), copy the
example configuration file into the appropriate location:

```
cp test/config-example.js test/config.js
```

Edit your version of test/config.js to reflect the correct path to the rippled executable:

```
exports.default_server_config = {
// Where to find the binary.
rippled_path: path.resolve(__dirname, "../build/msvc.debug/rippled.exe")
};
```

Also in **test/config.js**, change any occurrences of the
IP address *0.0.0.0* to *127.0.0.1*.

## Run Tests

From a windows console, run the unit tests:

```
npm test
```

Alternatively, run an individual test using mocha:

```
sh
node_modules/mocha/bin/mocha test/account_tx-test.js
```

* NOTE: The version of ripple-lib provided by the npm install
facility is usually slightly behind the develop branch of the
authoritative ripple-lib repository. Therefore, some tests might fail.

## Development ripple-lib

To use the latest branch of **ripple-lib** during the unit tests,
first clone the repository in a new location outside of your rippled
repository. Then update the submodules. After, run **npm install**
to set up the **node_modules** directory. Finally, install the
**grunt** command line tools required to run **grunt** and
build **ripple-lib**.

```
git clone git@github.com:ripple/ripple-lib.git
cd ripple-lib
git submodule update --init
npm install
npm install -g grunt-cli
grunt
```

Now link this version of **ripple-lib** into the global packages:

```
sudo npm link
```

To make rippled use the newly linked global **ripple-lib** package
instead of the one installed under **node_modules**, change
directories to the local rippled repository and delete the old
**ripple-lib** then link to the new one:

```
sh
rm -rf node_modules/ripple-lib
npm link ripple-lib
```
5046  Builds/VisualStudio2015/RippleD.vcxproj  (new file; diff not rendered)
BIN  Builds/VisualStudio2015/images/NoDebugHeapPropPage.png  (new file, 34 KiB)
BIN  Builds/VisualStudio2015/images/VS2015x64IncludeDirs.png  (new file, 62 KiB)
BIN  Builds/VisualStudio2015/images/VS2015x64LibraryDirs.png  (new file, 49 KiB)
BIN  Builds/VisualStudio2015/images/VS2015x64Properties.png  (new file, 15 KiB)
BIN  Builds/VisualStudio2015/images/VSCommandArgsPropPage.png  (new file, 19 KiB)
28  Builds/VisualStudio2015/ripple.sln  (new file)
@@ -0,0 +1,28 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 14
VisualStudioVersion = 14.0.23107.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
debug.classic|x64 = debug.classic|x64
debug|x64 = debug|x64
release.classic|x64 = release.classic|x64
release|x64 = release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.ActiveCfg = debug.classic|x64
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.Build.0 = debug.classic|x64
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.ActiveCfg = debug|x64
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.Build.0 = debug|x64
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.ActiveCfg = release.classic|x64
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.Build.0 = release.classic|x64
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.ActiveCfg = release|x64
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.Build.0 = release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
@@ -1,14 +1,18 @@
+%define rippled_branch %(echo $RIPPLED_BRANCH)
 Name: rippled
-Version: 0.27.4
-Release: 1%{?dist}
+# Version must be limited to MAJOR.MINOR.PATCH
+Version: 0.30.0
+# Release should include either the build or hotfix number (ex: hf1%{?dist} or b2%{?dist})
+# If there is no b# or hf#, then use 1%{?dist}
+Release: rc1%{?dist}
 Summary: Ripple peer-to-peer network daemon
 
 Group: Applications/Internet
 License: ISC
 URL: https://github.com/ripple/rippled
 
-# curl -L -o SOURCES/rippled-release.zip https://github.com/ripple/rippled/archive/release.zip
-Source0: rippled-release.zip
+# curl -L -o SOURCES/rippled-release.zip https://github.com/ripple/rippled/archive/${RIPPLED_BRANCH}.zip
+Source0: rippled-%{rippled_branch}.zip
 BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
 
 BuildRequires: gcc-c++ scons openssl-devel protobuf-devel
@@ -20,13 +24,11 @@ Rippled is the server component of the Ripple network.
 
 
 %prep
-%setup -n rippled-release
+%setup -n rippled-%{rippled_branch}
 
 
 %build
-# Assume boost is manually installed
-export RIPPLED_BOOST_HOME=/usr/local/boost_1_55_0
-scons -j `grep -c processor /proc/cpuinfo` build/rippled
+scons -j `grep -c processor /proc/cpuinfo`
 
 
 %install
@@ -49,4 +51,4 @@ rm -rf %{buildroot}
 %defattr(-,root,root,-)
 /usr/bin/rippled
 /usr/share/rippled/LICENSE
-/etc/rippled/rippled-example.cfg
+%config(noreplace) /etc/rippled/rippled.cfg
16  README.md
@@ -39,7 +39,7 @@ lowest practical level.
 **What can you do with Ripple?**
 
 The protocol is entirely open-source and the network’s shared ledger is public
-information, so no central authority prevents anyone from participating.Anyone
+information, so no central authority prevents anyone from participating. Anyone
 can become a market maker, create a wallet or a gateway, or monitor network
 behavior. Competition drives down spreads and fees, making the network useful
 to everyone.
@@ -49,12 +49,12 @@ to everyone.
 1. XRP is Ripple’s native [cryptocurrency]
 (http://en.wikipedia.org/wiki/Cryptocurrency) with a fixed supply that
 decreases slowly over time, with no mining. XRP acts as a bridge currency, and
-pays for transaction fees that protect the network against spam
+pays for transaction fees that protect the network against spam.
 
 
 2. Pathfinding discovers cheap and efficient payment paths through multiple
 [order books](https://www.ripplecharts.com) allowing anyone to [trade](https://www.rippletrade.com) anything. When two accounts aren’t linked by relationships of trust, the Ripple pathfinding engine considers intermediate links and order books to produce a set of possible paths the transaction can take. When the payment is processed, the liquidity along these paths is iteratively consumed in best-first order.
 
 
 3. [Consensus](https://www.youtube.com/watch?v=pj1QVb1vlC0) confirms
 transactions in an atomic fashion, without mining, ensuring efficient use of
@@ -84,6 +84,9 @@ This is the repository for Ripple's `rippled`, reference P2P server.
 ###Setup instructions:
 * https://ripple.com/wiki/Rippled_setup_instructions
 
+###Issues
+* https://ripplelabs.atlassian.net/browse/RIPD
+
 ### Repository Contents
 
 #### ./bin
@@ -117,3 +120,10 @@ LICENSE file for more details.
 * Ripple Gateway Primer - https://ripple.com/ripple-gateways.pdf
 * Consensus - https://wiki.ripple.com/Consensus
 
+- - -
+
+Copyright © 2015, Ripple Labs. All rights reserved.
+
+Portions of this document, including but not limited to the Ripple logo, images
+and image templates are the property of Ripple Labs and cannot be copied or
+used without permission.
427 SConstruct
@@ -15,11 +15,13 @@
 
     clang          All clang variants
     clang.debug    clang debug variant
+    clang.coverage clang coverage variant
     clang.release  clang release variant
     clang.profile  clang profile variant
 
     gcc            All gcc variants
     gcc.debug      gcc debug variant
+    gcc.coverage   gcc coverage variant
     gcc.release    gcc release variant
     gcc.profile    gcc profile variant
 
@@ -40,6 +42,44 @@ If the clang toolchain is detected, then the default target will use it, else
 the gcc toolchain will be used. On Windows environments, the MSVC toolchain is
 also detected.
 
+The following environment variables modify the build environment:
+    CLANG_CC
+    CLANG_CXX
+    CLANG_LINK
+      If set, a clang toolchain will be used. These must all be set together.
+
+    GNU_CC
+    GNU_CXX
+    GNU_LINK
+      If set, a gcc toolchain will be used (unless a clang toolchain is
+      detected first). These must all be set together.
+
+    CXX
+      If set, used to detect a toolchain.
+
+    BOOST_ROOT
+      Path to the boost directory.
+    OPENSSL_ROOT
+      Path to the openssl directory.
+
+The following extra options may be used:
+    --ninja     Generate a `build.ninja` build file for the specified target
+                (see: https://martine.github.io/ninja/). Only gcc and clang targets
+                are supported.
+
+GCC 5 support: There is transitional support for user-installed gcc 5. Setting
+the environment variable: `RIPPLED_OLD_GCC_ABI` to one enables the transitional
+support. Due to an ABI change between gcc 4 and gcc 5, it is assumed all
+libraries are built with the old, gcc 4 ABI. Since no linux distro has upgraded
+to gcc 5, this allows us to use the package manager to install rippled
+dependencies and to easily switch between gcc 4 and gcc 5. It also means if the
+user builds C++ dependencies themselves - such as boost - they must either be
+built with gcc 4 or with the preprocessor flag `_GLIBCXX_USE_CXX11_ABI` set to
+zero. When linux distros upgrade to gcc 5, the transitional support will be
+removed. To enable C++-14 support, define the environment variable `RIPPLED_USE_CPP_14`
+to one. This is also transitional and will be removed when we permanently enable C++ 14
+support.
+
 '''
 #
 '''
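For orientation, a minimal sketch of how the variables and the --ninja option documented above might be combined on a Linux machine. The paths, the target name, and the use of a Python driver are assumptions for illustration only:

    # Hypothetical driver for the build options described in the help text above.
    import os
    import subprocess

    env = dict(os.environ)
    env['BOOST_ROOT'] = '/opt/boost_1_57_0'    # assumed boost source tree location
    env['OPENSSL_ROOT'] = '/opt/openssl'       # assumed openssl install location
    env['RIPPLED_OLD_GCC_ABI'] = '1'           # opt in to the transitional gcc 5 support

    # Generate build.ninja for a single gcc debug target, as --ninja requires.
    subprocess.check_call(['scons', '--ninja', 'gcc.debug'], env=env)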
@@ -63,11 +103,16 @@ import time
|
|||||||
import SCons.Action
|
import SCons.Action
|
||||||
|
|
||||||
sys.path.append(os.path.join('src', 'beast', 'site_scons'))
|
sys.path.append(os.path.join('src', 'beast', 'site_scons'))
|
||||||
|
sys.path.append(os.path.join('src', 'ripple', 'site_scons'))
|
||||||
|
|
||||||
import Beast
|
import Beast
|
||||||
|
import scons_to_ninja
|
||||||
|
|
||||||
#------------------------------------------------------------------------------
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
AddOption('--ninja', dest='ninja', action='store_true',
|
||||||
|
help='generate ninja build file build.ninja')
|
||||||
|
|
||||||
def parse_time(t):
|
def parse_time(t):
|
||||||
return time.strptime(t, '%a %b %d %H:%M:%S %Z %Y')
|
return time.strptime(t, '%a %b %d %H:%M:%S %Z %Y')
|
||||||
|
|
||||||
@@ -78,6 +123,7 @@ BUILD_TIME = 'Mon Apr 7 20:33:19 UTC 2014'
|
|||||||
OPENSSL_ERROR = ('Your openSSL was built on %s; '
|
OPENSSL_ERROR = ('Your openSSL was built on %s; '
|
||||||
'rippled needs a version built on or after %s.')
|
'rippled needs a version built on or after %s.')
|
||||||
UNITY_BUILD_DIRECTORY = 'src/ripple/unity/'
|
UNITY_BUILD_DIRECTORY = 'src/ripple/unity/'
|
||||||
|
USE_CPP_14 = os.getenv('RIPPLED_USE_CPP_14')
|
||||||
|
|
||||||
def check_openssl():
|
def check_openssl():
|
||||||
if Beast.system.platform in CHECK_PLATFORMS:
|
if Beast.system.platform in CHECK_PLATFORMS:
|
||||||
@@ -93,6 +139,29 @@ def check_openssl():
|
|||||||
(CHECK_LINE, CHECK_COMMAND))
|
(CHECK_LINE, CHECK_COMMAND))
|
||||||
|
|
||||||
|
|
||||||
|
def set_implicit_cache():
|
||||||
|
'''Use implicit_cache on some targets to improve build times.
|
||||||
|
|
||||||
|
By default, scons scans each file for include dependencies. The implicit
|
||||||
|
cache flag lets you cache these dependencies for later builds, and will
|
||||||
|
only rescan files that change.
|
||||||
|
|
||||||
|
Failure cases are:
|
||||||
|
1) If the include search paths are changed (i.e. CPPPATH), then a file
|
||||||
|
may not be rebuilt.
|
||||||
|
2) If a same-named file has been added to a directory that is earlier in
|
||||||
|
the search path than the directory in which the file was found.
|
||||||
|
Turn on if this build is for a specific debug target (i.e. clang.debug)
|
||||||
|
|
||||||
|
If one of the failure cases applies, you can force a rescan of dependencies
|
||||||
|
using the command line option `--implicit-deps-changed`
|
||||||
|
'''
|
||||||
|
if len(COMMAND_LINE_TARGETS) == 1:
|
||||||
|
s = COMMAND_LINE_TARGETS[0].split('.')
|
||||||
|
if len(s) > 1 and 'debug' in s:
|
||||||
|
SetOption('implicit_cache', 1)
|
||||||
|
|
||||||
|
|
||||||
def import_environ(env):
|
def import_environ(env):
|
||||||
'''Imports environment settings into the construction environment'''
|
'''Imports environment settings into the construction environment'''
|
||||||
def set(keys):
|
def set(keys):
|
||||||
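As a rough illustration of the rule that set_implicit_cache() (introduced in this hunk) applies, written as plain Python rather than SCons code:

    # Only a single command-line target whose dotted name contains 'debug'
    # turns on scons' implicit_cache, per set_implicit_cache() above.
    def would_enable_cache(command_line_targets):
        if len(command_line_targets) == 1:
            parts = command_line_targets[0].split('.')
            return len(parts) > 1 and 'debug' in parts
        return False

    assert would_enable_cache(['clang.debug'])
    assert not would_enable_cache(['gcc.release'])
    assert not would_enable_cache(['clang.debug', 'gcc.debug'])  # more than one target

If one of the failure cases listed in the docstring applies, `scons --implicit-deps-changed <target>` forces a rescan, as the docstring notes.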
@@ -197,6 +266,9 @@ def print_coms(target, source, env):
|
|||||||
# TODO Add 'PROTOCCOM' to this list and make it work
|
# TODO Add 'PROTOCCOM' to this list and make it work
|
||||||
Beast.print_coms(['CXXCOM', 'CCCOM', 'LINKCOM'], env)
|
Beast.print_coms(['CXXCOM', 'CCCOM', 'LINKCOM'], env)
|
||||||
|
|
||||||
|
def is_debug_variant(variant):
|
||||||
|
return variant in ('debug', 'coverage')
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
|
|
||||||
# Set construction variables for the base environment
|
# Set construction variables for the base environment
|
||||||
@@ -212,13 +284,23 @@ def config_base(env):
|
|||||||
env.Append(CPPDEFINES=[
|
env.Append(CPPDEFINES=[
|
||||||
'OPENSSL_NO_SSL2'
|
'OPENSSL_NO_SSL2'
|
||||||
,'DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER'
|
,'DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER'
|
||||||
|
,{'HAVE_USLEEP' : '1'}
|
||||||
|
,{'SOCI_CXX_C11' : '1'}
|
||||||
|
,'_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS'
|
||||||
|
])
|
||||||
|
|
||||||
|
if USE_CPP_14:
|
||||||
|
env.Append(CPPDEFINES=[
|
||||||
|
'-DBEAST_NO_CXX14_COMPATIBILITY',
|
||||||
|
'-DBEAST_NO_CXX14_INTEGER_SEQUENCE',
|
||||||
|
'-DBEAST_NO_CXX14_MAKE_UNIQUE',
|
||||||
|
'-DBEAST_NO_CXX14_EQUAL',
|
||||||
|
'-DBOOST_NO_AUTO_PTR',
|
||||||
|
'-DBEAST_NO_CXX14_MAKE_REVERSE_ITERATOR',
|
||||||
])
|
])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
BOOST_ROOT = os.path.normpath(os.environ['BOOST_ROOT'])
|
BOOST_ROOT = os.path.normpath(os.environ['BOOST_ROOT'])
|
||||||
env.Append(CPPPATH=[
|
|
||||||
BOOST_ROOT,
|
|
||||||
])
|
|
||||||
env.Append(LIBPATH=[
|
env.Append(LIBPATH=[
|
||||||
os.path.join(BOOST_ROOT, 'stage', 'lib'),
|
os.path.join(BOOST_ROOT, 'stage', 'lib'),
|
||||||
])
|
])
|
||||||
@@ -233,7 +315,7 @@ def config_base(env):
|
|||||||
os.path.join(OPENSSL_ROOT, 'include'),
|
os.path.join(OPENSSL_ROOT, 'include'),
|
||||||
])
|
])
|
||||||
env.Append(LIBPATH=[
|
env.Append(LIBPATH=[
|
||||||
os.path.join(OPENSSL_ROOT, 'lib', 'VC', 'static'),
|
os.path.join(OPENSSL_ROOT, 'lib'),
|
||||||
])
|
])
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
@@ -253,14 +335,31 @@ def config_base(env):
|
|||||||
env.Append(CPPPATH=[os.path.join(profile_jemalloc, 'include')])
|
env.Append(CPPPATH=[os.path.join(profile_jemalloc, 'include')])
|
||||||
env.Append(LINKFLAGS=['-Wl,-rpath,' + os.path.join(profile_jemalloc, 'lib')])
|
env.Append(LINKFLAGS=['-Wl,-rpath,' + os.path.join(profile_jemalloc, 'lib')])
|
||||||
|
|
||||||
|
def gccStdLibDir():
|
||||||
|
try:
|
||||||
|
for l in subprocess.check_output(['gcc', '-v'], stderr=subprocess.STDOUT).split():
|
||||||
|
if l.startswith('--prefix'):
|
||||||
|
return l.split('=')[1] + '/lib64'
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
raise SCons.UserError('Could not find gccStdLibDir')
|
||||||
|
|
||||||
# Set toolchain and variant specific construction variables
|
# Set toolchain and variant specific construction variables
|
||||||
def config_env(toolchain, variant, env):
|
def config_env(toolchain, variant, env):
|
||||||
if variant == 'debug':
|
if is_debug_variant(variant):
|
||||||
env.Append(CPPDEFINES=['DEBUG', '_DEBUG'])
|
env.Append(CPPDEFINES=['DEBUG', '_DEBUG'])
|
||||||
|
|
||||||
elif variant == 'release' or variant == 'profile':
|
elif variant == 'release' or variant == 'profile':
|
||||||
env.Append(CPPDEFINES=['NDEBUG'])
|
env.Append(CPPDEFINES=['NDEBUG'])
|
||||||
|
|
||||||
|
if 'BOOST_ROOT' in env:
|
||||||
|
if toolchain == 'gcc':
|
||||||
|
env.Append(CCFLAGS=['-isystem' + env['BOOST_ROOT']])
|
||||||
|
else:
|
||||||
|
env.Append(CPPPATH=[
|
||||||
|
env['BOOST_ROOT'],
|
||||||
|
])
|
||||||
|
|
||||||
if toolchain in Split('clang gcc'):
|
if toolchain in Split('clang gcc'):
|
||||||
if Beast.system.linux:
|
if Beast.system.linux:
|
||||||
env.ParseConfig('pkg-config --static --cflags --libs openssl')
|
env.ParseConfig('pkg-config --static --cflags --libs openssl')
|
||||||
@@ -297,7 +396,7 @@ def config_env(toolchain, variant, env):
|
|||||||
|
|
||||||
env.Append(CXXFLAGS=[
|
env.Append(CXXFLAGS=[
|
||||||
'-frtti',
|
'-frtti',
|
||||||
'-std=c++11',
|
'-std=c++14' if USE_CPP_14 else '-std=c++11',
|
||||||
'-Wno-invalid-offsetof'])
|
'-Wno-invalid-offsetof'])
|
||||||
|
|
||||||
env.Append(CPPDEFINES=['_FILE_OFFSET_BITS=64'])
|
env.Append(CPPDEFINES=['_FILE_OFFSET_BITS=64'])
|
||||||
@@ -317,8 +416,24 @@ def config_env(toolchain, variant, env):
|
|||||||
])
|
])
|
||||||
else:
|
else:
|
||||||
if toolchain == 'gcc':
|
if toolchain == 'gcc':
|
||||||
|
if os.getenv('RIPPLED_OLD_GCC_ABI'):
|
||||||
|
gcc_ver = ''
|
||||||
|
try:
|
||||||
|
gcc_ver = subprocess.check_output(['gcc', '-dumpversion'],
|
||||||
|
stderr=subprocess.STDOUT).strip()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
if gcc_ver.startswith('5'):
|
||||||
|
# remove rpath and CXX11_ABI flag when distro uses
|
||||||
|
# non-user installed gcc 5
|
||||||
|
env.Append(CPPDEFINES={
|
||||||
|
'-D_GLIBCXX_USE_CXX11_ABI' : 0
|
||||||
|
})
|
||||||
|
env.Append(LINKFLAGS=['-Wl,-rpath,' + gccStdLibDir()])
|
||||||
|
|
||||||
env.Append(CCFLAGS=[
|
env.Append(CCFLAGS=[
|
||||||
'-Wno-unused-but-set-variable'
|
'-Wno-unused-but-set-variable',
|
||||||
|
'-Wno-deprecated',
|
||||||
])
|
])
|
||||||
|
|
||||||
boost_libs = [
|
boost_libs = [
|
||||||
@@ -360,6 +475,12 @@ def config_env(toolchain, variant, env):
|
|||||||
'-fno-strict-aliasing'
|
'-fno-strict-aliasing'
|
||||||
])
|
])
|
||||||
|
|
||||||
|
if variant == 'coverage':
|
||||||
|
env.Append(CXXFLAGS=[
|
||||||
|
'-fprofile-arcs', '-ftest-coverage'])
|
||||||
|
env.Append(LINKFLAGS=[
|
||||||
|
'-fprofile-arcs', '-ftest-coverage'])
|
||||||
|
|
||||||
if toolchain == 'clang':
|
if toolchain == 'clang':
|
||||||
if Beast.system.osx:
|
if Beast.system.osx:
|
||||||
env.Replace(CC='clang', CXX='clang++', LINK='clang++')
|
env.Replace(CC='clang', CXX='clang++', LINK='clang++')
|
||||||
@@ -374,6 +495,8 @@ def config_env(toolchain, variant, env):
|
|||||||
env.Append(CXXFLAGS=[
|
env.Append(CXXFLAGS=[
|
||||||
'-Wno-mismatched-tags',
|
'-Wno-mismatched-tags',
|
||||||
'-Wno-deprecated-register',
|
'-Wno-deprecated-register',
|
||||||
|
'-Wno-unused-local-typedefs',
|
||||||
|
'-Wno-unknown-warning-option',
|
||||||
])
|
])
|
||||||
|
|
||||||
elif toolchain == 'gcc':
|
elif toolchain == 'gcc':
|
||||||
@@ -387,7 +510,7 @@ def config_env(toolchain, variant, env):
|
|||||||
# If we are in debug mode, use GCC-specific functionality to add
|
# If we are in debug mode, use GCC-specific functionality to add
|
||||||
# extra error checking into the code (e.g. std::vector will throw
|
# extra error checking into the code (e.g. std::vector will throw
|
||||||
# for out-of-bounds conditions)
|
# for out-of-bounds conditions)
|
||||||
if variant == 'debug':
|
if is_debug_variant(variant):
|
||||||
env.Append(CPPDEFINES={
|
env.Append(CPPDEFINES={
|
||||||
'_FORTIFY_SOURCE': 2
|
'_FORTIFY_SOURCE': 2
|
||||||
})
|
})
|
||||||
@@ -432,8 +555,8 @@ def config_env(toolchain, variant, env):
|
|||||||
'WIN32_CONSOLE',
|
'WIN32_CONSOLE',
|
||||||
])
|
])
|
||||||
env.Append(LIBS=[
|
env.Append(LIBS=[
|
||||||
'ssleay32MT.lib',
|
'ssleay32.lib',
|
||||||
'libeay32MT.lib',
|
'libeay32.lib',
|
||||||
'Shlwapi.lib',
|
'Shlwapi.lib',
|
||||||
'kernel32.lib',
|
'kernel32.lib',
|
||||||
'user32.lib',
|
'user32.lib',
|
||||||
@@ -486,6 +609,7 @@ def config_env(toolchain, variant, env):
|
|||||||
# Configure the base construction environment
|
# Configure the base construction environment
|
||||||
root_dir = Dir('#').srcnode().get_abspath() # Path to this SConstruct file
|
root_dir = Dir('#').srcnode().get_abspath() # Path to this SConstruct file
|
||||||
build_dir = os.path.join('build')
|
build_dir = os.path.join('build')
|
||||||
|
|
||||||
base = Environment(
|
base = Environment(
|
||||||
toolpath=[os.path.join ('src', 'beast', 'site_scons', 'site_tools')],
|
toolpath=[os.path.join ('src', 'beast', 'site_scons', 'site_tools')],
|
||||||
tools=['default', 'Protoc', 'VSProject'],
|
tools=['default', 'Protoc', 'VSProject'],
|
||||||
@@ -497,10 +621,15 @@ base.Append(CPPPATH=[
|
|||||||
'src',
|
'src',
|
||||||
os.path.join('src', 'beast'),
|
os.path.join('src', 'beast'),
|
||||||
os.path.join(build_dir, 'proto'),
|
os.path.join(build_dir, 'proto'),
|
||||||
|
os.path.join('src','soci','src'),
|
||||||
|
os.path.join('src','soci','include'),
|
||||||
])
|
])
|
||||||
|
|
||||||
|
base.Decider('MD5-timestamp')
|
||||||
|
set_implicit_cache()
|
||||||
|
|
||||||
# Configure the toolchains, variants, default toolchain, and default target
|
# Configure the toolchains, variants, default toolchain, and default target
|
||||||
variants = ['debug', 'release', 'profile']
|
variants = ['debug', 'coverage', 'release', 'profile']
|
||||||
all_toolchains = ['clang', 'gcc', 'msvc']
|
all_toolchains = ['clang', 'gcc', 'msvc']
|
||||||
if Beast.system.osx:
|
if Beast.system.osx:
|
||||||
toolchains = ['clang']
|
toolchains = ['clang']
|
||||||
@@ -542,6 +671,7 @@ class ObjectBuilder(object):
|
|||||||
self.env = env
|
self.env = env
|
||||||
self.variant_dirs = variant_dirs
|
self.variant_dirs = variant_dirs
|
||||||
self.objects = []
|
self.objects = []
|
||||||
|
self.child_envs = []
|
||||||
|
|
||||||
def add_source_files(self, *filenames, **kwds):
|
def add_source_files(self, *filenames, **kwds):
|
||||||
for filename in filenames:
|
for filename in filenames:
|
||||||
@@ -549,6 +679,7 @@ class ObjectBuilder(object):
|
|||||||
if kwds:
|
if kwds:
|
||||||
env = env.Clone()
|
env = env.Clone()
|
||||||
env.Prepend(**kwds)
|
env.Prepend(**kwds)
|
||||||
|
self.child_envs.append(env)
|
||||||
o = env.Object(Beast.variantFile(filename, self.variant_dirs))
|
o = env.Object(Beast.variantFile(filename, self.variant_dirs))
|
||||||
self.objects.append(o)
|
self.objects.append(o)
|
||||||
|
|
||||||
@@ -564,14 +695,181 @@ def list_sources(base, suffixes):
|
|||||||
yield os.path.normpath(path)
|
yield os.path.normpath(path)
|
||||||
return list(_iter(base))
|
return list(_iter(base))
|
||||||
|
|
||||||
|
|
||||||
|
def append_sources(result, *filenames, **kwds):
|
||||||
|
result.append([filenames, kwds])
|
||||||
|
|
||||||
|
|
||||||
|
def get_soci_sources(style):
|
||||||
|
result = []
|
||||||
|
cpp_path = [
|
||||||
|
'src/soci/src/core',
|
||||||
|
'src/soci/include/private',
|
||||||
|
'src/sqlite', ]
|
||||||
|
append_sources(result,
|
||||||
|
'src/ripple/unity/soci.cpp',
|
||||||
|
CPPPATH=cpp_path)
|
||||||
|
if style == 'unity':
|
||||||
|
append_sources(result,
|
||||||
|
'src/ripple/unity/soci_ripple.cpp',
|
||||||
|
CPPPATH=cpp_path)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_common_sources(toolchain):
|
||||||
|
result = []
|
||||||
|
if toolchain == 'msvc':
|
||||||
|
warning_flags = {}
|
||||||
|
else:
|
||||||
|
warning_flags = {'CCFLAGS': ['-Wno-unused-function']}
|
||||||
|
append_sources(
|
||||||
|
result,
|
||||||
|
'src/ripple/unity/secp256k1.cpp',
|
||||||
|
CPPPATH=['src/secp256k1'],
|
||||||
|
**warning_flags)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_classic_sources(toolchain):
|
||||||
|
result = []
|
||||||
|
append_sources(
|
||||||
|
result,
|
||||||
|
*list_sources('src/ripple/core', '.cpp'),
|
||||||
|
CPPPATH=[
|
||||||
|
'src/soci/src/core',
|
||||||
|
'src/sqlite']
|
||||||
|
)
|
||||||
|
append_sources(result, *list_sources('src/ripple/app', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/basics', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/crypto', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/json', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/ledger', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/legacy', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/net', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/overlay', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/peerfinder', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/protocol', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/rpc', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/shamap', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/test', '.cpp'))
|
||||||
|
append_sources(result, *list_sources('src/ripple/unl', '.cpp'))
|
||||||
|
|
||||||
|
append_sources(
|
||||||
|
result,
|
||||||
|
*list_sources('src/ripple/nodestore', '.cpp'),
|
||||||
|
CPPPATH=[
|
||||||
|
'src/rocksdb2/include',
|
||||||
|
'src/snappy/snappy',
|
||||||
|
'src/snappy/config',
|
||||||
|
])
|
||||||
|
|
||||||
|
result += get_soci_sources('classic')
|
||||||
|
result += get_common_sources(toolchain)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_unity_sources(toolchain):
|
||||||
|
result = []
|
||||||
|
append_sources(
|
||||||
|
result,
|
||||||
|
'src/ripple/unity/app_ledger.cpp',
|
||||||
|
'src/ripple/unity/app_main.cpp',
|
||||||
|
'src/ripple/unity/app_misc.cpp',
|
||||||
|
'src/ripple/unity/app_paths.cpp',
|
||||||
|
'src/ripple/unity/app_tests.cpp',
|
||||||
|
'src/ripple/unity/app_tx.cpp',
|
||||||
|
'src/ripple/unity/core.cpp',
|
||||||
|
'src/ripple/unity/basics.cpp',
|
||||||
|
'src/ripple/unity/crypto.cpp',
|
||||||
|
'src/ripple/unity/ledger.cpp',
|
||||||
|
'src/ripple/unity/net.cpp',
|
||||||
|
'src/ripple/unity/overlay.cpp',
|
||||||
|
'src/ripple/unity/peerfinder.cpp',
|
||||||
|
'src/ripple/unity/json.cpp',
|
||||||
|
'src/ripple/unity/protocol.cpp',
|
||||||
|
'src/ripple/unity/rpcx.cpp',
|
||||||
|
'src/ripple/unity/shamap.cpp',
|
||||||
|
'src/ripple/unity/test.cpp',
|
||||||
|
'src/ripple/unity/unl.cpp',
|
||||||
|
)
|
||||||
|
|
||||||
|
append_sources(
|
||||||
|
result,
|
||||||
|
'src/ripple/unity/nodestore.cpp',
|
||||||
|
CPPPATH=[
|
||||||
|
'src/rocksdb2/include',
|
||||||
|
'src/snappy/snappy',
|
||||||
|
'src/snappy/config',
|
||||||
|
])
|
||||||
|
|
||||||
|
result += get_soci_sources('unity')
|
||||||
|
result += get_common_sources(toolchain)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
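To make the refactoring in this hunk easier to follow, here is a small standalone sketch of the (filenames, kwds) convention that append_sources() builds up; the build loop later unpacks each pair into ObjectBuilder.add_source_files. The file names below are taken from the lists above:

    def append_sources(result, *filenames, **kwds):
        # Same helper as above: store the file names together with any
        # per-file construction overrides (e.g. CPPPATH).
        result.append([filenames, kwds])

    sources = []
    append_sources(sources, 'src/ripple/unity/soci.cpp', CPPPATH=['src/soci/src/core'])
    append_sources(sources, 'src/ripple/unity/secp256k1.cpp', CPPPATH=['src/secp256k1'])

    for filenames, kwds in sources:
        # The real build does: object_builder.add_source_files(*filenames, **kwds)
        print(filenames, kwds)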
# Declare the targets
|
# Declare the targets
|
||||||
aliases = collections.defaultdict(list)
|
aliases = collections.defaultdict(list)
|
||||||
msvc_configs = []
|
msvc_configs = []
|
||||||
|
|
||||||
|
|
||||||
|
def should_prepare_target(cl_target,
|
||||||
|
style, toolchain, variant):
|
||||||
|
if not cl_target:
|
||||||
|
# default target
|
||||||
|
return (style == default_tu_style and
|
||||||
|
toolchain == default_toolchain and
|
||||||
|
variant == default_variant)
|
||||||
|
if 'vcxproj' in cl_target:
|
||||||
|
return toolchain == 'msvc'
|
||||||
|
s = cl_target.split('.')
|
||||||
|
if style == 'unity' and 'nounity' in s:
|
||||||
|
return False
|
||||||
|
if len(s) == 1:
|
||||||
|
return ('all' in cl_target or
|
||||||
|
variant in cl_target or
|
||||||
|
toolchain in cl_target)
|
||||||
|
if len(s) == 2 or len(s) == 3:
|
||||||
|
return s[0] == toolchain and s[1] == variant
|
||||||
|
|
||||||
|
return True # A target we don't know about, better prepare to build it
|
||||||
|
|
||||||
|
|
||||||
|
def should_prepare_targets(style, toolchain, variant):
|
||||||
|
if not COMMAND_LINE_TARGETS:
|
||||||
|
return should_prepare_target(None, style, toolchain, variant)
|
||||||
|
for t in COMMAND_LINE_TARGETS:
|
||||||
|
if should_prepare_target(t, style, toolchain, variant):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def should_build_ninja(style, toolchain, variant):
|
||||||
|
"""
|
||||||
|
Return True if a ninja build file should be generated.
|
||||||
|
|
||||||
|
Typically, scons will be called as follows to generate a ninja build file:
|
||||||
|
`scons ninja=1 gcc.debug` where `gcc.debug` may be replaced with any of our
|
||||||
|
non-visual studio targets. Raise an exception if we cannot generate the
|
||||||
|
requested ninja build file (for example, if multiple targets are requested).
|
||||||
|
"""
|
||||||
|
if not GetOption('ninja'):
|
||||||
|
return False
|
||||||
|
if len(COMMAND_LINE_TARGETS) != 1:
|
||||||
|
raise Exception('Can only generate a ninja file for a single target')
|
||||||
|
cl_target = COMMAND_LINE_TARGETS[0]
|
||||||
|
if 'vcxproj' in cl_target:
|
||||||
|
raise Exception('Cannot generate a ninja file for a vcxproj')
|
||||||
|
s = cl_target.split('.')
|
||||||
|
if ( style == 'unity' and 'nounity' in s or
|
||||||
|
style == 'classic' and 'nounity' not in s or
|
||||||
|
len(s) == 1 ):
|
||||||
|
return False
|
||||||
|
if len(s) == 2 or len(s) == 3:
|
||||||
|
return s[0] == toolchain and s[1] == variant
|
||||||
|
return False
|
||||||
|
|
||||||
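For readers following the target-selection logic above, this spells out what should_prepare_target() is expected to return for a few sample command-line targets, assuming style='unity', toolchain='gcc', variant='debug' (the target names themselves are hypothetical):

    # Expected results of should_prepare_target(target, 'unity', 'gcc', 'debug').
    cases = {
        'gcc.debug':         True,   # two components, both match
        'clang.debug':       False,  # toolchain differs
        'gcc.debug.nounity': False,  # 'nounity' targets are skipped for the unity style
        'RippleD.vcxproj':   False,  # vcxproj targets are prepared only for msvc
        'debug':             True,   # one-part target naming the variant
        'install':           False,  # one-part target naming neither variant nor toolchain
    }
    for target, expected in cases.items():
        print(target, expected)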
for tu_style in ['classic', 'unity']:
|
for tu_style in ['classic', 'unity']:
|
||||||
for toolchain in all_toolchains:
|
for toolchain in all_toolchains:
|
||||||
for variant in variants:
|
for variant in variants:
|
||||||
if variant == 'profile' and toolchain == 'msvc':
|
if not should_prepare_targets(tu_style, toolchain, variant):
|
||||||
|
continue
|
||||||
|
if variant in ['profile', 'coverage'] and toolchain == 'msvc':
|
||||||
continue
|
continue
|
||||||
# Configure this variant's construction environment
|
# Configure this variant's construction environment
|
||||||
env = base.Clone()
|
env = base.Clone()
|
||||||
@@ -592,65 +890,11 @@ for tu_style in ['classic', 'unity']:
|
|||||||
object_builder = ObjectBuilder(env, variant_dirs)
|
object_builder = ObjectBuilder(env, variant_dirs)
|
||||||
|
|
||||||
if tu_style == 'classic':
|
if tu_style == 'classic':
|
||||||
object_builder.add_source_files(
|
sources = get_classic_sources(toolchain)
|
||||||
*list_sources('src/ripple/app', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/basics', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/core', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/crypto', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/json', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/net', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/overlay', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/peerfinder', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/protocol', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/shamap', '.cpp'))
|
|
||||||
object_builder.add_source_files(
|
|
||||||
*list_sources('src/ripple/nodestore', '.cpp'),
|
|
||||||
CPPPATH=[
|
|
||||||
'src/leveldb/include',
|
|
||||||
'src/rocksdb2/include',
|
|
||||||
'src/snappy/snappy',
|
|
||||||
'src/snappy/config',
|
|
||||||
])
|
|
||||||
else:
|
else:
|
||||||
object_builder.add_source_files(
|
sources = get_unity_sources(toolchain)
|
||||||
'src/ripple/unity/app.cpp',
|
for s, k in sources:
|
||||||
'src/ripple/unity/app1.cpp',
|
object_builder.add_source_files(*s, **k)
|
||||||
'src/ripple/unity/app2.cpp',
|
|
||||||
'src/ripple/unity/app3.cpp',
|
|
||||||
'src/ripple/unity/app4.cpp',
|
|
||||||
'src/ripple/unity/app5.cpp',
|
|
||||||
'src/ripple/unity/app6.cpp',
|
|
||||||
'src/ripple/unity/app7.cpp',
|
|
||||||
'src/ripple/unity/app8.cpp',
|
|
||||||
'src/ripple/unity/app9.cpp',
|
|
||||||
'src/ripple/unity/core.cpp',
|
|
||||||
'src/ripple/unity/basics.cpp',
|
|
||||||
'src/ripple/unity/crypto.cpp',
|
|
||||||
'src/ripple/unity/net.cpp',
|
|
||||||
'src/ripple/unity/overlay.cpp',
|
|
||||||
'src/ripple/unity/peerfinder.cpp',
|
|
||||||
'src/ripple/unity/json.cpp',
|
|
||||||
'src/ripple/unity/protocol.cpp',
|
|
||||||
'src/ripple/unity/shamap.cpp',
|
|
||||||
)
|
|
||||||
|
|
||||||
object_builder.add_source_files(
|
|
||||||
'src/ripple/unity/nodestore.cpp',
|
|
||||||
CPPPATH=[
|
|
||||||
'src/leveldb/include',
|
|
||||||
'src/rocksdb2/include',
|
|
||||||
'src/snappy/snappy',
|
|
||||||
'src/snappy/config',
|
|
||||||
])
|
|
||||||
|
|
||||||
git_commit_tag = {}
|
git_commit_tag = {}
|
||||||
if toolchain != 'msvc':
|
if toolchain != 'msvc':
|
||||||
@@ -671,10 +915,8 @@ for tu_style in ['classic', 'unity']:
|
|||||||
'src/ripple/unity/protobuf.cpp',
|
'src/ripple/unity/protobuf.cpp',
|
||||||
'src/ripple/unity/ripple.proto.cpp',
|
'src/ripple/unity/ripple.proto.cpp',
|
||||||
'src/ripple/unity/resource.cpp',
|
'src/ripple/unity/resource.cpp',
|
||||||
'src/ripple/unity/rpcx.cpp',
|
|
||||||
'src/ripple/unity/server.cpp',
|
'src/ripple/unity/server.cpp',
|
||||||
'src/ripple/unity/validators.cpp',
|
'src/ripple/unity/websocket02.cpp'
|
||||||
'src/ripple/unity/websocket.cpp'
|
|
||||||
)
|
)
|
||||||
|
|
||||||
object_builder.add_source_files(
|
object_builder.add_source_files(
|
||||||
@@ -693,27 +935,6 @@ for tu_style in ['classic', 'unity']:
|
|||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
object_builder.add_source_files(
|
|
||||||
'src/ripple/unity/leveldb.cpp',
|
|
||||||
CPPPATH=[
|
|
||||||
'src/leveldb/',
|
|
||||||
'src/leveldb/include',
|
|
||||||
'src/snappy/snappy',
|
|
||||||
'src/snappy/config',
|
|
||||||
],
|
|
||||||
**no_uninitialized_warning
|
|
||||||
)
|
|
||||||
|
|
||||||
object_builder.add_source_files(
|
|
||||||
'src/ripple/unity/hyperleveldb.cpp',
|
|
||||||
CPPPATH=[
|
|
||||||
'src/hyperleveldb',
|
|
||||||
'src/snappy/snappy',
|
|
||||||
'src/snappy/config',
|
|
||||||
],
|
|
||||||
**no_uninitialized_warning
|
|
||||||
)
|
|
||||||
|
|
||||||
object_builder.add_source_files(
|
object_builder.add_source_files(
|
||||||
'src/ripple/unity/rocksdb.cpp',
|
'src/ripple/unity/rocksdb.cpp',
|
||||||
CPPPATH=[
|
CPPPATH=[
|
||||||
@@ -734,6 +955,11 @@ for tu_style in ['classic', 'unity']:
|
|||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
object_builder.add_source_files(
|
||||||
|
'src/ripple/unity/websocket04.cpp',
|
||||||
|
CPPPATH='src/websocketpp',
|
||||||
|
)
|
||||||
|
|
||||||
if toolchain == "clang" and Beast.system.osx:
|
if toolchain == "clang" and Beast.system.osx:
|
||||||
object_builder.add_source_files('src/ripple/unity/beastobjc.mm')
|
object_builder.add_source_files('src/ripple/unity/beastobjc.mm')
|
||||||
|
|
||||||
@@ -756,15 +982,26 @@ for tu_style in ['classic', 'unity']:
|
|||||||
if toolchain in toolchains:
|
if toolchain in toolchains:
|
||||||
aliases['all'].extend(target)
|
aliases['all'].extend(target)
|
||||||
aliases[toolchain].extend(target)
|
aliases[toolchain].extend(target)
|
||||||
|
elif toolchain == 'msvc':
|
||||||
|
config = env.VSProjectConfig(variant + ".classic", 'x64', target, env)
|
||||||
|
msvc_configs.append(config)
|
||||||
|
|
||||||
if toolchain in toolchains:
|
if toolchain in toolchains:
|
||||||
aliases[variant].extend(target)
|
aliases[variant].extend(target)
|
||||||
env.Alias(variant_name, target)
|
env.Alias(variant_name, target)
|
||||||
|
|
||||||
|
# ninja support
|
||||||
|
if should_build_ninja(tu_style, toolchain, variant):
|
||||||
|
print('Generating ninja: {}:{}:{}'.format(tu_style, toolchain, variant))
|
||||||
|
scons_to_ninja.GenerateNinjaFile(
|
||||||
|
[object_builder.env] + object_builder.child_envs,
|
||||||
|
dest_file='build.ninja')
|
||||||
|
|
||||||
for key, value in aliases.iteritems():
|
for key, value in aliases.iteritems():
|
||||||
env.Alias(key, value)
|
env.Alias(key, value)
|
||||||
|
|
||||||
vcxproj = base.VSProject(
|
vcxproj = base.VSProject(
|
||||||
os.path.join('Builds', 'VisualStudio2013', 'RippleD'),
|
os.path.join('Builds', 'VisualStudio2015', 'RippleD'),
|
||||||
source = [],
|
source = [],
|
||||||
VSPROJECT_ROOT_DIRS = ['src/beast', 'src', '.'],
|
VSPROJECT_ROOT_DIRS = ['src/beast', 'src', '.'],
|
||||||
VSPROJECT_CONFIGS = msvc_configs)
|
VSPROJECT_CONFIGS = msvc_configs)
|
||||||
|
|||||||
26 appveyor.yml
@@ -6,20 +6,22 @@ environment:
   # that it's a small download. We also use appveyor's free cache, avoiding fees
   # downloading from S3 each time.
   # TODO: script to create this package.
-  RIPPLED_DEPS_URL: https://s3-ap-northeast-1.amazonaws.com/history-replay/rippled_deps.zip
+  RIPPLED_DEPS_URL: https://github.com/ripple/Downloads/blob/gh-pages/appveyor/rippled_deps15.zip?raw=true
 
   # Other dependencies we just download each time.
   PIP_URL: https://bootstrap.pypa.io/get-pip.py
   PYWIN32_URL: https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20219/pywin32-219.win-amd64-py2.7.exe
 
   # Scons honours these environment variables, setting the include/lib paths.
-  BOOST_ROOT: C:/rippled_deps/boost
-  OPENSSL_ROOT: C:/rippled_deps/openssl
+  BOOST_ROOT: C:/rippled_deps15/boost
+  OPENSSL_ROOT: C:/rippled_deps15/openssl
 
+os: Visual Studio 2015
+
 # At the end of each successful build we cache this directory. It must be less
 # than 100MB total compressed.
 cache:
-  - 'C:\\rippled_deps'
+  - "C:\\rippled_deps15"
 
 # This means we'll download a zip of the branch we want, rather than the full
 # history.
@@ -27,7 +29,7 @@ shallow_clone: true
 
 install:
   # We want easy_install, python and protoc.exe on PATH.
-  - SET PATH=%PYTHON%;%PYTHON%/Scripts;C:/rippled_deps;%PATH%
+  - SET PATH=%PYTHON%;%PYTHON%/Scripts;C:/rippled_deps15;%PATH%
 
   # `ps` prefix means the command is executed by powershell.
   - ps: Start-FileDownload $env:PIP_URL
@@ -46,9 +48,17 @@ install:
   # Download dependencies if appveyor didn't restore them from the cache.
   # Use 7zip to unzip.
   - ps: |
-      if (-not(Test-Path 'C:/rippled_deps')) {
+      if (-not(Test-Path 'C:/rippled_deps15')) {
+        echo "Download from $env:RIPPLED_DEPS_URL"
       Start-FileDownload "$env:RIPPLED_DEPS_URL"
-      7z x rippled_deps.zip -oC:\ -y > $null
+      7z x rippled_deps15.zip -oC:\ -y > $null
+      }
+
+  # Newer DEPS include a versions file.
+  # Dump it so we can verify correct behavior.
+  - ps: |
+      if (Test-Path 'C:/rippled_deps15/versions.txt') {
+        cat 'C:/rippled_deps15/versions.txt'
       }
 
   # TODO: This is giving me grief
@@ -58,7 +68,7 @@ install:
 
 build_script:
   # We set the environment variables needed to put compilers on the PATH.
-  - '"%VS120COMNTOOLS%../../VC/vcvarsall.bat" x86_amd64'
+  - '"%VS140COMNTOOLS%../../VC/vcvarsall.bat" x86_amd64'
   # Show which version of the compiler we are using.
   - cl
   - scons msvc.debug -j%NUMBER_OF_PROCESSORS%

36 bin/ci/ubuntu/build-and-test.sh (Executable file)
@@ -0,0 +1,36 @@
+#!/bin/bash -u
+# We use set -e and bash with -u to bail on first non zero exit code of any
+# processes launched or upon any unbound variable
+set -e
+__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+echo "using CC: $CC"
+echo "using TARGET: $TARGET"
+export RIPPLED_PATH="$PWD/build/$CC.$TARGET/rippled"
+echo "using RIPPLED_PATH: $RIPPLED_PATH"
+# Make sure vcxproj is up to date
+scons vcxproj
+git diff --exit-code
+# $CC will be either `clang` or `gcc`
+# http://docs.travis-ci.com/user/migrating-from-legacy/?utm_source=legacy-notice&utm_medium=banner&utm_campaign=legacy-upgrade
+# indicates that 2 cores are available to containers.
+scons -j${NUM_PROCESSORS:-2} $CC.$TARGET
+# We can be sure we're using the build/$CC.$TARGET variant
+# (-f so never err)
+rm -f build/rippled
+
+# See what we've actually built
+ldd $RIPPLED_PATH
+if [[ $TARGET == "coverage" ]]; then
+  $RIPPLED_PATH --unittest
+  # We pass along -p to keep path segments so as to avoid collisions
+  codecov --gcov-args=-p --gcov-source-match='^src/(ripple|beast)'
+else
+  # Run unittests (under gdb)
+  cat $__dirname/unittests.gdb | gdb \
+    --return-child-result \
+    --args $RIPPLED_PATH --unittest
+fi
+
+# Run NPM tests
+npm install
+npm test --rippled=$RIPPLED_PATH
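A hedged sketch of how a CI job could drive the script above; the CC/TARGET values and the use of a Python wrapper are assumptions for illustration, not taken from the actual Travis configuration:

    # bin/ci/ubuntu/build-and-test.sh expects CC to select the toolchain and
    # TARGET to select the scons variant (e.g. debug, coverage).
    import os
    import subprocess

    env = dict(os.environ, CC='gcc', TARGET='coverage', NUM_PROCESSORS='2')
    subprocess.check_call(['bash', 'bin/ci/ubuntu/build-and-test.sh'], env=env)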
25 bin/ci/ubuntu/install-dependencies.sh (Executable file)
@@ -0,0 +1,25 @@
+#!/bin/bash -u
+# Exit if anything fails.
+set -e
+# Override gcc version to $GCC_VER.
+# Put an appropriate symlink at the front of the path.
+mkdir -v $HOME/bin
+for g in gcc g++ gcov gcc-ar gcc-nm gcc-ranlib
+do
+  test -x $( type -p ${g}-$GCC_VER )
+  ln -sv $(type -p ${g}-$GCC_VER) $HOME/bin/${g}
+done
+export PATH=$PWD/bin:$PATH
+
+# What versions are we ACTUALLY running?
+g++ -v
+clang -v
+# Avoid `spurious errors` caused by ~/.npm permission issues
+# Does it already exist? Who owns? What permissions?
+ls -lah ~/.npm || mkdir ~/.npm
+# Make sure we own it
+chown -Rc $USER ~/.npm
+# We use this so we can filter the subtrees from our coverage report
+pip install --user https://github.com/sublimator/codecov-python/zipball/source-match
+
+bash bin/sh/install-boost.sh

4 bin/ci/ubuntu/unittests.gdb (Normal file)
@@ -0,0 +1,4 @@
+set env MALLOC_CHECK_=3
+set print thread-events off
+run
+backtrace full

1 bin/manifest (Symbolic link)
@@ -0,0 +1 @@
+python/Manifest.py

7 bin/python/Manifest.py (Executable file)
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+
+import sys
+from ripple.util import Sign
+
+result = Sign.run_command(sys.argv[1:])
+exit(0 if result else -1)

14 bin/python/ecdsa/__init__.py (Normal file)
@@ -0,0 +1,14 @@
+__all__ = ["curves", "der", "ecdsa", "ellipticcurve", "keys", "numbertheory",
+           "test_pyecdsa", "util", "six"]
+from .keys import SigningKey, VerifyingKey, BadSignatureError, BadDigestError
+from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1
+
+_hush_pyflakes = [SigningKey, VerifyingKey, BadSignatureError, BadDigestError,
+                  NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]
+del _hush_pyflakes
+
+# This code comes from http://github.com/warner/python-ecdsa
+
+from ._version import get_versions
+__version__ = get_versions()['version']
+del get_versions
183
bin/python/ecdsa/_version.py
Normal file
@@ -0,0 +1,183 @@
|
|||||||
|
|
||||||
|
# This file helps to compute a version number in source trees obtained from
|
||||||
|
# git-archive tarball (such as those provided by githubs download-from-tag
|
||||||
|
# feature). Distribution tarballs (built by setup.py sdist) and build
|
||||||
|
# directories (produced by setup.py build) will contain a much shorter file
|
||||||
|
# that just contains the computed version number.
|
||||||
|
|
||||||
|
# This file is released into the public domain. Generated by
|
||||||
|
# versioneer-0.12 (https://github.com/warner/python-versioneer)
|
||||||
|
|
||||||
|
# these strings will be replaced by git during git-archive
|
||||||
|
git_refnames = " (HEAD, master)"
|
||||||
|
git_full = "e7a6daff51221b8edd888cff404596ef90432869"
|
||||||
|
|
||||||
|
# these strings are filled in when 'setup.py versioneer' creates _version.py
|
||||||
|
tag_prefix = "python-ecdsa-"
|
||||||
|
parentdir_prefix = "ecdsa-"
|
||||||
|
versionfile_source = "ecdsa/_version.py"
|
||||||
|
|
||||||
|
import os, sys, re, subprocess, errno
|
||||||
|
|
||||||
|
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
|
||||||
|
assert isinstance(commands, list)
|
||||||
|
p = None
|
||||||
|
for c in commands:
|
||||||
|
try:
|
||||||
|
# remember shell=False, so use git.cmd on windows, not just git
|
||||||
|
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
|
||||||
|
stderr=(subprocess.PIPE if hide_stderr
|
||||||
|
else None))
|
||||||
|
break
|
||||||
|
except EnvironmentError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
if e.errno == errno.ENOENT:
|
||||||
|
continue
|
||||||
|
if verbose:
|
||||||
|
print("unable to run %s" % args[0])
|
||||||
|
print(e)
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
if verbose:
|
||||||
|
print("unable to find command, tried %s" % (commands,))
|
||||||
|
return None
|
||||||
|
stdout = p.communicate()[0].strip()
|
||||||
|
if sys.version >= '3':
|
||||||
|
stdout = stdout.decode()
|
||||||
|
if p.returncode != 0:
|
||||||
|
if verbose:
|
||||||
|
print("unable to run %s (error)" % args[0])
|
||||||
|
return None
|
||||||
|
return stdout
|
||||||
|
|
||||||
|
|
||||||
|
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
|
||||||
|
# Source tarballs conventionally unpack into a directory that includes
|
||||||
|
# both the project name and a version string.
|
||||||
|
dirname = os.path.basename(root)
|
||||||
|
if not dirname.startswith(parentdir_prefix):
|
||||||
|
if verbose:
|
||||||
|
print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
|
||||||
|
(root, dirname, parentdir_prefix))
|
||||||
|
return None
|
||||||
|
return {"version": dirname[len(parentdir_prefix):], "full": ""}
|
||||||
|
|
||||||
|
def git_get_keywords(versionfile_abs):
|
||||||
|
# the code embedded in _version.py can just fetch the value of these
|
||||||
|
# keywords. When used from setup.py, we don't want to import _version.py,
|
||||||
|
# so we do it with a regexp instead. This function is not used from
|
||||||
|
# _version.py.
|
||||||
|
keywords = {}
|
||||||
|
try:
|
||||||
|
f = open(versionfile_abs,"r")
|
||||||
|
for line in f.readlines():
|
||||||
|
if line.strip().startswith("git_refnames ="):
|
||||||
|
mo = re.search(r'=\s*"(.*)"', line)
|
||||||
|
if mo:
|
||||||
|
keywords["refnames"] = mo.group(1)
|
||||||
|
if line.strip().startswith("git_full ="):
|
||||||
|
mo = re.search(r'=\s*"(.*)"', line)
|
||||||
|
if mo:
|
||||||
|
keywords["full"] = mo.group(1)
|
||||||
|
f.close()
|
||||||
|
except EnvironmentError:
|
||||||
|
pass
|
||||||
|
return keywords
|
||||||
|
|
||||||
|
def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
|
||||||
|
if not keywords:
|
||||||
|
return {} # keyword-finding function failed to find keywords
|
||||||
|
refnames = keywords["refnames"].strip()
|
||||||
|
if refnames.startswith("$Format"):
|
||||||
|
if verbose:
|
||||||
|
print("keywords are unexpanded, not using")
|
||||||
|
return {} # unexpanded, so not in an unpacked git-archive tarball
|
||||||
|
refs = set([r.strip() for r in refnames.strip("()").split(",")])
|
||||||
|
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
|
||||||
|
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
|
||||||
|
TAG = "tag: "
|
||||||
|
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
|
||||||
|
if not tags:
|
||||||
|
# Either we're using git < 1.8.3, or there really are no tags. We use
|
||||||
|
# a heuristic: assume all version tags have a digit. The old git %d
|
||||||
|
# expansion behaves like git log --decorate=short and strips out the
|
||||||
|
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
|
||||||
|
# between branches and tags. By ignoring refnames without digits, we
|
||||||
|
# filter out many common branch names like "release" and
|
||||||
|
# "stabilization", as well as "HEAD" and "master".
|
||||||
|
tags = set([r for r in refs if re.search(r'\d', r)])
|
||||||
|
if verbose:
|
||||||
|
print("discarding '%s', no digits" % ",".join(refs-tags))
|
||||||
|
if verbose:
|
||||||
|
print("likely tags: %s" % ",".join(sorted(tags)))
|
||||||
|
for ref in sorted(tags):
|
||||||
|
# sorting will prefer e.g. "2.0" over "2.0rc1"
|
||||||
|
if ref.startswith(tag_prefix):
|
||||||
|
r = ref[len(tag_prefix):]
|
||||||
|
if verbose:
|
||||||
|
print("picking %s" % r)
|
||||||
|
return { "version": r,
|
||||||
|
"full": keywords["full"].strip() }
|
||||||
|
# no suitable tags, so we use the full revision id
|
||||||
|
if verbose:
|
||||||
|
print("no suitable tags, using full revision id")
|
||||||
|
return { "version": keywords["full"].strip(),
|
||||||
|
"full": keywords["full"].strip() }
|
||||||
|
|
||||||
|
|
||||||
|
def git_versions_from_vcs(tag_prefix, root, verbose=False):
|
||||||
|
# this runs 'git' from the root of the source tree. This only gets called
|
||||||
|
# if the git-archive 'subst' keywords were *not* expanded, and
|
||||||
|
# _version.py hasn't already been rewritten with a short version string,
|
||||||
|
# meaning we're inside a checked out source tree.
|
||||||
|
|
||||||
|
if not os.path.exists(os.path.join(root, ".git")):
|
||||||
|
if verbose:
|
||||||
|
print("no .git in %s" % root)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
GITS = ["git"]
|
||||||
|
if sys.platform == "win32":
|
||||||
|
GITS = ["git.cmd", "git.exe"]
|
||||||
|
stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
|
||||||
|
cwd=root)
|
||||||
|
if stdout is None:
|
||||||
|
return {}
|
||||||
|
if not stdout.startswith(tag_prefix):
|
||||||
|
if verbose:
|
||||||
|
print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
|
||||||
|
return {}
|
||||||
|
tag = stdout[len(tag_prefix):]
|
||||||
|
stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
|
||||||
|
if stdout is None:
|
||||||
|
return {}
|
||||||
|
full = stdout.strip()
|
||||||
|
if tag.endswith("-dirty"):
|
||||||
|
full += "-dirty"
|
||||||
|
return {"version": tag, "full": full}
|
||||||
|
|
||||||
|
|
||||||
|
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
|
||||||
|
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
|
||||||
|
# __file__, we can work backwards from there to the root. Some
|
||||||
|
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
|
||||||
|
# case we can only use expanded keywords.
|
||||||
|
|
||||||
|
keywords = { "refnames": git_refnames, "full": git_full }
|
||||||
|
ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
|
||||||
|
if ver:
|
||||||
|
return ver
|
||||||
|
|
||||||
|
try:
|
||||||
|
root = os.path.abspath(__file__)
|
||||||
|
# versionfile_source is the relative path from the top of the source
|
||||||
|
# tree (where the .git directory might live) to this file. Invert
|
||||||
|
# this to find the root from __file__.
|
||||||
|
for i in range(len(versionfile_source.split(os.sep))):
|
||||||
|
root = os.path.dirname(root)
|
||||||
|
except NameError:
|
||||||
|
return default
|
||||||
|
|
||||||
|
return (git_versions_from_vcs(tag_prefix, root, verbose)
|
||||||
|
or versions_from_parentdir(parentdir_prefix, root, verbose)
|
||||||
|
or default)
|
||||||
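For orientation, this is roughly how the versioneer helpers above are consumed; the printed values depend on the checkout, so none are shown here:

    # get_versions() falls back from expanded git-archive keywords, to `git describe`,
    # to the parent directory name, and finally to the supplied default.
    from ecdsa import _version

    info = _version.get_versions(verbose=True)
    print(info['version'], info['full'])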
53
bin/python/ecdsa/curves.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
from __future__ import division
|
||||||
|
|
||||||
|
from . import der, ecdsa
|
||||||
|
|
||||||
|
class UnknownCurveError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def orderlen(order):
|
||||||
|
return (1+len("%x"%order))//2 # bytes
|
||||||
|
|
||||||
|
# the NIST curves
|
||||||
|
class Curve:
|
||||||
|
def __init__(self, name, openssl_name,
|
||||||
|
curve, generator, oid):
|
||||||
|
self.name = name
|
||||||
|
self.openssl_name = openssl_name # maybe None
|
||||||
|
self.curve = curve
|
||||||
|
self.generator = generator
|
||||||
|
self.order = generator.order()
|
||||||
|
self.baselen = orderlen(self.order)
|
||||||
|
self.verifying_key_length = 2*self.baselen
|
||||||
|
self.signature_length = 2*self.baselen
|
||||||
|
self.oid = oid
|
||||||
|
self.encoded_oid = der.encode_oid(*oid)
|
||||||
|
|
||||||
|
NIST192p = Curve("NIST192p", "prime192v1",
|
||||||
|
ecdsa.curve_192, ecdsa.generator_192,
|
||||||
|
(1, 2, 840, 10045, 3, 1, 1))
|
||||||
|
NIST224p = Curve("NIST224p", "secp224r1",
|
||||||
|
ecdsa.curve_224, ecdsa.generator_224,
|
||||||
|
(1, 3, 132, 0, 33))
|
||||||
|
NIST256p = Curve("NIST256p", "prime256v1",
|
||||||
|
ecdsa.curve_256, ecdsa.generator_256,
|
||||||
|
(1, 2, 840, 10045, 3, 1, 7))
|
||||||
|
NIST384p = Curve("NIST384p", "secp384r1",
|
||||||
|
ecdsa.curve_384, ecdsa.generator_384,
|
||||||
|
(1, 3, 132, 0, 34))
|
||||||
|
NIST521p = Curve("NIST521p", "secp521r1",
|
||||||
|
ecdsa.curve_521, ecdsa.generator_521,
|
||||||
|
(1, 3, 132, 0, 35))
|
||||||
|
SECP256k1 = Curve("SECP256k1", "secp256k1",
|
||||||
|
ecdsa.curve_secp256k1, ecdsa.generator_secp256k1,
|
||||||
|
(1, 3, 132, 0, 10))
|
||||||
|
|
||||||
|
curves = [NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]
|
||||||
|
|
||||||
|
def find_curve(oid_curve):
|
||||||
|
for c in curves:
|
||||||
|
if c.oid == oid_curve:
|
||||||
|
return c
|
||||||
|
raise UnknownCurveError("I don't know about the curve with oid %s."
|
||||||
|
"I only know about these: %s" %
|
||||||
|
(oid_curve, [c.name for c in curves]))
|
||||||
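A small usage example against the curve registry defined above; the numbers follow from orderlen() applied to the 256-bit order of secp256k1:

    from ecdsa.curves import SECP256k1, find_curve

    # baselen is the byte length of the curve order; a signature is two such integers.
    print(SECP256k1.name, SECP256k1.baselen, SECP256k1.signature_length)  # SECP256k1 32 64
    assert find_curve((1, 3, 132, 0, 10)) is SECP256k1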
199
bin/python/ecdsa/der.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
from __future__ import division
|
||||||
|
|
||||||
|
import binascii
|
||||||
|
import base64
|
||||||
|
from .six import int2byte, b, integer_types, text_type
|
||||||
|
|
||||||
|
class UnexpectedDER(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def encode_constructed(tag, value):
|
||||||
|
return int2byte(0xa0+tag) + encode_length(len(value)) + value
|
||||||
|
def encode_integer(r):
|
||||||
|
assert r >= 0 # can't support negative numbers yet
|
||||||
|
h = ("%x" % r).encode()
|
||||||
|
if len(h) % 2:
|
||||||
|
h = b("0") + h
|
||||||
|
s = binascii.unhexlify(h)
|
||||||
|
num = s[0] if isinstance(s[0], integer_types) else ord(s[0])
|
||||||
|
if num <= 0x7f:
|
||||||
|
return b("\x02") + int2byte(len(s)) + s
|
||||||
|
else:
|
||||||
|
# DER integers are two's complement, so if the first byte is
|
||||||
|
# 0x80-0xff then we need an extra 0x00 byte to prevent it from
|
||||||
|
# looking negative.
|
||||||
|
return b("\x02") + int2byte(len(s)+1) + b("\x00") + s
|
||||||
|
|
||||||
|
def encode_bitstring(s):
|
||||||
|
return b("\x03") + encode_length(len(s)) + s
|
||||||
|
def encode_octet_string(s):
|
||||||
|
return b("\x04") + encode_length(len(s)) + s
|
||||||
|
def encode_oid(first, second, *pieces):
|
||||||
|
assert first <= 2
|
||||||
|
assert second <= 39
|
||||||
|
encoded_pieces = [int2byte(40*first+second)] + [encode_number(p)
|
||||||
|
for p in pieces]
|
||||||
|
body = b('').join(encoded_pieces)
|
||||||
|
return b('\x06') + encode_length(len(body)) + body
|
||||||
|
def encode_sequence(*encoded_pieces):
|
||||||
|
total_len = sum([len(p) for p in encoded_pieces])
|
||||||
|
return b('\x30') + encode_length(total_len) + b('').join(encoded_pieces)
|
||||||
|
def encode_number(n):
|
||||||
|
b128_digits = []
|
||||||
|
while n:
|
||||||
|
b128_digits.insert(0, (n & 0x7f) | 0x80)
|
||||||
|
n = n >> 7
|
||||||
|
if not b128_digits:
|
||||||
|
b128_digits.append(0)
|
||||||
|
b128_digits[-1] &= 0x7f
|
||||||
|
return b('').join([int2byte(d) for d in b128_digits])
|
||||||
|
|
||||||
|
def remove_constructed(string):
|
||||||
|
s0 = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||||
|
if (s0 & 0xe0) != 0xa0:
|
||||||
|
raise UnexpectedDER("wanted constructed tag (0xa0-0xbf), got 0x%02x"
|
||||||
|
% s0)
|
||||||
|
tag = s0 & 0x1f
|
||||||
|
length, llen = read_length(string[1:])
|
||||||
|
body = string[1+llen:1+llen+length]
|
||||||
|
rest = string[1+llen+length:]
|
||||||
|
return tag, body, rest
|
||||||
|
|
||||||
|
def remove_sequence(string):
|
||||||
|
if not string.startswith(b("\x30")):
|
||||||
|
n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||||
|
raise UnexpectedDER("wanted sequence (0x30), got 0x%02x" % n)
|
||||||
|
length, lengthlength = read_length(string[1:])
|
||||||
|
endseq = 1+lengthlength+length
|
||||||
|
return string[1+lengthlength:endseq], string[endseq:]
|
||||||
|
|
||||||
|
def remove_octet_string(string):
|
||||||
|
if not string.startswith(b("\x04")):
|
||||||
|
n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||||
|
raise UnexpectedDER("wanted octetstring (0x04), got 0x%02x" % n)
|
||||||
|
length, llen = read_length(string[1:])
|
||||||
|
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

def remove_object(string):
    if not string.startswith(b("\x06")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted object (0x06), got 0x%02x" % n)
    length, lengthlength = read_length(string[1:])
    body = string[1+lengthlength:1+lengthlength+length]
    rest = string[1+lengthlength+length:]
    numbers = []
    while body:
        n, ll = read_number(body)
        numbers.append(n)
        body = body[ll:]
    n0 = numbers.pop(0)
    first = n0//40
    second = n0-(40*first)
    numbers.insert(0, first)
    numbers.insert(1, second)
    return tuple(numbers), rest

def remove_integer(string):
    if not string.startswith(b("\x02")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted integer (0x02), got 0x%02x" % n)
    length, llen = read_length(string[1:])
    numberbytes = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    nbytes = numberbytes[0] if isinstance(numberbytes[0], integer_types) else ord(numberbytes[0])
    assert nbytes < 0x80  # can't support negative numbers yet
    return int(binascii.hexlify(numberbytes), 16), rest

def read_number(string):
    number = 0
    llen = 0
    # base-128 big endian, with b7 set in all but the last byte
    while True:
        if llen > len(string):
            raise UnexpectedDER("ran out of length bytes")
        number = number << 7
        d = string[llen] if isinstance(string[llen], integer_types) else ord(string[llen])
        number += (d & 0x7f)
        llen += 1
        if not d & 0x80:
            break
    return number, llen

def encode_length(l):
    assert l >= 0
    if l < 0x80:
        return int2byte(l)
    s = ("%x" % l).encode()
    if len(s)%2:
        s = b("0")+s
    s = binascii.unhexlify(s)
    llen = len(s)
    return int2byte(0x80|llen) + s

def read_length(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not (num & 0x80):
        # short form
        return (num & 0x7f), 1
    # else long-form: b0&0x7f is number of additional base256 length bytes,
    # big-endian
    llen = num & 0x7f
    if llen > len(string)-1:
        raise UnexpectedDER("ran out of length bytes")
    return int(binascii.hexlify(string[1:1+llen]), 16), 1+llen

def remove_bitstring(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not string.startswith(b("\x03")):
        raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num)
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING)


# signatures: (from RFC3279)
#  ansi-X9-62  OBJECT IDENTIFIER ::= {
#       iso(1) member-body(2) us(840) 10045 }
#
#  id-ecSigType OBJECT IDENTIFIER ::= {
#       ansi-X9-62 signatures(4) }
#  ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
#       id-ecSigType 1 }
## so 1,2,840,10045,4,1
## so 0x42, .. ..

#  Ecdsa-Sig-Value ::= SEQUENCE {
#       r     INTEGER,
#       s     INTEGER }

#  id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 }
#
#  id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 }

# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021)
#  secp224r1 OBJECT IDENTIFIER ::= {
#       iso(1) identified-organization(3) certicom(132) curve(0) 33 }
# and the secp384r1 is (t=06,l=05,v=2b81040022)
#  secp384r1 OBJECT IDENTIFIER ::= {
#       iso(1) identified-organization(3) certicom(132) curve(0) 34 }

def unpem(pem):
    if isinstance(pem, text_type):
        pem = pem.encode()

    d = b("").join([l.strip() for l in pem.split(b("\n"))
                    if l and not l.startswith(b("-----"))])
    return base64.b64decode(d)

def topem(der, name):
    b64 = base64.b64encode(der)
    lines = [("-----BEGIN %s-----\n" % name).encode()]
    lines.extend([b64[start:start+64]+b("\n")
                  for start in range(0, len(b64), 64)])
    lines.append(("-----END %s-----\n" % name).encode())
    return b("").join(lines)
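Editorial note, not part of the diff: the PEM framing that topem()/unpem() above implement is just base64 with 64-character lines between BEGIN/END markers. A minimal standalone sketch of the same framing, using only the standard library (the helper names pem_wrap/pem_unwrap are illustrative, not names from this module):

import base64

def pem_wrap(der_bytes, name):
    # same framing as topem(): base64 body split into 64-character lines
    b64 = base64.b64encode(der_bytes).decode("ascii")
    body = "\n".join(b64[i:i + 64] for i in range(0, len(b64), 64))
    return "-----BEGIN %s-----\n%s\n-----END %s-----\n" % (name, body, name)

def pem_unwrap(pem):
    # same stripping as unpem(): drop the dashed marker lines, join, base64-decode
    body = "".join(l.strip() for l in pem.splitlines()
                   if l and not l.startswith("-----"))
    return base64.b64decode(body)

assert pem_unwrap(pem_wrap(b"\x30\x00", "EC PRIVATE KEY")) == b"\x30\x00"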
576
bin/python/ecdsa/ecdsa.py
Normal file
@@ -0,0 +1,576 @@
#! /usr/bin/env python

"""
Implementation of Elliptic-Curve Digital Signatures.

Classes and methods for elliptic-curve signatures:
private keys, public keys, signatures,
NIST prime-modulus curves with modulus lengths of
192, 224, 256, 384, and 521 bits.

Example:

  # (In real-life applications, you would probably want to
  # protect against defects in SystemRandom.)
  from random import SystemRandom
  randrange = SystemRandom().randrange

  # Generate a public/private key pair using the NIST Curve P-192:

  g = generator_192
  n = g.order()
  secret = randrange( 1, n )
  pubkey = Public_key( g, g * secret )
  privkey = Private_key( pubkey, secret )

  # Signing a hash value:

  hash = randrange( 1, n )
  signature = privkey.sign( hash, randrange( 1, n ) )

  # Verifying a signature for a hash value:

  if pubkey.verifies( hash, signature ):
    print_("Demo verification succeeded.")
  else:
    print_("*** Demo verification failed.")

  # Verification fails if the hash value is modified:

  if pubkey.verifies( hash-1, signature ):
    print_("**** Demo verification failed to reject tampered hash.")
  else:
    print_("Demo verification correctly rejected tampered hash.")

Version of 2009.05.16.

Revision history:
    2005.12.31 - Initial version.
    2008.11.25 - Substantial revisions introducing new classes.
    2009.05.16 - Warn against using random.randrange in real applications.
    2009.05.17 - Use random.SystemRandom by default.

Written in 2005 by Peter Pearson and placed in the public domain.
"""

from .six import int2byte, b, print_
from . import ellipticcurve
from . import numbertheory
import random


class Signature( object ):
  """ECDSA signature.
  """
  def __init__( self, r, s ):
    self.r = r
    self.s = s


class Public_key( object ):
  """Public key for ECDSA.
  """

  def __init__( self, generator, point ):
    """generator is the Point that generates the group,
    point is the Point that defines the public key.
    """

    self.curve = generator.curve()
    self.generator = generator
    self.point = point
    n = generator.order()
    if not n:
      raise RuntimeError("Generator point must have order.")
    if not n * point == ellipticcurve.INFINITY:
      raise RuntimeError("Generator point order is bad.")
    if point.x() < 0 or n <= point.x() or point.y() < 0 or n <= point.y():
      raise RuntimeError("Generator point has x or y out of range.")

  def verifies( self, hash, signature ):
    """Verify that signature is a valid signature of hash.
    Return True if the signature is valid.
    """

    # From X9.62 J.3.1.

    G = self.generator
    n = G.order()
    r = signature.r
    s = signature.s
    if r < 1 or r > n-1: return False
    if s < 1 or s > n-1: return False
    c = numbertheory.inverse_mod( s, n )
    u1 = ( hash * c ) % n
    u2 = ( r * c ) % n
    xy = u1 * G + u2 * self.point
    v = xy.x() % n
    return v == r


class Private_key( object ):
  """Private key for ECDSA.
  """

  def __init__( self, public_key, secret_multiplier ):
    """public_key is of class Public_key;
    secret_multiplier is a large integer.
    """

    self.public_key = public_key
    self.secret_multiplier = secret_multiplier

  def sign( self, hash, random_k ):
    """Return a signature for the provided hash, using the provided
    random nonce. It is absolutely vital that random_k be an unpredictable
    number in the range [1, self.public_key.point.order()-1]. If
    an attacker can guess random_k, he can compute our private key from a
    single signature. Also, if an attacker knows a few high-order
    bits (or a few low-order bits) of random_k, he can compute our private
    key from many signatures. The generation of nonces with adequate
    cryptographic strength is very difficult and far beyond the scope
    of this comment.

    May raise RuntimeError, in which case retrying with a new
    random value k is in order.
    """

    G = self.public_key.generator
    n = G.order()
    k = random_k % n
    p1 = k * G
    r = p1.x()
    if r == 0: raise RuntimeError("amazingly unlucky random number r")
    s = ( numbertheory.inverse_mod( k, n ) * \
          ( hash + ( self.secret_multiplier * r ) % n ) ) % n
    if s == 0: raise RuntimeError("amazingly unlucky random number s")
    return Signature( r, s )


def int_to_string( x ):
  """Convert integer x into a string of bytes, as per X9.62."""
  assert x >= 0
  if x == 0: return b('\0')
  result = []
  while x:
    ordinal = x & 0xFF
    result.append(int2byte(ordinal))
    x >>= 8

  result.reverse()
  return b('').join(result)


def string_to_int( s ):
  """Convert a string of bytes into an integer, as per X9.62."""
  result = 0
  for c in s:
    if not isinstance(c, int): c = ord( c )
    result = 256 * result + c
  return result


def digest_integer( m ):
  """Convert an integer into a string of bytes, compute
     its SHA-1 hash, and convert the result to an integer."""
  #
  # I don't expect this function to be used much. I wrote
  # it in order to be able to duplicate the examples
  # in ECDSAVS.
  #
  from hashlib import sha1
  return string_to_int( sha1( int_to_string( m ) ).digest() )


def point_is_valid( generator, x, y ):
  """Is (x,y) a valid public key based on the specified generator?"""

  # These are the tests specified in X9.62.

  n = generator.order()
  curve = generator.curve()
  if x < 0 or n <= x or y < 0 or n <= y:
    return False
  if not curve.contains_point( x, y ):
    return False
  if not n*ellipticcurve.Point( curve, x, y ) == \
     ellipticcurve.INFINITY:
    return False
  return True


# NIST Curve P-192:
_p = 6277101735386680763835789423207666416083908700390324961279
_r = 6277101735386680763835789423176059013767194773182842284081
# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L
_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
_Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
_Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

curve_192 = ellipticcurve.CurveFp( _p, -3, _b )
generator_192 = ellipticcurve.Point( curve_192, _Gx, _Gy, _r )


# NIST Curve P-224:
_p = 26959946667150639794667015087019630673557916260026308143510066298881
_r = 26959946667150639794667015087019625940457807714424391721682722368061
# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L
# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL
_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
_Gx = 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
_Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34

curve_224 = ellipticcurve.CurveFp( _p, -3, _b )
generator_224 = ellipticcurve.Point( curve_224, _Gx, _Gy, _r )

# NIST Curve P-256:
_p = 115792089210356248762697446949407573530086143415290314195533631308867097853951
_r = 115792089210356248762697446949407573529996955224135760342422259061068512044369
# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L
# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL
_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
_Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
_Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5

curve_256 = ellipticcurve.CurveFp( _p, -3, _b )
generator_256 = ellipticcurve.Point( curve_256, _Gx, _Gy, _r )

# NIST Curve P-384:
_p = 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319
_r = 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643
# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L
# c = 0x79d1e655f868f02fff48dcdee14151ddb80643c1406d0ca10dfe6fc52009540a495e8042ea5f744f6e184667cc722483L
_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
_Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
_Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f

curve_384 = ellipticcurve.CurveFp( _p, -3, _b )
generator_384 = ellipticcurve.Point( curve_384, _Gx, _Gy, _r )

# NIST Curve P-521:
_p = 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151
_r = 6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449
# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL
# c = 0x0b48bfa5f420a34949539d2bdfc264eeeeb077688e44fbf0ad8f6d0edb37bd6b533281000518e19f1b9ffbe0fe9ed8a3c2200b8f875e523868c70c1e5bf55bad637L
_b = 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
_Gx = 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
_Gy = 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650

curve_521 = ellipticcurve.CurveFp( _p, -3, _b )
generator_521 = ellipticcurve.Point( curve_521, _Gx, _Gy, _r )

# Certicom secp256-k1
_a = 0x0000000000000000000000000000000000000000000000000000000000000000
_b = 0x0000000000000000000000000000000000000000000000000000000000000007
_p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
_Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
_r = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141

curve_secp256k1 = ellipticcurve.CurveFp( _p, _a, _b)
generator_secp256k1 = ellipticcurve.Point( curve_secp256k1, _Gx, _Gy, _r)

def __main__():
  class TestFailure(Exception): pass

  def test_point_validity( generator, x, y, expected ):
    """generator defines the curve; is (x,y) a point on
       this curve? "expected" is True if the right answer is Yes."""
    if point_is_valid( generator, x, y ) == expected:
      print_("Point validity tested as expected.")
    else:
      raise TestFailure("*** Point validity test gave wrong result.")

  def test_signature_validity( Msg, Qx, Qy, R, S, expected ):
    """Msg = message, Qx and Qy represent the base point on
       elliptic curve c192, R and S are the signature, and
       "expected" is True iff the signature is expected to be valid."""
    pubk = Public_key( generator_192,
                       ellipticcurve.Point( curve_192, Qx, Qy ) )
    got = pubk.verifies( digest_integer( Msg ), Signature( R, S ) )
    if got == expected:
      print_("Signature tested as expected: got %s, expected %s." % \
             ( got, expected ))
    else:
      raise TestFailure("*** Signature test failed: got %s, expected %s." % \
                        ( got, expected ))

  print_("NIST Curve P-192:")

  p192 = generator_192

  # From X9.62:

  d = 651056770906015076056810763456358567190100156695615665659
  Q = d * p192
  if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
    raise TestFailure("*** p192 * d came out wrong.")
  else:
    print_("p192 * d came out right.")

  k = 6140507067065001063065065565667405560006161556565665656654
  R = k * p192
  if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise TestFailure("*** k * p192 came out wrong.")
  else:
    print_("k * p192 came out right.")

  u1 = 2563697409189434185194736134579731015366492496392189760599
  u2 = 6266643813348617967186477710235785849136406323338782220568
  temp = u1 * p192 + u2 * Q
  if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise TestFailure("*** u1 * p192 + u2 * Q came out wrong.")
  else:
    print_("u1 * p192 + u2 * Q came out right.")

  e = 968236873715988614170569073515315707566766479517
  pubk = Public_key( generator_192, generator_192 * d )
  privk = Private_key( pubk, d )
  sig = privk.sign( e, k )
  r, s = sig.r, sig.s
  if r != 3342403536405981729393488334694600415596881826869351677613 \
     or s != 5735822328888155254683894997897571951568553642892029982342:
    raise TestFailure("*** r or s came out wrong.")
  else:
    print_("r and s came out right.")

  valid = pubk.verifies( e, sig )
  if valid: print_("Signature verified OK.")
  else: raise TestFailure("*** Signature failed verification.")

  valid = pubk.verifies( e-1, sig )
  if not valid: print_("Forgery was correctly rejected.")
  else: raise TestFailure("*** Forgery was erroneously accepted.")

  print_("Testing point validity, as per ECDSAVS.pdf B.2.2:")

  test_point_validity( \
    p192, \
    0xcd6d0f029a023e9aaca429615b8f577abee685d8257cc83a, \
    0x00019c410987680e9fb6c0b6ecc01d9a2647c8bae27721bacdfc, \
    False )

  test_point_validity(
    p192, \
    0x00017f2fce203639e9eaf9fb50b81fc32776b30e3b02af16c73b, \
    0x95da95c5e72dd48e229d4748d4eee658a9a54111b23b2adb, \
    False )

  test_point_validity(
    p192, \
    0x4f77f8bc7fccbadd5760f4938746d5f253ee2168c1cf2792, \
    0x000147156ff824d131629739817edb197717c41aab5c2a70f0f6, \
    False )

  test_point_validity(
    p192, \
    0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6, \
    0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f, \
    True )

  test_point_validity(
    p192, \
    0xcdf56c1aa3d8afc53c521adf3ffb96734a6a630a4a5b5a70, \
    0x97c1c44a5fb229007b5ec5d25f7413d170068ffd023caa4e, \
    True )

  test_point_validity(
    p192, \
    0x89009c0dc361c81e99280c8e91df578df88cdf4b0cdedced, \
    0x27be44a529b7513e727251f128b34262a0fd4d8ec82377b9, \
    True )

  test_point_validity(
    p192, \
    0x6a223d00bd22c52833409a163e057e5b5da1def2a197dd15, \
    0x7b482604199367f1f303f9ef627f922f97023e90eae08abf, \
    True )

  test_point_validity(
    p192, \
    0x6dccbde75c0948c98dab32ea0bc59fe125cf0fb1a3798eda, \
    0x0001171a3e0fa60cf3096f4e116b556198de430e1fbd330c8835, \
    False )

  test_point_validity(
    p192, \
    0xd266b39e1f491fc4acbbbc7d098430931cfa66d55015af12, \
    0x193782eb909e391a3148b7764e6b234aa94e48d30a16dbb2, \
    False )

  test_point_validity(
    p192, \
    0x9d6ddbcd439baa0c6b80a654091680e462a7d1d3f1ffeb43, \
    0x6ad8efc4d133ccf167c44eb4691c80abffb9f82b932b8caa, \
    False )

  test_point_validity(
    p192, \
    0x146479d944e6bda87e5b35818aa666a4c998a71f4e95edbc, \
    0xa86d6fe62bc8fbd88139693f842635f687f132255858e7f6, \
    False )

  test_point_validity(
    p192, \
    0xe594d4a598046f3598243f50fd2c7bd7d380edb055802253, \
    0x509014c0c4d6b536e3ca750ec09066af39b4c8616a53a923, \
    False )

  print_("Trying signature-verification tests from ECDSAVS.pdf B.2.4:")
  print_("P-192:")
  Msg = 0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff798cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d158
  Qx = 0xd9dbfb332aa8e5ff091e8ce535857c37c73f6250ffb2e7ac
  Qy = 0x282102e364feded3ad15ddf968f88d8321aa268dd483ebc4
  R = 0x64dca58a20787c488d11d6dd96313f1b766f2d8efe122916
  S = 0x1ecba28141e84ab4ecad92f56720e2cc83eb3d22dec72479
  test_signature_validity( Msg, Qx, Qy, R, S, True )

  Msg = 0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db12e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db326ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63f4
  Qx = 0x3e53ef8d3112af3285c0e74842090712cd324832d4277ae7
  Qy = 0xcc75f8952d30aec2cbb719fc6aa9934590b5d0ff5a83adb7
  R = 0x8285261607283ba18f335026130bab31840dcfd9c3e555af
  S = 0x356d89e1b04541afc9704a45e9c535ce4a50929e33d7e06c
  test_signature_validity( Msg, Qx, Qy, R, S, True )

  Msg = 0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cdd41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d303f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42dd
  Qx = 0x16335dbe95f8e8254a4e04575d736befb258b8657f773cb7
  Qy = 0x421b13379c59bc9dce38a1099ca79bbd06d647c7f6242336
  R = 0x4141bd5d64ea36c5b0bd21ef28c02da216ed9d04522b1e91
  S = 0x159a6aa852bcc579e821b7bb0994c0861fb08280c38daa09
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b563097ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd8a
  Qx = 0xfd14cdf1607f5efb7b1793037b15bdf4baa6f7c16341ab0b
  Qy = 0x83fa0795cc6c4795b9016dac928fd6bac32f3229a96312c4
  R = 0x8dfdb832951e0167c5d762a473c0416c5c15bc1195667dc1
  S = 0x1720288a2dc13fa1ec78f763f8fe2ff7354a7e6fdde44520
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d39192e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bceae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072fb
  Qx = 0x674f941dc1a1f8b763c9334d726172d527b90ca324db8828
  Qy = 0x65adfa32e8b236cb33a3e84cf59bfb9417ae7e8ede57a7ff
  R = 0x9508b9fdd7daf0d8126f9e2bc5a35e4c6d800b5b804d7796
  S = 0x36f2bf6b21b987c77b53bb801b3435a577e3d493744bfab0
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397ce15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aae98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca6d
  Qx = 0x10ecca1aad7220b56a62008b35170bfd5e35885c4014a19f
  Qy = 0x04eb61984c6c12ade3bc47f3c629ece7aa0a033b9948d686
  R = 0x82bfa4e82c0dfe9274169b86694e76ce993fd83b5c60f325
  S = 0xa97685676c59a65dbde002fe9d613431fb183e8006d05633
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a278461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76e1
  Qx = 0x6636653cb5b894ca65c448277b29da3ad101c4c2300f7c04
  Qy = 0xfdf1cbb3fc3fd6a4f890b59e554544175fa77dbdbeb656c1
  R = 0xeac2ddecddfb79931a9c3d49c08de0645c783a24cb365e1c
  S = 0x3549fee3cfa7e5f93bc47d92d8ba100e881a2a93c22f8d50
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6ba2
  Qx = 0xa82bd718d01d354001148cd5f69b9ebf38ff6f21898f8aaa
  Qy = 0xe67ceede07fc2ebfafd62462a51e4b6c6b3d5b537b7caf3e
  R = 0x4d292486c620c3de20856e57d3bb72fcde4a73ad26376955
  S = 0xa85289591a6081d5728825520e62ff1c64f94235c04c7f95
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc910250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb658
  Qx = 0x7d3b016b57758b160c4fca73d48df07ae3b6b30225126c2f
  Qy = 0x4af3790d9775742bde46f8da876711be1b65244b2b39e7ec
  R = 0x95f778f5f656511a5ab49a5d69ddd0929563c29cbc3a9e62
  S = 0x75c87fc358c251b4c83d2dd979faad496b539f9f2ee7a289
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e10288acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c90a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9ea387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c97a
  Qx = 0x9362f28c4ef96453d8a2f849f21e881cd7566887da8beb4a
  Qy = 0xe64d26d8d74c48a024ae85d982ee74cd16046f4ee5333905
  R = 0xf3923476a296c88287e8de914b0b324ad5a963319a4fe73b
  S = 0xf0baeed7624ed00d15244d8ba2aede085517dbdec8ac65f5
  test_signature_validity( Msg, Qx, Qy, R, S, True )

  Msg = 0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f6450d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d9064e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8ce1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd0456d
  Qx = 0xcc6fc032a846aaac25533eb033522824f94e670fa997ecef
  Qy = 0xe25463ef77a029eccda8b294fd63dd694e38d223d30862f1
  R = 0x066b1d07f3a40e679b620eda7f550842a35c18b80c5ebe06
  S = 0xa0b0fb201e8f2df65e2c4508ef303bdc90d934016f16b2dc
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214eed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c440341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839d7
  Qx = 0x955c908fe900a996f7e2089bee2f6376830f76a19135e753
  Qy = 0xba0c42a91d3847de4a592a46dc3fdaf45a7cc709b90de520
  R = 0x1f58ad77fc04c782815a1405b0925e72095d906cbf52a668
  S = 0xf2e93758b3af75edf784f05a6761c9b9a6043c66b845b599
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf9986670a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b41269bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160cef3
  Qx = 0x31f7fa05576d78a949b24812d4383107a9a45bb5fccdd835
  Qy = 0x8dc0eb65994a90f02b5e19bd18b32d61150746c09107e76b
  R = 0xbe26d59e4e883dde7c286614a767b31e49ad88789d3a78ff
  S = 0x8762ca831c1ce42df77893c9b03119428e7a9b819b619068
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add5023572720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1
  Qx = 0x66aa8edbbdb5cf8e28ceb51b5bda891cae2df84819fe25c0
  Qy = 0x0c6bc2f69030a7ce58d4a00e3b3349844784a13b8936f8da
  R = 0xa4661e69b1734f4a71b788410a464b71e7ffe42334484f23
  S = 0x738421cf5e049159d69c57a915143e226cac8355e149afe9
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5afa261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6db377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb
  Qx = 0xbcfacf45139b6f5f690a4c35a5fffa498794136a2353fc77
  Qy = 0x6f4a6c906316a6afc6d98fe1f0399d056f128fe0270b0f22
  R = 0x9db679a3dafe48f7ccad122933acfe9da0970b71c94c21c1
  S = 0x984c2db99827576c0a41a5da41e07d8cc768bc82f18c9da9
  test_signature_validity( Msg, Qx, Qy, R, S, False )


  print_("Testing the example code:")

  # Building a public/private key pair from the NIST Curve P-192:

  g = generator_192
  n = g.order()

  # (random.SystemRandom is supposed to provide
  # crypto-quality random numbers, but as Debian recently
  # illustrated, a systems programmer can accidentally
  # demolish this security, so in serious applications
  # further precautions are appropriate.)

  randrange = random.SystemRandom().randrange

  secret = randrange( 1, n )
  pubkey = Public_key( g, g * secret )
  privkey = Private_key( pubkey, secret )

  # Signing a hash value:

  hash = randrange( 1, n )
  signature = privkey.sign( hash, randrange( 1, n ) )

  # Verifying a signature for a hash value:

  if pubkey.verifies( hash, signature ):
    print_("Demo verification succeeded.")
  else:
    raise TestFailure("*** Demo verification failed.")

  if pubkey.verifies( hash-1, signature ):
    raise TestFailure( "**** Demo verification failed to reject tampered hash.")
  else:
    print_("Demo verification correctly rejected tampered hash.")

if __name__ == "__main__":
  __main__()
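Editorial note, not part of the diff: the module docstring above already walks through key generation, signing, and verification. A minimal sketch of that same flow, assuming the upstream python-ecdsa package is importable as `ecdsa` so the vendored module above resolves as ecdsa.ecdsa:

from random import SystemRandom
from ecdsa.ecdsa import Public_key, Private_key, generator_192

rand = SystemRandom().randrange
g = generator_192
n = g.order()
secret = rand(1, n)
pub = Public_key(g, g * secret)
priv = Private_key(pub, secret)

digest = rand(1, n)                  # stands in for a real message hash
sig = priv.sign(digest, rand(1, n))  # second argument is the per-signature nonce k
assert pub.verifies(digest, sig)
assert not pub.verifies(digest - 1, sig)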
293
bin/python/ecdsa/ellipticcurve.py
Normal file
@@ -0,0 +1,293 @@
#! /usr/bin/env python
#
# Implementation of elliptic curves, for cryptographic applications.
#
# This module doesn't provide any way to choose a random elliptic
# curve, nor to verify that an elliptic curve was chosen randomly,
# because one can simply use NIST's standard curves.
#
# Notes from X9.62-1998 (draft):
#   Nomenclature:
#     - Q is a public key.
#     The "Elliptic Curve Domain Parameters" include:
#     - q is the "field size", which in our case equals p.
#     - p is a big prime.
#     - G is a point of prime order (5.1.1.1).
#     - n is the order of G (5.1.1.1).
#   Public-key validation (5.2.2):
#     - Verify that Q is not the point at infinity.
#     - Verify that X_Q and Y_Q are in [0,p-1].
#     - Verify that Q is on the curve.
#     - Verify that nQ is the point at infinity.
#   Signature generation (5.3):
#     - Pick random k from [1,n-1].
#   Signature checking (5.4.2):
#     - Verify that r and s are in [1,n-1].
#
# Version of 2008.11.25.
#
# Revision history:
#    2005.12.31 - Initial version.
#    2008.11.25 - Change CurveFp.is_on to contains_point.
#
# Written in 2005 by Peter Pearson and placed in the public domain.

from __future__ import division

from .six import print_
from . import numbertheory

class CurveFp( object ):
  """Elliptic Curve over the field of integers modulo a prime."""
  def __init__( self, p, a, b ):
    """The curve of points satisfying y^2 = x^3 + a*x + b (mod p)."""
    self.__p = p
    self.__a = a
    self.__b = b

  def p( self ):
    return self.__p

  def a( self ):
    return self.__a

  def b( self ):
    return self.__b

  def contains_point( self, x, y ):
    """Is the point (x,y) on this curve?"""
    return ( y * y - ( x * x * x + self.__a * x + self.__b ) ) % self.__p == 0


class Point( object ):
  """A point on an elliptic curve. Altering x and y is forbidding,
     but they can be read by the x() and y() methods."""
  def __init__( self, curve, x, y, order = None ):
    """curve, x, y, order; order (optional) is the order of this point."""
    self.__curve = curve
    self.__x = x
    self.__y = y
    self.__order = order
    # self.curve is allowed to be None only for INFINITY:
    if self.__curve: assert self.__curve.contains_point( x, y )
    if order: assert self * order == INFINITY

  def __eq__( self, other ):
    """Return True if the points are identical, False otherwise."""
    if self.__curve == other.__curve \
       and self.__x == other.__x \
       and self.__y == other.__y:
      return True
    else:
      return False

  def __add__( self, other ):
    """Add one point to another point."""

    # X9.62 B.3:

    if other == INFINITY: return self
    if self == INFINITY: return other
    assert self.__curve == other.__curve
    if self.__x == other.__x:
      if ( self.__y + other.__y ) % self.__curve.p() == 0:
        return INFINITY
      else:
        return self.double()

    p = self.__curve.p()

    l = ( ( other.__y - self.__y ) * \
          numbertheory.inverse_mod( other.__x - self.__x, p ) ) % p

    x3 = ( l * l - self.__x - other.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def __mul__( self, other ):
    """Multiply a point by an integer."""

    def leftmost_bit( x ):
      assert x > 0
      result = 1
      while result <= x: result = 2 * result
      return result // 2

    e = other
    if self.__order: e = e % self.__order
    if e == 0: return INFINITY
    if self == INFINITY: return INFINITY
    assert e > 0

    # From X9.62 D.3.2:

    e3 = 3 * e
    negative_self = Point( self.__curve, self.__x, -self.__y, self.__order )
    i = leftmost_bit( e3 ) // 2
    result = self
    # print_("Multiplying %s by %d (e3 = %d):" % ( self, other, e3 ))
    while i > 1:
      result = result.double()
      if ( e3 & i ) != 0 and ( e & i ) == 0: result = result + self
      if ( e3 & i ) == 0 and ( e & i ) != 0: result = result + negative_self
      # print_(". . . i = %d, result = %s" % ( i, result ))
      i = i // 2

    return result

  def __rmul__( self, other ):
    """Multiply a point by an integer."""

    return self * other

  def __str__( self ):
    if self == INFINITY: return "infinity"
    return "(%d,%d)" % ( self.__x, self.__y )

  def double( self ):
    """Return a new point that is twice the old."""

    if self == INFINITY:
      return INFINITY

    # X9.62 B.3:

    p = self.__curve.p()
    a = self.__curve.a()

    l = ( ( 3 * self.__x * self.__x + a ) * \
          numbertheory.inverse_mod( 2 * self.__y, p ) ) % p

    x3 = ( l * l - 2 * self.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def x( self ):
    return self.__x

  def y( self ):
    return self.__y

  def curve( self ):
    return self.__curve

  def order( self ):
    return self.__order


# This one point is the Point At Infinity for all purposes:
INFINITY = Point( None, None, None )

def __main__():

  class FailedTest(Exception): pass
  def test_add( c, x1, y1, x2, y2, x3, y3 ):
    """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
    p1 = Point( c, x1, y1 )
    p2 = Point( c, x2, y2 )
    p3 = p1 + p2
    print_("%s + %s = %s" % ( p1, p2, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double( c, x1, y1, x3, y3 ):
    """We expect that on curve c, 2*(x1,y1) = (x3, y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double_infinity( c ):
    """We expect that on curve c, 2*INFINITY = INFINITY."""
    p1 = INFINITY
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != INFINITY.x() or p3.y() != INFINITY.y():
      raise FailedTest("Failure: should give (%d,%d)." % ( INFINITY.x(), INFINITY.y() ))
    else:
      print_(" Good.")

  def test_multiply( c, x1, y1, m, x3, y3 ):
    """We expect that on curve c, m*(x1,y1) = (x3,y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1 * m
    print_("%s * %d = %s" % ( p1, m, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")


  # A few tests from X9.62 B.3:

  c = CurveFp( 23, 1, 1 )
  test_add( c, 3, 10, 9, 7, 17, 20 )
  test_double( c, 3, 10, 7, 12 )
  test_add( c, 3, 10, 3, 10, 7, 12 ) # (Should just invoke double.)
  test_multiply( c, 3, 10, 2, 7, 12 )

  test_double_infinity(c)

  # From X9.62 I.1 (p. 96):

  g = Point( c, 13, 7, 7 )

  check = INFINITY
  for i in range( 7 + 1 ):
    p = ( i % 7 ) * g
    print_("%s * %d = %s, expected %s . . ." % ( g, i, p, check ), end=' ')
    if p == check:
      print_(" Good.")
    else:
      raise FailedTest("Bad.")
    check = check + g

  # NIST Curve P-192:
  p = 6277101735386680763835789423207666416083908700390324961279
  r = 6277101735386680763835789423176059013767194773182842284081
  #s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
  c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
  b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
  Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
  Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

  c192 = CurveFp( p, -3, b )
  p192 = Point( c192, Gx, Gy, r )

  # Checking against some sample computations presented
  # in X9.62:

  d = 651056770906015076056810763456358567190100156695615665659
  Q = d * p192
  if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
    raise FailedTest("p192 * d came out wrong.")
  else:
    print_("p192 * d came out right.")

  k = 6140507067065001063065065565667405560006161556565665656654
  R = k * p192
  if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("k * p192 came out wrong.")
  else:
    print_("k * p192 came out right.")

  u1 = 2563697409189434185194736134579731015366492496392189760599
  u2 = 6266643813348617967186477710235785849136406323338782220568
  temp = u1 * p192 + u2 * Q
  if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("u1 * p192 + u2 * Q came out wrong.")
  else:
    print_("u1 * p192 + u2 * Q came out right.")

if __name__ == "__main__":
  __main__()
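Editorial note, not part of the diff: the X9.62 B.3 toy curve used in the self-test above (p=23, a=1, b=1) is small enough to check point arithmetic by hand. A minimal sketch, assuming the upstream `ecdsa` package is installed so the module above resolves as ecdsa.ellipticcurve; the expected coordinates are the same ones the self-test asserts:

from ecdsa.ellipticcurve import CurveFp, Point

c = CurveFp(23, 1, 1)          # y^2 = x^3 + x + 1 over GF(23)
p = Point(c, 3, 10)
q = Point(c, 9, 7)
assert (p + q).x() == 17 and (p + q).y() == 20   # (3,10) + (9,7) = (17,20)
assert p.double().x() == 7 and p.double().y() == 12
assert (2 * p) == p.double()   # scalar multiplication via __mul__/__rmul__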
283
bin/python/ecdsa/keys.py
Normal file
@@ -0,0 +1,283 @@
import binascii

from . import ecdsa
from . import der
from . import rfc6979
from .curves import NIST192p, find_curve
from .util import string_to_number, number_to_string, randrange
from .util import sigencode_string, sigdecode_string
from .util import oid_ecPublicKey, encoded_oid_ecPublicKey
from .six import PY3, b
from hashlib import sha1

class BadSignatureError(Exception):
    pass
class BadDigestError(Exception):
    pass

class VerifyingKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def from_public_point(klass, point, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.pubkey = ecdsa.Public_key(curve.generator, point)
        self.pubkey.order = curve.order
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1,
                    validate_point=True):
        order = curve.order
        assert len(string) == curve.verifying_key_length, \
               (len(string), curve.verifying_key_length)
        xs = string[:curve.baselen]
        ys = string[curve.baselen:]
        assert len(xs) == curve.baselen, (len(xs), curve.baselen)
        assert len(ys) == curve.baselen, (len(ys), curve.baselen)
        x = string_to_number(xs)
        y = string_to_number(ys)
        if validate_point:
            assert ecdsa.point_is_valid(curve.generator, x, y)
        from . import ellipticcurve
        point = ellipticcurve.Point(curve.curve, x, y, order)
        return klass.from_public_point(point, curve, hashfunc)

    @classmethod
    def from_pem(klass, string):
        return klass.from_der(der.unpem(string))

    @classmethod
    def from_der(klass, string):
        # [[oid_ecPublicKey,oid_curve], point_str_bitstring]
        s1,empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey: %s" %
                                    binascii.hexlify(empty))
        s2,point_str_bitstring = der.remove_sequence(s1)
        # s2 = oid_ecPublicKey,oid_curve
        oid_pk, rest = der.remove_object(s2)
        oid_curve, empty = der.remove_object(rest)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey objects: %s" %
                                    binascii.hexlify(empty))
        assert oid_pk == oid_ecPublicKey, (oid_pk, oid_ecPublicKey)
        curve = find_curve(oid_curve)
        point_str, empty = der.remove_bitstring(point_str_bitstring)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after pubkey pointstring: %s" %
                                    binascii.hexlify(empty))
        assert point_str.startswith(b("\x00\x04"))
        return klass.from_string(point_str[2:], curve)

    def to_string(self):
        # VerifyingKey.from_string(vk.to_string()) == vk as long as the
        # curves are the same: the curve itself is not included in the
        # serialized form
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        return x_str + y_str

    def to_pem(self):
        return der.topem(self.to_der(), "PUBLIC KEY")

    def to_der(self):
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        point_str = b("\x00\x04") + x_str + y_str
        return der.encode_sequence(der.encode_sequence(encoded_oid_ecPublicKey,
                                                       self.curve.encoded_oid),
                                   der.encode_bitstring(point_str))

    def verify(self, signature, data, hashfunc=None, sigdecode=sigdecode_string):
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()
        return self.verify_digest(signature, digest, sigdecode)

    def verify_digest(self, signature, digest, sigdecode=sigdecode_string):
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = sigdecode(signature, self.pubkey.order)
        sig = ecdsa.Signature(r, s)
        if self.pubkey.verifies(number, sig):
            return True
        raise BadSignatureError

class SigningKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def generate(klass, curve=NIST192p, entropy=None, hashfunc=sha1):
        secexp = randrange(curve.order, entropy)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    # to create a signing key from a short (arbitrary-length) seed, convert
    # that seed into an integer with something like
    # secexp=util.randrange_from_seed__X(seed, curve.order), and then pass
    # that integer into SigningKey.from_secret_exponent(secexp, curve)

    @classmethod
    def from_secret_exponent(klass, secexp, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.baselen = curve.baselen
        n = curve.order
        assert 1 <= secexp < n
        pubkey_point = curve.generator*secexp
        pubkey = ecdsa.Public_key(curve.generator, pubkey_point)
        pubkey.order = n
        self.verifying_key = VerifyingKey.from_public_point(pubkey_point, curve,
                                                            hashfunc)
        self.privkey = ecdsa.Private_key(pubkey, secexp)
        self.privkey.order = n
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1):
        assert len(string) == curve.baselen, (len(string), curve.baselen)
        secexp = string_to_number(string)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    @classmethod
    def from_pem(klass, string, hashfunc=sha1):
        # the privkey pem file has two sections: "EC PARAMETERS" and "EC
        # PRIVATE KEY". The first is redundant.
        if PY3 and isinstance(string, str):
            string = string.encode()
        privkey_pem = string[string.index(b("-----BEGIN EC PRIVATE KEY-----")):]
        return klass.from_der(der.unpem(privkey_pem), hashfunc)

    @classmethod
    def from_der(klass, string, hashfunc=sha1):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        s, empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey: %s" %
                                    binascii.hexlify(empty))
        one, s = der.remove_integer(s)
        if one != 1:
            raise der.UnexpectedDER("expected '1' at start of DER privkey,"
                                    " got %d" % one)
        privkey_str, s = der.remove_octet_string(s)
        tag, curve_oid_str, s = der.remove_constructed(s)
        if tag != 0:
            raise der.UnexpectedDER("expected tag 0 in DER privkey,"
                                    " got %d" % tag)
        curve_oid, empty = der.remove_object(curve_oid_str)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey "
                                    "curve_oid: %s" % binascii.hexlify(empty))
        curve = find_curve(curve_oid)

        # we don't actually care about the following fields
        #
        #tag, pubkey_bitstring, s = der.remove_constructed(s)
        #if tag != 1:
        #    raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d"
        #                            % tag)
        #pubkey_str = der.remove_bitstring(pubkey_bitstring)
        #if empty != "":
        #    raise der.UnexpectedDER("trailing junk after DER privkey "
        #                            "pubkeystr: %s" % binascii.hexlify(empty))

        # our from_string method likes fixed-length privkey strings
        if len(privkey_str) < curve.baselen:
            privkey_str = b("\x00")*(curve.baselen-len(privkey_str)) + privkey_str
        return klass.from_string(privkey_str, curve, hashfunc)

    def to_string(self):
        secexp = self.privkey.secret_multiplier
        s = number_to_string(secexp, self.privkey.order)
        return s

    def to_pem(self):
        # TODO: "BEGIN ECPARAMETERS"
        return der.topem(self.to_der(), "EC PRIVATE KEY")

    def to_der(self):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        encoded_vk = b("\x00\x04") + self.get_verifying_key().to_string()
        return der.encode_sequence(der.encode_integer(1),
                                   der.encode_octet_string(self.to_string()),
                                   der.encode_constructed(0, self.curve.encoded_oid),
                                   der.encode_constructed(1, der.encode_bitstring(encoded_vk)),
                                   )

    def get_verifying_key(self):
        return self.verifying_key

    def sign_deterministic(self, data, hashfunc=None, sigencode=sigencode_string):
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()

        return self.sign_digest_deterministic(digest, hashfunc=hashfunc, sigencode=sigencode)

    def sign_digest_deterministic(self, digest, hashfunc=None, sigencode=sigencode_string):
        """
        Calculates 'k' from data itself, removing the need for strong
        random generator and producing deterministic (reproducible) signatures.
        See RFC 6979 for more details.
        """
        secexp = self.privkey.secret_multiplier
        k = rfc6979.generate_k(
            self.curve.generator.order(), secexp, hashfunc, digest)

        return self.sign_digest(digest, sigencode=sigencode, k=k)

    def sign(self, data, entropy=None, hashfunc=None, sigencode=sigencode_string, k=None):
        """
        hashfunc= should behave like hashlib.sha1 . The output length of the
        hash (in bytes) must not be longer than the length of the curve order
        (rounded up to the nearest byte), so using SHA256 with nist256p is
        ok, but SHA256 with nist192p is not. (In the 2**-96ish unlikely event
        of a hash output larger than the curve order, the hash will
        effectively be wrapped mod n).

        Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode,
        or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256.
        """

        hashfunc = hashfunc or self.default_hashfunc
        h = hashfunc(data).digest()
        return self.sign_digest(h, entropy, sigencode, k)

    def sign_digest(self, digest, entropy=None, sigencode=sigencode_string, k=None):
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = self.sign_number(number, entropy, k)
        return sigencode(r, s, self.privkey.order)

    def sign_number(self, number, entropy=None, k=None):
        # returns a pair of numbers
        order = self.privkey.order
        # privkey.sign() may raise RuntimeError in the amazingly unlikely
        # (2**-192) event that r=0 or s=0, because that would leak the key.
        # We could re-try with a different 'k', but we couldn't test that
        # code, so I choose to allow the signature to fail instead.

        # If k is set, it is used directly. In other cases
        # it is generated using entropy function
        if k is not None:
            _k = k
        else:
            _k = randrange(order, entropy)

        assert 1 <= _k < order
        sig = self.privkey.sign(number, _k)
        return sig.r, sig.s
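Editorial note, not part of the diff: SigningKey/VerifyingKey above are the high-level wrappers around the low-level ecdsa.py classes. A minimal usage sketch, assuming the upstream `ecdsa` package is installed so the vendored modules resolve as ecdsa.keys and ecdsa.curves (SHA-1 is this version's default hash):

from ecdsa.keys import SigningKey
from ecdsa.curves import NIST192p

sk = SigningKey.generate(curve=NIST192p)   # random secret exponent
vk = sk.get_verifying_key()
sig = sk.sign(b"message")                  # sigencode_string by default
assert vk.verify(sig, b"message")          # raises BadSignatureError on mismatch

pem = sk.to_pem()                          # "EC PRIVATE KEY" PEM block
sk2 = SigningKey.from_pem(pem)
assert sk2.to_string() == sk.to_string()   # round-trips the secret exponent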
613
bin/python/ecdsa/numbertheory.py
Normal file
@@ -0,0 +1,613 @@
#! /usr/bin/env python
#
# Provide some simple capabilities from number theory.
#
# Version of 2008.11.14.
#
# Written in 2005 and 2006 by Peter Pearson and placed in the public domain.
# Revision history:
#   2008.11.14: Use pow( base, exponent, modulus ) for modular_exp.
#               Make gcd and lcm accept arbitrarly many arguments.

from __future__ import division

from .six import print_, integer_types
from .six.moves import reduce

import math


class Error( Exception ):
  """Base class for exceptions in this module."""
  pass

class SquareRootError( Error ):
  pass

class NegativeExponentError( Error ):
  pass


def modular_exp( base, exponent, modulus ):
  "Raise base to exponent, reducing by modulus"
  if exponent < 0:
    raise NegativeExponentError( "Negative exponents (%d) not allowed" \
                                 % exponent )
  return pow( base, exponent, modulus )
#   result = 1L
#   x = exponent
#   b = base + 0L
#   while x > 0:
#     if x % 2 > 0: result = (result * b) % modulus
#     x = x // 2
#     b = ( b * b ) % modulus
#   return result


def polynomial_reduce_mod( poly, polymod, p ):
  """Reduce poly by polymod, integer arithmetic modulo p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  # Just to make this easy, require a monic polynomial:
  assert polymod[-1] == 1

  assert len( polymod ) > 1

  while len( poly ) >= len( polymod ):
    if poly[-1] != 0:
      for i in range( 2, len( polymod ) + 1 ):
        poly[-i] = ( poly[-i] - poly[-1] * polymod[-i] ) % p
    poly = poly[0:-1]

  return poly


def polynomial_multiply_mod( m1, m2, polymod, p ):
  """Polynomial multiplication modulo a polynomial over ints mod p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # This is just a seat-of-the-pants implementation.

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  # Initialize the product to zero:

  prod = ( len( m1 ) + len( m2 ) - 1 ) * [0]

  # Add together all the cross-terms:

  for i in range( len( m1 ) ):
    for j in range( len( m2 ) ):
      prod[i+j] = ( prod[i+j] + m1[i] * m2[j] ) % p

  return polynomial_reduce_mod( prod, polymod, p )


def polynomial_exp_mod( base, exponent, polymod, p ):
  """Polynomial exponentiation modulo a polynomial over ints mod p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # Based on the Handbook of Applied Cryptography, algorithm 2.227.

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  assert exponent < p

  if exponent == 0: return [ 1 ]

  G = base
  k = exponent
  if k%2 == 1: s = G
  else: s = [ 1 ]

  while k > 1:
    k = k // 2
    G = polynomial_multiply_mod( G, G, polymod, p )
    if k%2 == 1: s = polynomial_multiply_mod( G, s, polymod, p )

  return s


def jacobi( a, n ):
  """Jacobi symbol"""

  # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149.

  # This function has been tested by comparison with a small
  # table printed in HAC, and by extensive use in calculating
  # modular square roots.

  assert n >= 3
  assert n%2 == 1
  a = a % n
  if a == 0: return 0
  if a == 1: return 1
  a1, e = a, 0
  while a1%2 == 0:
    a1, e = a1//2, e+1
  if e%2 == 0 or n%8 == 1 or n%8 == 7: s = 1
  else: s = -1
  if a1 == 1: return s
  if n%4 == 3 and a1%4 == 3: s = -s
  return s * jacobi( n % a1, a1 )


def square_root_mod_prime( a, p ):
  """Modular square root of a, mod p, p prime."""

  # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.

  # This module has been tested for all values in [0,p-1] for
  # every prime p from 3 to 1229.
||||||
|
|
||||||
|
assert 0 <= a < p
|
||||||
|
assert 1 < p
|
||||||
|
|
||||||
|
if a == 0: return 0
|
||||||
|
if p == 2: return a
|
||||||
|
|
||||||
|
jac = jacobi( a, p )
|
||||||
|
if jac == -1: raise SquareRootError( "%d has no square root modulo %d" \
|
||||||
|
% ( a, p ) )
|
||||||
|
|
||||||
|
if p % 4 == 3: return modular_exp( a, (p+1)//4, p )
|
||||||
|
|
||||||
|
if p % 8 == 5:
|
||||||
|
d = modular_exp( a, (p-1)//4, p )
|
||||||
|
if d == 1: return modular_exp( a, (p+3)//8, p )
|
||||||
|
if d == p-1: return ( 2 * a * modular_exp( 4*a, (p-5)//8, p ) ) % p
|
||||||
|
raise RuntimeError("Shouldn't get here.")
|
||||||
|
|
||||||
|
for b in range( 2, p ):
|
||||||
|
if jacobi( b*b-4*a, p ) == -1:
|
||||||
|
f = ( a, -b, 1 )
|
||||||
|
ff = polynomial_exp_mod( ( 0, 1 ), (p+1)//2, f, p )
|
||||||
|
assert ff[1] == 0
|
||||||
|
return ff[0]
|
||||||
|
raise RuntimeError("No b found.")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def inverse_mod( a, m ):
|
||||||
|
"""Inverse of a mod m."""
|
||||||
|
|
||||||
|
if a < 0 or m <= a: a = a % m
|
||||||
|
|
||||||
|
# From Ferguson and Schneier, roughly:
|
||||||
|
|
||||||
|
c, d = a, m
|
||||||
|
uc, vc, ud, vd = 1, 0, 0, 1
|
||||||
|
while c != 0:
|
||||||
|
q, c, d = divmod( d, c ) + ( c, )
|
||||||
|
uc, vc, ud, vd = ud - q*uc, vd - q*vc, uc, vc
|
||||||
|
|
||||||
|
# At this point, d is the GCD, and ud*a+vd*m = d.
|
||||||
|
# If d == 1, this means that ud is an inverse.
|
||||||
|
|
||||||
|
assert d == 1
|
||||||
|
if ud > 0: return ud
|
||||||
|
else: return ud + m
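A quick check of the invariant stated in the comment (ud*a + vd*m == d, with d == 1 for coprime inputs):

a, m = 7, 40
inv = inverse_mod(a, m)        # 23, since 7*23 = 161 = 4*40 + 1
assert (a * inv) % m == 1
assert 0 < inv < m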
|
||||||
|
|
||||||
|
|
||||||
|
def gcd2(a, b):
|
||||||
|
"""Greatest common divisor using Euclid's algorithm."""
|
||||||
|
while a:
|
||||||
|
a, b = b%a, a
|
||||||
|
return b
|
||||||
|
|
||||||
|
|
||||||
|
def gcd( *a ):
|
||||||
|
"""Greatest common divisor.
|
||||||
|
|
||||||
|
Usage: gcd( [ 2, 4, 6 ] )
|
||||||
|
or: gcd( 2, 4, 6 )
|
||||||
|
"""
|
||||||
|
|
||||||
|
if len( a ) > 1: return reduce( gcd2, a )
|
||||||
|
if hasattr( a[0], "__iter__" ): return reduce( gcd2, a[0] )
|
||||||
|
return a[0]
|
||||||
|
|
||||||
|
|
||||||
|
def lcm2(a,b):
|
||||||
|
"""Least common multiple of two integers."""
|
||||||
|
|
||||||
|
return (a*b)//gcd(a,b)
|
||||||
|
|
||||||
|
|
||||||
|
def lcm( *a ):
|
||||||
|
"""Least common multiple.
|
||||||
|
|
||||||
|
Usage: lcm( [ 3, 4, 5 ] )
|
||||||
|
or: lcm( 3, 4, 5 )
|
||||||
|
"""
|
||||||
|
|
||||||
|
if len( a ) > 1: return reduce( lcm2, a )
|
||||||
|
if hasattr( a[0], "__iter__" ): return reduce( lcm2, a[0] )
|
||||||
|
return a[0]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def factorization( n ):
|
||||||
|
"""Decompose n into a list of (prime,exponent) pairs."""
|
||||||
|
|
||||||
|
assert isinstance( n, integer_types )
|
||||||
|
|
||||||
|
if n < 2: return []
|
||||||
|
|
||||||
|
result = []
|
||||||
|
d = 2
|
||||||
|
|
||||||
|
# Test the small primes:
|
||||||
|
|
||||||
|
for d in smallprimes:
|
||||||
|
if d > n: break
|
||||||
|
q, r = divmod( n, d )
|
||||||
|
if r == 0:
|
||||||
|
count = 1
|
||||||
|
while d <= n:
|
||||||
|
n = q
|
||||||
|
q, r = divmod( n, d )
|
||||||
|
if r != 0: break
|
||||||
|
count = count + 1
|
||||||
|
result.append( ( d, count ) )
|
||||||
|
|
||||||
|
# If n is still greater than the last of our small primes,
|
||||||
|
# it may require further work:
|
||||||
|
|
||||||
|
if n > smallprimes[-1]:
|
||||||
|
if is_prime( n ): # If what's left is prime, it's easy:
|
||||||
|
result.append( ( n, 1 ) )
|
||||||
|
else: # Ugh. Search stupidly for a divisor:
|
||||||
|
d = smallprimes[-1]
|
||||||
|
while 1:
|
||||||
|
d = d + 2 # Try the next divisor.
|
||||||
|
q, r = divmod( n, d )
|
||||||
|
if q < d: break # n < d*d means we're done, n = 1 or prime.
|
||||||
|
if r == 0: # d divides n. How many times?
|
||||||
|
count = 1
|
||||||
|
n = q
|
||||||
|
while d <= n: # As long as d might still divide n,
|
||||||
|
q, r = divmod( n, d ) # see if it does.
|
||||||
|
if r != 0: break
|
||||||
|
n = q # It does. Reduce n, increase count.
|
||||||
|
count = count + 1
|
||||||
|
result.append( ( d, count ) )
|
||||||
|
if n > 1: result.append( ( n, 1 ) )
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def phi( n ):
|
||||||
|
"""Return the Euler totient function of n."""
|
||||||
|
|
||||||
|
assert isinstance( n, integer_types )
|
||||||
|
|
||||||
|
if n < 3: return 1
|
||||||
|
|
||||||
|
result = 1
|
||||||
|
ff = factorization( n )
|
||||||
|
for f in ff:
|
||||||
|
e = f[1]
|
||||||
|
if e > 1:
|
||||||
|
result = result * f[0] ** (e-1) * ( f[0] - 1 )
|
||||||
|
else:
|
||||||
|
result = result * ( f[0] - 1 )
|
||||||
|
return result
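A worked example of the product over prime powers used above: 12 = 2**2 * 3, so phi(12) = 2**(2-1)*(2-1) * (3-1) = 4 (run against the imported module, since factorization() needs the smallprimes table defined below).

assert factorization(12) == [(2, 2), (3, 1)]
assert phi(12) == 4
assert phi(10) == 4            # 10 = 2 * 5, so (2-1)*(5-1) = 4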
|
||||||
|
|
||||||
|
|
||||||
|
def carmichael( n ):
|
||||||
|
"""Return Carmichael function of n.
|
||||||
|
|
||||||
|
Carmichael(n) is the smallest integer x such that
|
||||||
|
m**x = 1 mod n for all m relatively prime to n.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return carmichael_of_factorized( factorization( n ) )
|
||||||
|
|
||||||
|
|
||||||
|
def carmichael_of_factorized( f_list ):
|
||||||
|
"""Return the Carmichael function of a number that is
|
||||||
|
represented as a list of (prime,exponent) pairs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if len( f_list ) < 1: return 1
|
||||||
|
|
||||||
|
result = carmichael_of_ppower( f_list[0] )
|
||||||
|
for i in range( 1, len( f_list ) ):
|
||||||
|
result = lcm( result, carmichael_of_ppower( f_list[i] ) )
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def carmichael_of_ppower( pp ):
|
||||||
|
"""Carmichael function of the given power of the given prime.
|
||||||
|
"""
|
||||||
|
|
||||||
|
p, a = pp
|
||||||
|
if p == 2 and a > 2: return 2**(a-2)
|
||||||
|
else: return (p-1) * p**(a-1)
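Spot checks of the prime-power formula above: lambda(8) = 2**(3-2) = 2, and lambda(15) = lcm(lambda(3), lambda(5)) = lcm(2, 4) = 4.

assert carmichael(8) == 2
assert carmichael(15) == 4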
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def order_mod( x, m ):
|
||||||
|
"""Return the order of x in the multiplicative group mod m.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Warning: this implementation is not very clever, and will
|
||||||
|
# take a long time if m is very large.
|
||||||
|
|
||||||
|
if m <= 1: return 0
|
||||||
|
|
||||||
|
assert gcd( x, m ) == 1
|
||||||
|
|
||||||
|
z = x
|
||||||
|
result = 1
|
||||||
|
while z != 1:
|
||||||
|
z = ( z * x ) % m
|
||||||
|
result = result + 1
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def largest_factor_relatively_prime( a, b ):
|
||||||
|
"""Return the largest factor of a relatively prime to b.
|
||||||
|
"""
|
||||||
|
|
||||||
|
while 1:
|
||||||
|
d = gcd( a, b )
|
||||||
|
if d <= 1: break
|
||||||
|
b = d
|
||||||
|
while 1:
|
||||||
|
q, r = divmod( a, d )
|
||||||
|
if r > 0:
|
||||||
|
break
|
||||||
|
a = q
|
||||||
|
return a
|
||||||
|
|
||||||
|
|
||||||
|
def kinda_order_mod( x, m ):
|
||||||
|
"""Return the order of x in the multiplicative group mod m',
|
||||||
|
where m' is the largest factor of m relatively prime to x.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return order_mod( x, largest_factor_relatively_prime( m, x ) )
|
||||||
|
|
||||||
|
|
||||||
|
def is_prime( n ):
|
||||||
|
"""Return True if x is prime, False otherwise.
|
||||||
|
|
||||||
|
We use the Miller-Rabin test, as given in Menezes et al. p. 138.
|
||||||
|
This test is not exact: there are composite values n for which
|
||||||
|
it returns True.
|
||||||
|
|
||||||
|
In testing the odd numbers from 10000001 to 19999999,
|
||||||
|
about 66 composites got past the first test,
|
||||||
|
5 got past the second test, and none got past the third.
|
||||||
|
Since factors of 2, 3, 5, 7, and 11 were detected during
|
||||||
|
preliminary screening, the number of numbers tested by
|
||||||
|
Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7)
|
||||||
|
= 4.57 million.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# (This is used to study the risk of false positives:)
|
||||||
|
global miller_rabin_test_count
|
||||||
|
|
||||||
|
miller_rabin_test_count = 0
|
||||||
|
|
||||||
|
if n <= smallprimes[-1]:
|
||||||
|
if n in smallprimes: return True
|
||||||
|
else: return False
|
||||||
|
|
||||||
|
if gcd( n, 2*3*5*7*11 ) != 1: return False
|
||||||
|
|
||||||
|
# Choose a number of iterations sufficient to reduce the
|
||||||
|
# probability of accepting a composite below 2**-80
|
||||||
|
# (from Menezes et al. Table 4.4):
|
||||||
|
|
||||||
|
t = 40
|
||||||
|
n_bits = 1 + int( math.log( n, 2 ) )
|
||||||
|
for k, tt in ( ( 100, 27 ),
|
||||||
|
( 150, 18 ),
|
||||||
|
( 200, 15 ),
|
||||||
|
( 250, 12 ),
|
||||||
|
( 300, 9 ),
|
||||||
|
( 350, 8 ),
|
||||||
|
( 400, 7 ),
|
||||||
|
( 450, 6 ),
|
||||||
|
( 550, 5 ),
|
||||||
|
( 650, 4 ),
|
||||||
|
( 850, 3 ),
|
||||||
|
( 1300, 2 ),
|
||||||
|
):
|
||||||
|
if n_bits < k: break
|
||||||
|
t = tt
|
||||||
|
|
||||||
|
# Run the test t times:
|
||||||
|
|
||||||
|
s = 0
|
||||||
|
r = n - 1
|
||||||
|
while ( r % 2 ) == 0:
|
||||||
|
s = s + 1
|
||||||
|
r = r // 2
|
||||||
|
for i in range( t ):
|
||||||
|
a = smallprimes[ i ]
|
||||||
|
y = modular_exp( a, r, n )
|
||||||
|
if y != 1 and y != n-1:
|
||||||
|
j = 1
|
||||||
|
while j <= s - 1 and y != n - 1:
|
||||||
|
y = modular_exp( y, 2, n )
|
||||||
|
if y == 1:
|
||||||
|
miller_rabin_test_count = i + 1
|
||||||
|
return False
|
||||||
|
j = j + 1
|
||||||
|
if y != n-1:
|
||||||
|
miller_rabin_test_count = i + 1
|
||||||
|
return False
|
||||||
|
return True
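A cross-check sketch using only this module's names (run against the imported module): below 2000 the preliminary screening plus the fixed Miller-Rabin bases must agree exactly with naive trial division.

def naive_is_prime(n):
    return n >= 2 and all(n % d for d in range(2, int(n ** 0.5) + 1))

assert all(is_prime(n) == naive_is_prime(n) for n in range(2, 2000))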
|
||||||
|
|
||||||
|
|
||||||
|
def next_prime( starting_value ):
|
||||||
|
"Return the smallest prime larger than the starting value."
|
||||||
|
|
||||||
|
if starting_value < 2: return 2
|
||||||
|
result = ( starting_value + 1 ) | 1
|
||||||
|
while not is_prime( result ): result = result + 2
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
smallprimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41,
|
||||||
|
43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97,
|
||||||
|
101, 103, 107, 109, 113, 127, 131, 137, 139, 149,
|
||||||
|
151, 157, 163, 167, 173, 179, 181, 191, 193, 197,
|
||||||
|
199, 211, 223, 227, 229, 233, 239, 241, 251, 257,
|
||||||
|
263, 269, 271, 277, 281, 283, 293, 307, 311, 313,
|
||||||
|
317, 331, 337, 347, 349, 353, 359, 367, 373, 379,
|
||||||
|
383, 389, 397, 401, 409, 419, 421, 431, 433, 439,
|
||||||
|
443, 449, 457, 461, 463, 467, 479, 487, 491, 499,
|
||||||
|
503, 509, 521, 523, 541, 547, 557, 563, 569, 571,
|
||||||
|
577, 587, 593, 599, 601, 607, 613, 617, 619, 631,
|
||||||
|
641, 643, 647, 653, 659, 661, 673, 677, 683, 691,
|
||||||
|
701, 709, 719, 727, 733, 739, 743, 751, 757, 761,
|
||||||
|
769, 773, 787, 797, 809, 811, 821, 823, 827, 829,
|
||||||
|
839, 853, 857, 859, 863, 877, 881, 883, 887, 907,
|
||||||
|
911, 919, 929, 937, 941, 947, 953, 967, 971, 977,
|
||||||
|
983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033,
|
||||||
|
1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093,
|
||||||
|
1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163,
|
||||||
|
1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229]
|
||||||
|
|
||||||
|
miller_rabin_test_count = 0
|
||||||
|
|
||||||
|
def __main__():
|
||||||
|
|
||||||
|
# Making sure locally defined exceptions work:
|
||||||
|
# p = modular_exp( 2, -2, 3 )
|
||||||
|
# p = square_root_mod_prime( 2, 3 )
|
||||||
|
|
||||||
|
|
||||||
|
print_("Testing gcd...")
|
||||||
|
assert gcd( 3*5*7, 3*5*11, 3*5*13 ) == 3*5
|
||||||
|
assert gcd( [ 3*5*7, 3*5*11, 3*5*13 ] ) == 3*5
|
||||||
|
assert gcd( 3 ) == 3
|
||||||
|
|
||||||
|
print_("Testing lcm...")
|
||||||
|
assert lcm( 3, 5*3, 7*3 ) == 3*5*7
|
||||||
|
assert lcm( [ 3, 5*3, 7*3 ] ) == 3*5*7
|
||||||
|
assert lcm( 3 ) == 3
|
||||||
|
|
||||||
|
print_("Testing next_prime...")
|
||||||
|
bigprimes = ( 999671,
|
||||||
|
999683,
|
||||||
|
999721,
|
||||||
|
999727,
|
||||||
|
999749,
|
||||||
|
999763,
|
||||||
|
999769,
|
||||||
|
999773,
|
||||||
|
999809,
|
||||||
|
999853,
|
||||||
|
999863,
|
||||||
|
999883,
|
||||||
|
999907,
|
||||||
|
999917,
|
||||||
|
999931,
|
||||||
|
999953,
|
||||||
|
999959,
|
||||||
|
999961,
|
||||||
|
999979,
|
||||||
|
999983 )
|
||||||
|
|
||||||
|
for i in range( len( bigprimes ) - 1 ):
|
||||||
|
assert next_prime( bigprimes[i] ) == bigprimes[ i+1 ]
|
||||||
|
|
||||||
|
error_tally = 0
|
||||||
|
|
||||||
|
# Test the square_root_mod_prime function:
|
||||||
|
|
||||||
|
for p in smallprimes:
|
||||||
|
print_("Testing square_root_mod_prime for modulus p = %d." % p)
|
||||||
|
squares = []
|
||||||
|
|
||||||
|
for root in range( 0, 1+p//2 ):
|
||||||
|
sq = ( root * root ) % p
|
||||||
|
squares.append( sq )
|
||||||
|
calculated = square_root_mod_prime( sq, p )
|
||||||
|
if ( calculated * calculated ) % p != sq:
|
||||||
|
error_tally = error_tally + 1
|
||||||
|
print_("Failed to find %d as sqrt( %d ) mod %d. Said %d." % \
|
||||||
|
( root, sq, p, calculated ))
|
||||||
|
|
||||||
|
for nonsquare in range( 0, p ):
|
||||||
|
if nonsquare not in squares:
|
||||||
|
try:
|
||||||
|
calculated = square_root_mod_prime( nonsquare, p )
|
||||||
|
except SquareRootError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
error_tally = error_tally + 1
|
||||||
|
print_("Failed to report no root for sqrt( %d ) mod %d." % \
|
||||||
|
( nonsquare, p ))
|
||||||
|
|
||||||
|
# Test the jacobi function:
|
||||||
|
for m in range( 3, 400, 2 ):
|
||||||
|
print_("Testing jacobi for modulus m = %d." % m)
|
||||||
|
if is_prime( m ):
|
||||||
|
squares = []
|
||||||
|
for root in range( 1, m ):
|
||||||
|
if jacobi( root * root, m ) != 1:
|
||||||
|
error_tally = error_tally + 1
|
||||||
|
print_("jacobi( %d * %d, %d ) != 1" % ( root, root, m ))
|
||||||
|
squares.append( root * root % m )
|
||||||
|
for i in range( 1, m ):
|
||||||
|
if not i in squares:
|
||||||
|
if jacobi( i, m ) != -1:
|
||||||
|
error_tally = error_tally + 1
|
||||||
|
print_("jacobi( %d, %d ) != -1" % ( i, m ))
|
||||||
|
else: # m is not prime.
|
||||||
|
f = factorization( m )
|
||||||
|
for a in range( 1, m ):
|
||||||
|
c = 1
|
||||||
|
for i in f:
|
||||||
|
c = c * jacobi( a, i[0] ) ** i[1]
|
||||||
|
if c != jacobi( a, m ):
|
||||||
|
error_tally = error_tally + 1
|
||||||
|
print_("%d != jacobi( %d, %d )" % ( c, a, m ))
|
||||||
|
|
||||||
|
|
||||||
|
# Test the inverse_mod function:
|
||||||
|
print_("Testing inverse_mod . . .")
|
||||||
|
import random
|
||||||
|
n_tests = 0
|
||||||
|
for i in range( 100 ):
|
||||||
|
m = random.randint( 20, 10000 )
|
||||||
|
for j in range( 100 ):
|
||||||
|
a = random.randint( 1, m-1 )
|
||||||
|
if gcd( a, m ) == 1:
|
||||||
|
n_tests = n_tests + 1
|
||||||
|
inv = inverse_mod( a, m )
|
||||||
|
if inv <= 0 or inv >= m or ( a * inv ) % m != 1:
|
||||||
|
error_tally = error_tally + 1
|
||||||
|
print_("%d = inverse_mod( %d, %d ) is wrong." % ( inv, a, m ))
|
||||||
|
assert n_tests > 1000
|
||||||
|
print_(n_tests, " tests of inverse_mod completed.")
|
||||||
|
|
||||||
|
class FailedTest(Exception): pass
|
||||||
|
print_(error_tally, "errors detected.")
|
||||||
|
if error_tally != 0:
|
||||||
|
raise FailedTest("%d errors detected" % error_tally)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
__main__()
|
||||||
bin/python/ecdsa/rfc6979.py (new file, 103 lines)
@@ -0,0 +1,103 @@
|
|||||||
|
'''
|
||||||
|
RFC 6979:
|
||||||
|
Deterministic Usage of the Digital Signature Algorithm (DSA) and
|
||||||
|
Elliptic Curve Digital Signature Algorithm (ECDSA)
|
||||||
|
|
||||||
|
http://tools.ietf.org/html/rfc6979
|
||||||
|
|
||||||
|
Many thanks to Coda Hale for his implementation in Go language:
|
||||||
|
https://github.com/codahale/rfc6979
|
||||||
|
'''
|
||||||
|
|
||||||
|
import hmac
|
||||||
|
from binascii import hexlify
|
||||||
|
from .util import number_to_string, number_to_string_crop
|
||||||
|
from .six import b
|
||||||
|
|
||||||
|
try:
|
||||||
|
bin(0)
|
||||||
|
except NameError:
|
||||||
|
binmap = {"0": "0000", "1": "0001", "2": "0010", "3": "0011",
|
||||||
|
"4": "0100", "5": "0101", "6": "0110", "7": "0111",
|
||||||
|
"8": "1000", "9": "1001", "a": "1010", "b": "1011",
|
||||||
|
"c": "1100", "d": "1101", "e": "1110", "f": "1111"}
|
||||||
|
def bin(value): # for python2.5
|
||||||
|
v = "".join(binmap[x] for x in "%x"%abs(value)).lstrip("0")
|
||||||
|
if value < 0:
|
||||||
|
return "-0b" + v
|
||||||
|
return "0b" + v
|
||||||
|
|
||||||
|
def bit_length(num):
|
||||||
|
# http://docs.python.org/dev/library/stdtypes.html#int.bit_length
|
||||||
|
s = bin(num) # binary representation: bin(-37) --> '-0b100101'
|
||||||
|
s = s.lstrip('-0b') # remove leading zeros and minus sign
|
||||||
|
return len(s) # len('100101') --> 6
|
||||||
|
|
||||||
|
def bits2int(data, qlen):
|
||||||
|
x = int(hexlify(data), 16)
|
||||||
|
l = len(data) * 8
|
||||||
|
|
||||||
|
if l > qlen:
|
||||||
|
return x >> (l-qlen)
|
||||||
|
return x
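Per RFC 6979 section 2.3.2, bits2int keeps only the leftmost qlen bits of the input; two small illustrations:

assert bits2int(b"\xff", 4) == 0x0f       # 8 input bits, keep only the top 4
assert bits2int(b"\x01\x00", 16) == 256   # already exactly qlen bits, returned unchanged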
|
||||||
|
|
||||||
|
def bits2octets(data, order):
|
||||||
|
z1 = bits2int(data, bit_length(order))
|
||||||
|
z2 = z1 - order
|
||||||
|
|
||||||
|
if z2 < 0:
|
||||||
|
z2 = z1
|
||||||
|
|
||||||
|
return number_to_string_crop(z2, order)
|
||||||
|
|
||||||
|
# https://tools.ietf.org/html/rfc6979#section-3.2
|
||||||
|
def generate_k(order, secexp, hash_func, data):
|
||||||
|
'''
|
||||||
|
order - order of the DSA generator used in the signature
|
||||||
|
secexp - secure exponent (private key) in numeric form
|
||||||
|
hash_func - reference to the same hash function used for generating hash
|
||||||
|
data - hash in binary form of the signing data
|
||||||
|
'''
|
||||||
|
|
||||||
|
qlen = bit_length(order)
|
||||||
|
holen = hash_func().digest_size
|
||||||
|
rolen = (qlen + 7) / 8
|
||||||
|
bx = number_to_string(secexp, order) + bits2octets(data, order)
|
||||||
|
|
||||||
|
# Step B
|
||||||
|
v = b('\x01') * holen
|
||||||
|
|
||||||
|
# Step C
|
||||||
|
k = b('\x00') * holen
|
||||||
|
|
||||||
|
# Step D
|
||||||
|
|
||||||
|
k = hmac.new(k, v+b('\x00')+bx, hash_func).digest()
|
||||||
|
|
||||||
|
# Step E
|
||||||
|
v = hmac.new(k, v, hash_func).digest()
|
||||||
|
|
||||||
|
# Step F
|
||||||
|
k = hmac.new(k, v+b('\x01')+bx, hash_func).digest()
|
||||||
|
|
||||||
|
# Step G
|
||||||
|
v = hmac.new(k, v, hash_func).digest()
|
||||||
|
|
||||||
|
# Step H
|
||||||
|
while True:
|
||||||
|
# Step H1
|
||||||
|
t = b('')
|
||||||
|
|
||||||
|
# Step H2
|
||||||
|
while len(t) < rolen:
|
||||||
|
v = hmac.new(k, v, hash_func).digest()
|
||||||
|
t += v
|
||||||
|
|
||||||
|
# Step H3
|
||||||
|
secret = bits2int(t, qlen)
|
||||||
|
|
||||||
|
if secret >= 1 and secret < order:
|
||||||
|
return secret
|
||||||
|
|
||||||
|
k = hmac.new(k, v+b('\x00'), hash_func).digest()
|
||||||
|
v = hmac.new(k, v, hash_func).digest()
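Taken together, the HMAC loop above makes k a pure function of (order, secexp, hash function, message hash). The sketch below assumes a curve order `order` and a private scalar `secexp` are already in scope, mirroring test_deterministic in test_pyecdsa.py further down:

import hashlib

h1 = hashlib.sha256(b"sample").digest()
k1 = generate_k(order, secexp, hashlib.sha256, h1)
k2 = generate_k(order, secexp, hashlib.sha256, h1)
assert k1 == k2                # same inputs give the same k; no RNG is involved
assert 1 <= k1 < order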
|
||||||
bin/python/ecdsa/six.py (new file, 394 lines)
@@ -0,0 +1,394 @@
|
|||||||
|
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||||
|
|
||||||
|
# Copyright (c) 2010-2012 Benjamin Peterson
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
# this software and associated documentation files (the "Software"), to deal in
|
||||||
|
# the Software without restriction, including without limitation the rights to
|
||||||
|
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
# the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
# subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in all
|
||||||
|
# copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||||
|
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
import operator
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||||
|
__version__ = "1.2.0"
|
||||||
|
|
||||||
|
|
||||||
|
# True if we are running on Python 3.
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
string_types = str,
|
||||||
|
integer_types = int,
|
||||||
|
class_types = type,
|
||||||
|
text_type = str
|
||||||
|
binary_type = bytes
|
||||||
|
|
||||||
|
MAXSIZE = sys.maxsize
|
||||||
|
else:
|
||||||
|
string_types = basestring,
|
||||||
|
integer_types = (int, long)
|
||||||
|
class_types = (type, types.ClassType)
|
||||||
|
text_type = unicode
|
||||||
|
binary_type = str
|
||||||
|
|
||||||
|
if sys.platform.startswith("java"):
|
||||||
|
# Jython always uses 32 bits.
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||||
|
class X(object):
|
||||||
|
def __len__(self):
|
||||||
|
return 1 << 31
|
||||||
|
try:
|
||||||
|
len(X())
|
||||||
|
except OverflowError:
|
||||||
|
# 32-bit
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# 64-bit
|
||||||
|
MAXSIZE = int((1 << 63) - 1)
|
||||||
|
del X
|
||||||
|
|
||||||
|
|
||||||
|
def _add_doc(func, doc):
|
||||||
|
"""Add documentation to a function."""
|
||||||
|
func.__doc__ = doc
|
||||||
|
|
||||||
|
|
||||||
|
def _import_module(name):
|
||||||
|
"""Import module, returning the module after the last dot."""
|
||||||
|
__import__(name)
|
||||||
|
return sys.modules[name]
|
||||||
|
|
||||||
|
|
||||||
|
class _LazyDescr(object):
|
||||||
|
|
||||||
|
def __init__(self, name):
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def __get__(self, obj, tp):
|
||||||
|
result = self._resolve()
|
||||||
|
setattr(obj, self.name, result)
|
||||||
|
# This is a bit ugly, but it avoids running this again.
|
||||||
|
delattr(tp, self.name)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class MovedModule(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old, new=None):
|
||||||
|
super(MovedModule, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new is None:
|
||||||
|
new = name
|
||||||
|
self.mod = new
|
||||||
|
else:
|
||||||
|
self.mod = old
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
return _import_module(self.mod)
|
||||||
|
|
||||||
|
|
||||||
|
class MovedAttribute(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||||
|
super(MovedAttribute, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new_mod is None:
|
||||||
|
new_mod = name
|
||||||
|
self.mod = new_mod
|
||||||
|
if new_attr is None:
|
||||||
|
if old_attr is None:
|
||||||
|
new_attr = name
|
||||||
|
else:
|
||||||
|
new_attr = old_attr
|
||||||
|
self.attr = new_attr
|
||||||
|
else:
|
||||||
|
self.mod = old_mod
|
||||||
|
if old_attr is None:
|
||||||
|
old_attr = name
|
||||||
|
self.attr = old_attr
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
module = _import_module(self.mod)
|
||||||
|
return getattr(module, self.attr)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class _MovedItems(types.ModuleType):
|
||||||
|
"""Lazy loading of moved objects"""
|
||||||
|
|
||||||
|
|
||||||
|
_moved_attributes = [
|
||||||
|
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||||
|
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||||
|
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||||
|
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||||
|
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||||
|
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||||
|
MovedAttribute("StringIO", "StringIO", "io"),
|
||||||
|
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||||
|
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||||
|
|
||||||
|
MovedModule("builtins", "__builtin__"),
|
||||||
|
MovedModule("configparser", "ConfigParser"),
|
||||||
|
MovedModule("copyreg", "copy_reg"),
|
||||||
|
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||||
|
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||||
|
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||||
|
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||||
|
MovedModule("http_client", "httplib", "http.client"),
|
||||||
|
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||||
|
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||||
|
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||||
|
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||||
|
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||||
|
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||||
|
MovedModule("cPickle", "cPickle", "pickle"),
|
||||||
|
MovedModule("queue", "Queue"),
|
||||||
|
MovedModule("reprlib", "repr"),
|
||||||
|
MovedModule("socketserver", "SocketServer"),
|
||||||
|
MovedModule("tkinter", "Tkinter"),
|
||||||
|
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||||
|
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||||
|
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||||
|
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||||
|
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||||
|
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||||
|
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||||
|
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||||
|
"tkinter.colorchooser"),
|
||||||
|
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||||
|
"tkinter.commondialog"),
|
||||||
|
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||||
|
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||||
|
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||||
|
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||||
|
"tkinter.simpledialog"),
|
||||||
|
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||||
|
MovedModule("winreg", "_winreg"),
|
||||||
|
]
|
||||||
|
for attr in _moved_attributes:
|
||||||
|
setattr(_MovedItems, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
|
||||||
|
|
||||||
|
|
||||||
|
def add_move(move):
|
||||||
|
"""Add an item to six.moves."""
|
||||||
|
setattr(_MovedItems, move.name, move)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_move(name):
|
||||||
|
"""Remove item from six.moves."""
|
||||||
|
try:
|
||||||
|
delattr(_MovedItems, name)
|
||||||
|
except AttributeError:
|
||||||
|
try:
|
||||||
|
del moves.__dict__[name]
|
||||||
|
except KeyError:
|
||||||
|
raise AttributeError("no such move, %r" % (name,))
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
_meth_func = "__func__"
|
||||||
|
_meth_self = "__self__"
|
||||||
|
|
||||||
|
_func_code = "__code__"
|
||||||
|
_func_defaults = "__defaults__"
|
||||||
|
|
||||||
|
_iterkeys = "keys"
|
||||||
|
_itervalues = "values"
|
||||||
|
_iteritems = "items"
|
||||||
|
else:
|
||||||
|
_meth_func = "im_func"
|
||||||
|
_meth_self = "im_self"
|
||||||
|
|
||||||
|
_func_code = "func_code"
|
||||||
|
_func_defaults = "func_defaults"
|
||||||
|
|
||||||
|
_iterkeys = "iterkeys"
|
||||||
|
_itervalues = "itervalues"
|
||||||
|
_iteritems = "iteritems"
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
advance_iterator = next
|
||||||
|
except NameError:
|
||||||
|
def advance_iterator(it):
|
||||||
|
return it.next()
|
||||||
|
next = advance_iterator
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
callable = callable
|
||||||
|
except NameError:
|
||||||
|
def callable(obj):
|
||||||
|
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
def get_unbound_function(unbound):
|
||||||
|
return unbound
|
||||||
|
|
||||||
|
Iterator = object
|
||||||
|
else:
|
||||||
|
def get_unbound_function(unbound):
|
||||||
|
return unbound.im_func
|
||||||
|
|
||||||
|
class Iterator(object):
|
||||||
|
|
||||||
|
def next(self):
|
||||||
|
return type(self).__next__(self)
|
||||||
|
|
||||||
|
callable = callable
|
||||||
|
_add_doc(get_unbound_function,
|
||||||
|
"""Get the function out of a possibly unbound function""")
|
||||||
|
|
||||||
|
|
||||||
|
get_method_function = operator.attrgetter(_meth_func)
|
||||||
|
get_method_self = operator.attrgetter(_meth_self)
|
||||||
|
get_function_code = operator.attrgetter(_func_code)
|
||||||
|
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||||
|
|
||||||
|
|
||||||
|
def iterkeys(d):
|
||||||
|
"""Return an iterator over the keys of a dictionary."""
|
||||||
|
return iter(getattr(d, _iterkeys)())
|
||||||
|
|
||||||
|
def itervalues(d):
|
||||||
|
"""Return an iterator over the values of a dictionary."""
|
||||||
|
return iter(getattr(d, _itervalues)())
|
||||||
|
|
||||||
|
def iteritems(d):
|
||||||
|
"""Return an iterator over the (key, value) pairs of a dictionary."""
|
||||||
|
return iter(getattr(d, _iteritems)())
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
def b(s):
|
||||||
|
return s.encode("latin-1")
|
||||||
|
def u(s):
|
||||||
|
return s
|
||||||
|
if sys.version_info[1] <= 1:
|
||||||
|
def int2byte(i):
|
||||||
|
return bytes((i,))
|
||||||
|
else:
|
||||||
|
# This is about 2x faster than the implementation above on 3.2+
|
||||||
|
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||||
|
import io
|
||||||
|
StringIO = io.StringIO
|
||||||
|
BytesIO = io.BytesIO
|
||||||
|
else:
|
||||||
|
def b(s):
|
||||||
|
return s
|
||||||
|
def u(s):
|
||||||
|
if isinstance(s, unicode):
|
||||||
|
return s
|
||||||
|
return unicode(s, "unicode_escape")
|
||||||
|
int2byte = chr
|
||||||
|
import StringIO
|
||||||
|
StringIO = BytesIO = StringIO.StringIO
|
||||||
|
_add_doc(b, """Byte literal""")
|
||||||
|
_add_doc(u, """Text literal""")
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
import builtins
|
||||||
|
exec_ = getattr(builtins, "exec")
|
||||||
|
|
||||||
|
|
||||||
|
def reraise(tp, value, tb=None):
|
||||||
|
if value.__traceback__ is not tb:
|
||||||
|
raise value.with_traceback(tb)
|
||||||
|
raise value
|
||||||
|
|
||||||
|
|
||||||
|
print_ = getattr(builtins, "print")
|
||||||
|
del builtins
|
||||||
|
|
||||||
|
else:
|
||||||
|
def exec_(_code_, _globs_=None, _locs_=None):
|
||||||
|
"""Execute code in a namespace."""
|
||||||
|
if _globs_ is None:
|
||||||
|
frame = sys._getframe(1)
|
||||||
|
_globs_ = frame.f_globals
|
||||||
|
if _locs_ is None:
|
||||||
|
_locs_ = frame.f_locals
|
||||||
|
del frame
|
||||||
|
elif _locs_ is None:
|
||||||
|
_locs_ = _globs_
|
||||||
|
exec("""exec _code_ in _globs_, _locs_""")
|
||||||
|
|
||||||
|
|
||||||
|
exec_("""def reraise(tp, value, tb=None):
|
||||||
|
raise tp, value, tb
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
def print_(*args, **kwargs):
|
||||||
|
"""The new-style print function."""
|
||||||
|
fp = kwargs.pop("file", sys.stdout)
|
||||||
|
if fp is None:
|
||||||
|
return
|
||||||
|
def write(data):
|
||||||
|
if not isinstance(data, basestring):
|
||||||
|
data = str(data)
|
||||||
|
fp.write(data)
|
||||||
|
want_unicode = False
|
||||||
|
sep = kwargs.pop("sep", None)
|
||||||
|
if sep is not None:
|
||||||
|
if isinstance(sep, unicode):
|
||||||
|
want_unicode = True
|
||||||
|
elif not isinstance(sep, str):
|
||||||
|
raise TypeError("sep must be None or a string")
|
||||||
|
end = kwargs.pop("end", None)
|
||||||
|
if end is not None:
|
||||||
|
if isinstance(end, unicode):
|
||||||
|
want_unicode = True
|
||||||
|
elif not isinstance(end, str):
|
||||||
|
raise TypeError("end must be None or a string")
|
||||||
|
if kwargs:
|
||||||
|
raise TypeError("invalid keyword arguments to print()")
|
||||||
|
if not want_unicode:
|
||||||
|
for arg in args:
|
||||||
|
if isinstance(arg, unicode):
|
||||||
|
want_unicode = True
|
||||||
|
break
|
||||||
|
if want_unicode:
|
||||||
|
newline = unicode("\n")
|
||||||
|
space = unicode(" ")
|
||||||
|
else:
|
||||||
|
newline = "\n"
|
||||||
|
space = " "
|
||||||
|
if sep is None:
|
||||||
|
sep = space
|
||||||
|
if end is None:
|
||||||
|
end = newline
|
||||||
|
for i, arg in enumerate(args):
|
||||||
|
if i:
|
||||||
|
write(sep)
|
||||||
|
write(arg)
|
||||||
|
write(end)
|
||||||
|
|
||||||
|
_add_doc(reraise, """Reraise an exception.""")
|
||||||
|
|
||||||
|
|
||||||
|
def with_metaclass(meta, base=object):
|
||||||
|
"""Create a base class with a metaclass."""
|
||||||
|
return meta("NewBase", (base,), {})
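with_metaclass sidesteps the Python 2 __metaclass__ versus Python 3 `class C(metaclass=...)` syntax split by returning a throwaway base class that was itself built by the metaclass; for example:

class Meta(type):
    pass

class MyClass(with_metaclass(Meta)):
    pass

assert type(MyClass) is Meta   # the metaclass is applied under both Python 2 and 3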
|
||||||
bin/python/ecdsa/test_pyecdsa.py (new file, 663 lines)
@@ -0,0 +1,663 @@
|
|||||||
|
from __future__ import with_statement, division
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
from binascii import hexlify, unhexlify
|
||||||
|
from hashlib import sha1, sha256, sha512
|
||||||
|
|
||||||
|
from .six import b, print_, binary_type
|
||||||
|
from .keys import SigningKey, VerifyingKey
|
||||||
|
from .keys import BadSignatureError
|
||||||
|
from . import util
|
||||||
|
from .util import sigencode_der, sigencode_strings
|
||||||
|
from .util import sigdecode_der, sigdecode_strings
|
||||||
|
from .curves import Curve, UnknownCurveError
|
||||||
|
from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1
|
||||||
|
from .ellipticcurve import Point
|
||||||
|
from . import der
|
||||||
|
from . import rfc6979
|
||||||
|
|
||||||
|
class SubprocessError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def run_openssl(cmd):
|
||||||
|
OPENSSL = "openssl"
|
||||||
|
p = subprocess.Popen([OPENSSL] + cmd.split(),
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.STDOUT)
|
||||||
|
stdout, ignored = p.communicate()
|
||||||
|
if p.returncode != 0:
|
||||||
|
raise SubprocessError("cmd '%s %s' failed: rc=%s, stdout/err was %s" %
|
||||||
|
(OPENSSL, cmd, p.returncode, stdout))
|
||||||
|
return stdout.decode()
|
||||||
|
|
||||||
|
BENCH = False
|
||||||
|
|
||||||
|
class ECDSA(unittest.TestCase):
|
||||||
|
def test_basic(self):
|
||||||
|
priv = SigningKey.generate()
|
||||||
|
pub = priv.get_verifying_key()
|
||||||
|
|
||||||
|
data = b("blahblah")
|
||||||
|
sig = priv.sign(data)
|
||||||
|
|
||||||
|
self.assertTrue(pub.verify(sig, data))
|
||||||
|
self.assertRaises(BadSignatureError, pub.verify, sig, data+b("bad"))
|
||||||
|
|
||||||
|
pub2 = VerifyingKey.from_string(pub.to_string())
|
||||||
|
self.assertTrue(pub2.verify(sig, data))
|
||||||
|
|
||||||
|
def test_deterministic(self):
|
||||||
|
data = b("blahblah")
|
||||||
|
secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)
|
||||||
|
|
||||||
|
priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256)
|
||||||
|
pub = priv.get_verifying_key()
|
||||||
|
|
||||||
|
k = rfc6979.generate_k(
|
||||||
|
SECP256k1.generator.order(), secexp, sha256, sha256(data).digest())
|
||||||
|
|
||||||
|
sig1 = priv.sign(data, k=k)
|
||||||
|
self.assertTrue(pub.verify(sig1, data))
|
||||||
|
|
||||||
|
sig2 = priv.sign(data, k=k)
|
||||||
|
self.assertTrue(pub.verify(sig2, data))
|
||||||
|
|
||||||
|
sig3 = priv.sign_deterministic(data, sha256)
|
||||||
|
self.assertTrue(pub.verify(sig3, data))
|
||||||
|
|
||||||
|
self.assertEqual(sig1, sig2)
|
||||||
|
self.assertEqual(sig1, sig3)
|
||||||
|
|
||||||
|
def test_bad_usage(self):
|
||||||
|
# sk=SigningKey() is wrong
|
||||||
|
self.assertRaises(TypeError, SigningKey)
|
||||||
|
self.assertRaises(TypeError, VerifyingKey)
|
||||||
|
|
||||||
|
def test_lengths(self):
|
||||||
|
default = NIST192p
|
||||||
|
priv = SigningKey.generate()
|
||||||
|
pub = priv.get_verifying_key()
|
||||||
|
self.assertEqual(len(pub.to_string()), default.verifying_key_length)
|
||||||
|
sig = priv.sign(b("data"))
|
||||||
|
self.assertEqual(len(sig), default.signature_length)
|
||||||
|
if BENCH:
|
||||||
|
print_()
|
||||||
|
for curve in (NIST192p, NIST224p, NIST256p, NIST384p, NIST521p):
|
||||||
|
start = time.time()
|
||||||
|
priv = SigningKey.generate(curve=curve)
|
||||||
|
pub1 = priv.get_verifying_key()
|
||||||
|
keygen_time = time.time() - start
|
||||||
|
pub2 = VerifyingKey.from_string(pub1.to_string(), curve)
|
||||||
|
self.assertEqual(pub1.to_string(), pub2.to_string())
|
||||||
|
self.assertEqual(len(pub1.to_string()),
|
||||||
|
curve.verifying_key_length)
|
||||||
|
start = time.time()
|
||||||
|
sig = priv.sign(b("data"))
|
||||||
|
sign_time = time.time() - start
|
||||||
|
self.assertEqual(len(sig), curve.signature_length)
|
||||||
|
if BENCH:
|
||||||
|
start = time.time()
|
||||||
|
pub1.verify(sig, b("data"))
|
||||||
|
verify_time = time.time() - start
|
||||||
|
print_("%s: siglen=%d, keygen=%0.3fs, sign=%0.3f, verify=%0.3f" \
|
||||||
|
% (curve.name, curve.signature_length,
|
||||||
|
keygen_time, sign_time, verify_time))
|
||||||
|
|
||||||
|
def test_serialize(self):
|
||||||
|
seed = b("secret")
|
||||||
|
curve = NIST192p
|
||||||
|
secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order)
|
||||||
|
secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order)
|
||||||
|
self.assertEqual(secexp1, secexp2)
|
||||||
|
priv1 = SigningKey.from_secret_exponent(secexp1, curve)
|
||||||
|
priv2 = SigningKey.from_secret_exponent(secexp2, curve)
|
||||||
|
self.assertEqual(hexlify(priv1.to_string()),
|
||||||
|
hexlify(priv2.to_string()))
|
||||||
|
self.assertEqual(priv1.to_pem(), priv2.to_pem())
|
||||||
|
pub1 = priv1.get_verifying_key()
|
||||||
|
pub2 = priv2.get_verifying_key()
|
||||||
|
data = b("data")
|
||||||
|
sig1 = priv1.sign(data)
|
||||||
|
sig2 = priv2.sign(data)
|
||||||
|
self.assertTrue(pub1.verify(sig1, data))
|
||||||
|
self.assertTrue(pub2.verify(sig1, data))
|
||||||
|
self.assertTrue(pub1.verify(sig2, data))
|
||||||
|
self.assertTrue(pub2.verify(sig2, data))
|
||||||
|
self.assertEqual(hexlify(pub1.to_string()),
|
||||||
|
hexlify(pub2.to_string()))
|
||||||
|
|
||||||
|
def test_nonrandom(self):
|
||||||
|
s = b("all the entropy in the entire world, compressed into one line")
|
||||||
|
def not_much_entropy(numbytes):
|
||||||
|
return s[:numbytes]
|
||||||
|
# we control the entropy source, these two keys should be identical:
|
||||||
|
priv1 = SigningKey.generate(entropy=not_much_entropy)
|
||||||
|
priv2 = SigningKey.generate(entropy=not_much_entropy)
|
||||||
|
self.assertEqual(hexlify(priv1.get_verifying_key().to_string()),
|
||||||
|
hexlify(priv2.get_verifying_key().to_string()))
|
||||||
|
# likewise, signatures should be identical. Obviously you'd never
|
||||||
|
# want to do this with keys you care about, because the secrecy of
|
||||||
|
# the private key depends upon using different random numbers for
|
||||||
|
# each signature
|
||||||
|
sig1 = priv1.sign(b("data"), entropy=not_much_entropy)
|
||||||
|
sig2 = priv2.sign(b("data"), entropy=not_much_entropy)
|
||||||
|
self.assertEqual(hexlify(sig1), hexlify(sig2))
|
||||||
|
|
||||||
|
def assertTruePrivkeysEqual(self, priv1, priv2):
|
||||||
|
self.assertEqual(priv1.privkey.secret_multiplier,
|
||||||
|
priv2.privkey.secret_multiplier)
|
||||||
|
self.assertEqual(priv1.privkey.public_key.generator,
|
||||||
|
priv2.privkey.public_key.generator)
|
||||||
|
|
||||||
|
def failIfPrivkeysEqual(self, priv1, priv2):
|
||||||
|
self.failIfEqual(priv1.privkey.secret_multiplier,
|
||||||
|
priv2.privkey.secret_multiplier)
|
||||||
|
|
||||||
|
def test_privkey_creation(self):
|
||||||
|
s = b("all the entropy in the entire world, compressed into one line")
|
||||||
|
def not_much_entropy(numbytes):
|
||||||
|
return s[:numbytes]
|
||||||
|
priv1 = SigningKey.generate()
|
||||||
|
self.assertEqual(priv1.baselen, NIST192p.baselen)
|
||||||
|
|
||||||
|
priv1 = SigningKey.generate(curve=NIST224p)
|
||||||
|
self.assertEqual(priv1.baselen, NIST224p.baselen)
|
||||||
|
|
||||||
|
priv1 = SigningKey.generate(entropy=not_much_entropy)
|
||||||
|
self.assertEqual(priv1.baselen, NIST192p.baselen)
|
||||||
|
priv2 = SigningKey.generate(entropy=not_much_entropy)
|
||||||
|
self.assertEqual(priv2.baselen, NIST192p.baselen)
|
||||||
|
self.assertTruePrivkeysEqual(priv1, priv2)
|
||||||
|
|
||||||
|
priv1 = SigningKey.from_secret_exponent(secexp=3)
|
||||||
|
self.assertEqual(priv1.baselen, NIST192p.baselen)
|
||||||
|
priv2 = SigningKey.from_secret_exponent(secexp=3)
|
||||||
|
self.assertTruePrivkeysEqual(priv1, priv2)
|
||||||
|
|
||||||
|
priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p)
|
||||||
|
self.assertEqual(priv1.baselen, NIST224p.baselen)
|
||||||
|
|
||||||
|
def test_privkey_strings(self):
|
||||||
|
priv1 = SigningKey.generate()
|
||||||
|
s1 = priv1.to_string()
|
||||||
|
self.assertEqual(type(s1), binary_type)
|
||||||
|
self.assertEqual(len(s1), NIST192p.baselen)
|
||||||
|
priv2 = SigningKey.from_string(s1)
|
||||||
|
self.assertTruePrivkeysEqual(priv1, priv2)
|
||||||
|
|
||||||
|
s1 = priv1.to_pem()
|
||||||
|
self.assertEqual(type(s1), binary_type)
|
||||||
|
self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
|
||||||
|
self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
|
||||||
|
priv2 = SigningKey.from_pem(s1)
|
||||||
|
self.assertTruePrivkeysEqual(priv1, priv2)
|
||||||
|
|
||||||
|
s1 = priv1.to_der()
|
||||||
|
self.assertEqual(type(s1), binary_type)
|
||||||
|
priv2 = SigningKey.from_der(s1)
|
||||||
|
self.assertTruePrivkeysEqual(priv1, priv2)
|
||||||
|
|
||||||
|
priv1 = SigningKey.generate(curve=NIST256p)
|
||||||
|
s1 = priv1.to_pem()
|
||||||
|
self.assertEqual(type(s1), binary_type)
|
||||||
|
self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
|
||||||
|
self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
|
||||||
|
priv2 = SigningKey.from_pem(s1)
|
||||||
|
self.assertTruePrivkeysEqual(priv1, priv2)
|
||||||
|
|
||||||
|
s1 = priv1.to_der()
|
||||||
|
self.assertEqual(type(s1), binary_type)
|
||||||
|
priv2 = SigningKey.from_der(s1)
|
||||||
|
self.assertTruePrivkeysEqual(priv1, priv2)
|
||||||
|
|
||||||
|
def assertTruePubkeysEqual(self, pub1, pub2):
|
||||||
|
self.assertEqual(pub1.pubkey.point, pub2.pubkey.point)
|
||||||
|
self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator)
|
||||||
|
self.assertEqual(pub1.curve, pub2.curve)
|
||||||
|
|
||||||
|
def test_pubkey_strings(self):
|
||||||
|
priv1 = SigningKey.generate()
|
||||||
|
pub1 = priv1.get_verifying_key()
|
||||||
|
s1 = pub1.to_string()
|
||||||
|
self.assertEqual(type(s1), binary_type)
|
||||||
|
self.assertEqual(len(s1), NIST192p.verifying_key_length)
|
||||||
|
pub2 = VerifyingKey.from_string(s1)
|
||||||
|
self.assertTruePubkeysEqual(pub1, pub2)
|
||||||
|
|
||||||
|
priv1 = SigningKey.generate(curve=NIST256p)
|
||||||
|
pub1 = priv1.get_verifying_key()
|
||||||
|
s1 = pub1.to_string()
|
||||||
|
self.assertEqual(type(s1), binary_type)
|
||||||
|
self.assertEqual(len(s1), NIST256p.verifying_key_length)
|
||||||
|
pub2 = VerifyingKey.from_string(s1, curve=NIST256p)
|
||||||
|
self.assertTruePubkeysEqual(pub1, pub2)
|
||||||
|
|
||||||
|
pub1_der = pub1.to_der()
|
||||||
|
self.assertEqual(type(pub1_der), binary_type)
|
||||||
|
pub2 = VerifyingKey.from_der(pub1_der)
|
||||||
|
self.assertTruePubkeysEqual(pub1, pub2)
|
||||||
|
|
||||||
|
self.assertRaises(der.UnexpectedDER,
|
||||||
|
VerifyingKey.from_der, pub1_der+b("junk"))
|
||||||
|
badpub = VerifyingKey.from_der(pub1_der)
|
||||||
|
class FakeGenerator:
|
||||||
|
def order(self): return 123456789
|
||||||
|
badcurve = Curve("unknown", None, None, FakeGenerator(), (1,2,3,4,5,6))
|
||||||
|
badpub.curve = badcurve
|
||||||
|
badder = badpub.to_der()
|
||||||
|
self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder)
|
||||||
|
|
||||||
|
pem = pub1.to_pem()
|
||||||
|
self.assertEqual(type(pem), binary_type)
|
||||||
|
self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem)
|
||||||
|
self.assertTrue(pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem)
|
||||||
|
pub2 = VerifyingKey.from_pem(pem)
|
||||||
|
self.assertTruePubkeysEqual(pub1, pub2)
|
||||||
|
|
||||||
|
def test_signature_strings(self):
|
||||||
|
priv1 = SigningKey.generate()
|
||||||
|
pub1 = priv1.get_verifying_key()
|
||||||
|
data = b("data")
|
||||||
|
|
||||||
|
sig = priv1.sign(data)
|
||||||
|
self.assertEqual(type(sig), binary_type)
|
||||||
|
self.assertEqual(len(sig), NIST192p.signature_length)
|
||||||
|
self.assertTrue(pub1.verify(sig, data))
|
||||||
|
|
||||||
|
sig = priv1.sign(data, sigencode=sigencode_strings)
|
||||||
|
self.assertEqual(type(sig), tuple)
|
||||||
|
self.assertEqual(len(sig), 2)
|
||||||
|
self.assertEqual(type(sig[0]), binary_type)
|
||||||
|
self.assertEqual(type(sig[1]), binary_type)
|
||||||
|
self.assertEqual(len(sig[0]), NIST192p.baselen)
|
||||||
|
self.assertEqual(len(sig[1]), NIST192p.baselen)
|
||||||
|
self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings))
|
||||||
|
|
||||||
|
sig_der = priv1.sign(data, sigencode=sigencode_der)
|
||||||
|
self.assertEqual(type(sig_der), binary_type)
|
||||||
|
self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der))
|
||||||
|
|
||||||
|
def test_hashfunc(self):
|
||||||
|
sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
|
||||||
|
data = b("security level is 128 bits")
|
||||||
|
sig = sk.sign(data)
|
||||||
|
vk = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
|
||||||
|
curve=NIST256p, hashfunc=sha256)
|
||||||
|
self.assertTrue(vk.verify(sig, data))
|
||||||
|
|
||||||
|
sk2 = SigningKey.generate(curve=NIST256p)
|
||||||
|
sig2 = sk2.sign(data, hashfunc=sha256)
|
||||||
|
vk2 = VerifyingKey.from_string(sk2.get_verifying_key().to_string(),
|
||||||
|
curve=NIST256p, hashfunc=sha256)
|
||||||
|
self.assertTrue(vk2.verify(sig2, data))
|
||||||
|
|
||||||
|
vk3 = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
|
||||||
|
curve=NIST256p)
|
||||||
|
self.assertTrue(vk3.verify(sig, data, hashfunc=sha256))
|
||||||
|
|
||||||
|
|
||||||
|
class OpenSSL(unittest.TestCase):
|
||||||
|
# test interoperability with OpenSSL tools. Note that openssl's ECDSA
|
||||||
|
# sign/verify arguments changed between 0.9.8 and 1.0.0: the early
|
||||||
|
# versions require "-ecdsa-with-SHA1", the later versions want just
|
||||||
|
# "-SHA1" (or to leave out that argument entirely, which means the
|
||||||
|
# signature will use some default digest algorithm, probably determined
|
||||||
|
# by the key, probably always SHA1).
|
||||||
|
#
|
||||||
|
# openssl ecparam -name secp224r1 -genkey -out privkey.pem
|
||||||
|
# openssl ec -in privkey.pem -text -noout # get the priv/pub keys
|
||||||
|
# openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt
|
||||||
|
# openssl asn1parse -in data.sig -inform DER
|
||||||
|
# data.sig is 64 bytes, probably 56b plus ASN1 overhead
|
||||||
|
# openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $?
|
||||||
|
# openssl ec -in privkey.pem -pubout -out pubkey.pem
|
||||||
|
# openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der
|
||||||
|
|
||||||
|
def get_openssl_messagedigest_arg(self):
|
||||||
|
v = run_openssl("version")
|
||||||
|
# e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010",
|
||||||
|
# or "OpenSSL 0.9.8o 01 Jun 2010"
|
||||||
|
vs = v.split()[1].split(".")
|
||||||
|
if vs >= ["1","0","0"]:
|
||||||
|
return "-SHA1"
|
||||||
|
else:
|
||||||
|
return "-ecdsa-with-SHA1"
|
||||||
|
|
||||||
|
# sk: 1:OpenSSL->python 2:python->OpenSSL
|
||||||
|
# vk: 3:OpenSSL->python 4:python->OpenSSL
|
||||||
|
# sig: 5:OpenSSL->python 6:python->OpenSSL
|
||||||
|
|
||||||
|
def test_from_openssl_nist192p(self):
|
||||||
|
return self.do_test_from_openssl(NIST192p)
|
||||||
|
def test_from_openssl_nist224p(self):
|
||||||
|
return self.do_test_from_openssl(NIST224p)
|
||||||
|
def test_from_openssl_nist256p(self):
|
||||||
|
return self.do_test_from_openssl(NIST256p)
|
||||||
|
def test_from_openssl_nist384p(self):
|
||||||
|
return self.do_test_from_openssl(NIST384p)
|
||||||
|
def test_from_openssl_nist521p(self):
|
||||||
|
return self.do_test_from_openssl(NIST521p)
|
||||||
|
def test_from_openssl_secp256k1(self):
|
||||||
|
return self.do_test_from_openssl(SECP256k1)
|
||||||
|
|
||||||
|
def do_test_from_openssl(self, curve):
|
||||||
|
curvename = curve.openssl_name
|
||||||
|
assert curvename
|
||||||
|
# OpenSSL: create sk, vk, sign.
|
||||||
|
# Python: read vk(3), checksig(5), read sk(1), sign, check
|
||||||
|
mdarg = self.get_openssl_messagedigest_arg()
|
||||||
|
if os.path.isdir("t"):
|
||||||
|
shutil.rmtree("t")
|
||||||
|
os.mkdir("t")
|
||||||
|
run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename)
|
||||||
|
run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem")
|
||||||
|
data = b("data")
|
||||||
|
with open("t/data.txt","wb") as e: e.write(data)
|
||||||
|
run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg)
|
||||||
|
run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" % mdarg)
|
||||||
|
with open("t/pubkey.pem","rb") as e: pubkey_pem = e.read()
|
||||||
|
vk = VerifyingKey.from_pem(pubkey_pem) # 3
|
||||||
|
        with open("t/data.sig","rb") as e: sig_der = e.read()
        self.assertTrue(vk.verify(sig_der, data, # 5
                                  hashfunc=sha1, sigdecode=sigdecode_der))

        with open("t/privkey.pem") as e: fp = e.read()
        sk = SigningKey.from_pem(fp) # 1
        sig = sk.sign(data)
        self.assertTrue(vk.verify(sig, data))

    def test_to_openssl_nist192p(self):
        self.do_test_to_openssl(NIST192p)
    def test_to_openssl_nist224p(self):
        self.do_test_to_openssl(NIST224p)
    def test_to_openssl_nist256p(self):
        self.do_test_to_openssl(NIST256p)
    def test_to_openssl_nist384p(self):
        self.do_test_to_openssl(NIST384p)
    def test_to_openssl_nist521p(self):
        self.do_test_to_openssl(NIST521p)
    def test_to_openssl_secp256k1(self):
        self.do_test_to_openssl(SECP256k1)

    def do_test_to_openssl(self, curve):
        curvename = curve.openssl_name
        assert curvename
        # Python: create sk, vk, sign.
        # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check
        mdarg = self.get_openssl_messagedigest_arg()
        if os.path.isdir("t"):
            shutil.rmtree("t")
        os.mkdir("t")
        sk = SigningKey.generate(curve=curve)
        vk = sk.get_verifying_key()
        data = b("data")
        with open("t/pubkey.der","wb") as e: e.write(vk.to_der()) # 4
        with open("t/pubkey.pem","wb") as e: e.write(vk.to_pem()) # 4
        sig_der = sk.sign(data, hashfunc=sha1, sigencode=sigencode_der)

        with open("t/data.sig","wb") as e: e.write(sig_der) # 6
        with open("t/data.txt","wb") as e: e.write(data)
        with open("t/baddata.txt","wb") as e: e.write(data+b("corrupt"))

        self.assertRaises(SubprocessError, run_openssl,
                          "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" % mdarg)

        with open("t/privkey.pem","wb") as e: e.write(sk.to_pem()) # 2
        run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" % mdarg)

class DER(unittest.TestCase):
    def test_oids(self):
        oid_ecPublicKey = der.encode_oid(1, 2, 840, 10045, 2, 1)
        self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201"))
        self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021"))
        self.assertEqual(hexlify(NIST256p.encoded_oid),
                         b("06082a8648ce3d030107"))
        x = oid_ecPublicKey + b("more")
        x1, rest = der.remove_object(x)
        self.assertEqual(x1, (1, 2, 840, 10045, 2, 1))
        self.assertEqual(rest, b("more"))

    def test_integer(self):
        self.assertEqual(der.encode_integer(0), b("\x02\x01\x00"))
        self.assertEqual(der.encode_integer(1), b("\x02\x01\x01"))
        self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f"))
        self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80"))
        self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00"))
        #self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff"))

        def s(n): return der.remove_integer(der.encode_integer(n) + b("junk"))
        self.assertEqual(s(0), (0, b("junk")))
        self.assertEqual(s(1), (1, b("junk")))
        self.assertEqual(s(127), (127, b("junk")))
        self.assertEqual(s(128), (128, b("junk")))
        self.assertEqual(s(256), (256, b("junk")))
        self.assertEqual(s(1234567890123456789012345678901234567890),
                         (1234567890123456789012345678901234567890,b("junk")))

    def test_number(self):
        self.assertEqual(der.encode_number(0), b("\x00"))
        self.assertEqual(der.encode_number(127), b("\x7f"))
        self.assertEqual(der.encode_number(128), b("\x81\x00"))
        self.assertEqual(der.encode_number(3*128+7), b("\x83\x07"))
        #self.assertEqual(der.read_number("\x81\x9b"+"more"), (155, 2))
        #self.assertEqual(der.encode_number(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 3*128+7, 840, 10045): #, 155):
            x = der.encode_number(n) + b("more")
            n1, llen = der.read_number(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_length(self):
        self.assertEqual(der.encode_length(0), b("\x00"))
        self.assertEqual(der.encode_length(127), b("\x7f"))
        self.assertEqual(der.encode_length(128), b("\x81\x80"))
        self.assertEqual(der.encode_length(255), b("\x81\xff"))
        self.assertEqual(der.encode_length(256), b("\x82\x01\x00"))
        self.assertEqual(der.encode_length(3*256+7), b("\x82\x03\x07"))
        self.assertEqual(der.read_length(b("\x81\x9b")+b("more")), (155, 2))
        self.assertEqual(der.encode_length(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 255, 256, 3*256+7, 155):
            x = der.encode_length(n) + b("more")
            n1, llen = der.read_length(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_sequence(self):
        x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI")
        self.assertEqual(x, b("\x30\x06ABCDEFGHI"))
        x1, rest = der.remove_sequence(x)
        self.assertEqual(x1, b("ABCDEF"))
        self.assertEqual(rest, b("GHI"))

    def test_constructed(self):
        x = der.encode_constructed(0, NIST224p.encoded_oid)
        self.assertEqual(hexlify(x), b("a007") + b("06052b81040021"))
        x = der.encode_constructed(1, unhexlify(b("0102030a0b0c")))
        self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c"))

class Util(unittest.TestCase):
    def test_trytryagain(self):
        tta = util.randrange_from_seed__trytryagain
        for i in range(1000):
            seed = "seed-%d" % i
            for order in (2**8-2, 2**8-1, 2**8, 2**8+1, 2**8+2,
                          2**16-1, 2**16+1):
                n = tta(seed, order)
                self.assertTrue(1 <= n < order, (1, n, order))
        # this trytryagain *does* provide long-term stability
        self.assertEqual(("%x"%(tta("seed", NIST224p.order))).encode(),
                         b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"))

    def test_randrange(self):
        # util.randrange does not provide long-term stability: we might
        # change the algorithm in the future.
        for i in range(1000):
            entropy = util.PRNG("seed-%d" % i)
            for order in (2**8-2, 2**8-1, 2**8,
                          2**16-1, 2**16+1,
                          ):
                # that oddball 2**16+1 takes half our runtime
                n = util.randrange(order, entropy=entropy)
                self.assertTrue(1 <= n < order, (1, n, order))

    def OFF_test_prove_uniformity(self):
        order = 2**8-2
        counts = dict([(i, 0) for i in range(1, order)])
        assert 0 not in counts
        assert order not in counts
        for i in range(1000000):
            seed = "seed-%d" % i
            n = util.randrange_from_seed__trytryagain(seed, order)
            counts[n] += 1
        # this technique should use the full range
        self.assertTrue(counts[order-1])
        for i in range(1, order):
            print_("%3d: %s" % (i, "*"*(counts[i]//100)))

class RFC6979(unittest.TestCase):
    # https://tools.ietf.org/html/rfc6979#appendix-A.1
    def _do(self, generator, secexp, hsh, hash_func, expected):
        actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh)
        self.assertEqual(expected, actual)

    def test_SECP256k1(self):
        '''RFC doesn't contain test vectors for SECP256k1 used in bitcoin.
        This vector has been computed by Golang reference implementation instead.'''
        self._do(
            generator = SECP256k1.generator,
            secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", 16))

    def test_SECP256k1_2(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=int("cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", 16),
            hsh=sha256(b("sample")).digest(),
            hash_func=sha256,
            expected=int("2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", 16))

    def test_SECP256k1_3(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15)

    def test_SECP256k1_4(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("All those moments will be lost in time, like tears in rain. Time to die...")).digest(),
            hash_func=sha256,
            expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3)

    def test_SECP256k1_5(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90)

    def test_SECP256k1_6(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0xf8b8af8ce3c7cca5e300d33939540c10d45ce001b8f252bfbc57ba0342904181,
            hsh=sha256(b("Alan Turing")).digest(),
            hash_func=sha256,
            expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1)

    def test_1(self):
        # Basic example of the RFC, it also tests 'try-try-again' from Step H of rfc6979
        self._do(
            generator = Point(None, 0, 0, int("4000000000000000000020108A2E0CC0D99F8A5EF", 16)),
            secexp = int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16),
            hsh = unhexlify(b("AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF")),
            hash_func = sha256,
            expected = int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16))

    def test_2(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha1(b("sample")).digest(),
            hash_func = sha1,
            expected = int("37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16))

    def test_3(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16))

    def test_4(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha512(b("sample")).digest(),
            hash_func = sha512,
            expected = int("A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16))

    def test_5(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha1(b("test")).digest(),
            hash_func = sha1,
            expected = int("D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16))

    def test_6(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha256(b("test")).digest(),
            hash_func = sha256,
            expected = int("5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16))

    def test_7(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha512(b("test")).digest(),
            hash_func = sha512,
            expected = int("0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16))

    def test_8(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha1(b("sample")).digest(),
            hash_func = sha1,
            expected = int("089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", 16))

    def test_9(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", 16))

    def test_10(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha512(b("test")).digest(),
            hash_func = sha512,
            expected = int("16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", 16))

def __main__():
    unittest.main()
if __name__ == "__main__":
    __main__()
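All of the RFC 6979 cases above funnel through _do, which recomputes the deterministic nonce and compares it to the published vector. Outside the unittest harness that reduces to a single call; the sketch below mirrors test_3 and assumes the same module layout the test file's own references imply (a vendored ecdsa package exposing rfc6979.generate_k and the NIST192p curve object).

# Hedged sketch: reproduce the test_3 vector directly (import paths assumed).
from hashlib import sha256
from ecdsa.curves import NIST192p
from ecdsa import rfc6979

secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16)
k = rfc6979.generate_k(NIST192p.generator.order(), secexp, sha256,
                       sha256(b"sample").digest())
assert k == int("32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16)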
247
bin/python/ecdsa/util.py
Normal file
@@ -0,0 +1,247 @@
from __future__ import division

import os
import math
import binascii
from hashlib import sha256
from . import der
from .curves import orderlen
from .six import PY3, int2byte, b, next

# RFC5480:
#   The "unrestricted" algorithm identifier is:
#     id-ecPublicKey OBJECT IDENTIFIER ::= {
#       iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }

oid_ecPublicKey = (1, 2, 840, 10045, 2, 1)
encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey)

def randrange(order, entropy=None):
    """Return a random integer k such that 1 <= k < order, uniformly
    distributed across that range. For simplicity, this only behaves well if
    'order' is fairly close (but below) a power of 256. The try-try-again
    algorithm we use takes longer and longer time (on average) to complete as
    'order' falls, rising to a maximum of avg=512 loops for the worst-case
    (256**k)+1 . All of the standard curves behave well. There is a cutoff at
    10k loops (which raises RuntimeError) to prevent an infinite loop when
    something is really broken like the entropy function not working.

    Note that this function is not declared to be forwards-compatible: we may
    change the behavior in future releases. The entropy= argument (which
    should get a callable that behaves like os.urandom) can be used to
    achieve stability within a given release (for repeatable unit tests), but
    should not be used as a long-term-compatible key generation algorithm.
    """
    # we could handle arbitrary orders (even 256**k+1) better if we created
    # candidates bit-wise instead of byte-wise, which would reduce the
    # worst-case behavior to avg=2 loops, but that would be more complex. The
    # change would be to round the order up to a power of 256, subtract one
    # (to get 0xffff..), use that to get a byte-long mask for the top byte,
    # generate the len-1 entropy bytes, generate one extra byte and mask off
    # the top bits, then combine it with the rest. Requires jumping back and
    # forth between strings and integers a lot.

    if entropy is None:
        entropy = os.urandom
    assert order > 1
    bytes = orderlen(order)
    dont_try_forever = 10000 # gives about 2**-60 failures for worst case
    while dont_try_forever > 0:
        dont_try_forever -= 1
        candidate = string_to_number(entropy(bytes)) + 1
        if 1 <= candidate < order:
            return candidate
        continue
    raise RuntimeError("randrange() tried hard but gave up, either something"
                       " is very wrong or you got realllly unlucky. Order was"
                       " %x" % order)

class PRNG:
    # this returns a callable which, when invoked with an integer N, will
    # return N pseudorandom bytes. Note: this is a short-term PRNG, meant
    # primarily for the needs of randrange_from_seed__trytryagain(), which
    # only needs to run it a few times per seed. It does not provide
    # protection against state compromise (forward security).
    def __init__(self, seed):
        self.generator = self.block_generator(seed)

    def __call__(self, numbytes):
        a = [next(self.generator) for i in range(numbytes)]

        if PY3:
            return bytes(a)
        else:
            return "".join(a)

    def block_generator(self, seed):
        counter = 0
        while True:
            for byte in sha256(("prng-%d-%s" % (counter, seed)).encode()).digest():
                yield byte
            counter += 1

def randrange_from_seed__overshoot_modulo(seed, order):
    # hash the data, then turn the digest into a number in [1,order).
    #
    # We use David-Sarah Hopwood's suggestion: turn it into a number that's
    # sufficiently larger than the group order, then modulo it down to fit.
    # This should give adequate (but not perfect) uniformity, and simple
    # code. There are other choices: try-try-again is the main one.
    base = PRNG(seed)(2*orderlen(order))
    number = (int(binascii.hexlify(base), 16) % (order-1)) + 1
    assert 1 <= number < order, (1, number, order)
    return number

def lsb_of_ones(numbits):
    return (1 << numbits) - 1

def bits_and_bytes(order):
    bits = int(math.log(order-1, 2)+1)
    bytes = bits // 8
    extrabits = bits % 8
    return bits, bytes, extrabits

# the following randrange_from_seed__METHOD() functions take an
# arbitrarily-sized secret seed and turn it into a number that obeys the same
# range limits as randrange() above. They are meant for deriving consistent
# signing keys from a secret rather than generating them randomly, for
# example a protocol in which three signing keys are derived from a master
# secret. You should use a uniformly-distributed unguessable seed with about
# curve.baselen bytes of entropy. To use one, do this:
#   seed = os.urandom(curve.baselen) # or other starting point
#   secexp = ecdsa.util.randrange_from_seed__trytryagain(seed, curve.order)
#   sk = SigningKey.from_secret_exponent(secexp, curve)

def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256):
    # hash the seed, then turn the digest into a number in [1,order), but
    # don't worry about trying to uniformly fill the range. This will lose,
    # on average, four bits of entropy.
    bits, bytes, extrabits = bits_and_bytes(order)
    if extrabits:
        bytes += 1
    base = hashmod(seed).digest()[:bytes]
    base = "\x00"*(bytes-len(base)) + base
    number = 1+int(binascii.hexlify(base), 16)
    assert 1 <= number < order
    return number

def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256):
    # like string_to_randrange_truncate_bytes, but only lose an average of
    # half a bit
    bits = int(math.log(order-1, 2)+1)
    maxbytes = (bits+7) // 8
    base = hashmod(seed).digest()[:maxbytes]
    base = "\x00"*(maxbytes-len(base)) + base
    topbits = 8*maxbytes - bits
    if topbits:
        base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:]
    number = 1+int(binascii.hexlify(base), 16)
    assert 1 <= number < order
    return number

def randrange_from_seed__trytryagain(seed, order):
    # figure out exactly how many bits we need (rounded up to the nearest
    # bit), so we can reduce the chance of looping to less than 0.5 . This is
    # specified to feed from a byte-oriented PRNG, and discards the
    # high-order bits of the first byte as necessary to get the right number
    # of bits. The average number of loops will range from 1.0 (when
    # order=2**k-1) to 2.0 (when order=2**k+1).
    assert order > 1
    bits, bytes, extrabits = bits_and_bytes(order)
    generate = PRNG(seed)
    while True:
        extrabyte = b("")
        if extrabits:
            extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits))
        guess = string_to_number(extrabyte + generate(bytes)) + 1
        if 1 <= guess < order:
            return guess


def number_to_string(num, order):
    l = orderlen(order)
    fmt_str = "%0" + str(2*l) + "x"
    string = binascii.unhexlify((fmt_str % num).encode())
    assert len(string) == l, (len(string), l)
    return string

def number_to_string_crop(num, order):
    l = orderlen(order)
    fmt_str = "%0" + str(2*l) + "x"
    string = binascii.unhexlify((fmt_str % num).encode())
    return string[:l]

def string_to_number(string):
    return int(binascii.hexlify(string), 16)

def string_to_number_fixedlen(string, order):
    l = orderlen(order)
    assert len(string) == l, (len(string), l)
    return int(binascii.hexlify(string), 16)

# these methods are useful for the sigencode= argument to SK.sign() and the
# sigdecode= argument to VK.verify(), and control how the signature is packed
# or unpacked.

def sigencode_strings(r, s, order):
    r_str = number_to_string(r, order)
    s_str = number_to_string(s, order)
    return (r_str, s_str)

def sigencode_string(r, s, order):
    # for any given curve, the size of the signature numbers is
    # fixed, so just use simple concatenation
    r_str, s_str = sigencode_strings(r, s, order)
    return r_str + s_str

def sigencode_der(r, s, order):
    return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))

# canonical versions of sigencode methods
# these enforce low S values, by negating the value (modulo the order) if above order/2
# see CECKey::Sign() https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214
def sigencode_strings_canonize(r, s, order):
    if s > order / 2:
        s = order - s
    return sigencode_strings(r, s, order)

def sigencode_string_canonize(r, s, order):
    if s > order / 2:
        s = order - s
    return sigencode_string(r, s, order)

def sigencode_der_canonize(r, s, order):
    if s > order / 2:
        s = order - s
    return sigencode_der(r, s, order)


def sigdecode_string(signature, order):
    l = orderlen(order)
    assert len(signature) == 2*l, (len(signature), 2*l)
    r = string_to_number_fixedlen(signature[:l], order)
    s = string_to_number_fixedlen(signature[l:], order)
    return r, s

def sigdecode_strings(rs_strings, order):
    (r_str, s_str) = rs_strings
    l = orderlen(order)
    assert len(r_str) == l, (len(r_str), l)
    assert len(s_str) == l, (len(s_str), l)
    r = string_to_number_fixedlen(r_str, order)
    s = string_to_number_fixedlen(s_str, order)
    return r, s

def sigdecode_der(sig_der, order):
    #return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
    rs_strings, empty = der.remove_sequence(sig_der)
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER sig: %s" %
                                binascii.hexlify(empty))
    r, rest = der.remove_integer(rs_strings)
    s, empty = der.remove_integer(rest)
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER numbers: %s" %
                                binascii.hexlify(empty))
    return r, s
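Taken together, each sigencode_*/sigdecode_* pair is an inverse transform over an (r, s) pair; the order argument only drives length bookkeeping in the fixed-width string forms and is ignored by the DER decoder. A minimal round-trip sketch (the ecdsa.util and ecdsa.curves import paths are assumed from this file's location under bin/python/ecdsa/):

from ecdsa.curves import NIST192p
from ecdsa.util import (sigencode_der, sigdecode_der,
                        sigencode_string, sigdecode_string)

r, s = 1234567890, 987654321
der_sig = sigencode_der(r, s, NIST192p.order)      # ASN.1 SEQUENCE of two INTEGERs
assert sigdecode_der(der_sig, NIST192p.order) == (r, s)

raw_sig = sigencode_string(r, s, NIST192p.order)   # fixed-width r || s
assert len(raw_sig) == 2 * 24                      # NIST192p has a 24-byte order
assert sigdecode_string(raw_sig, NIST192p.order) == (r, s)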
105
bin/python/ed25519.py
Normal file
@@ -0,0 +1,105 @@
import hashlib

b = 256
q = 2**255 - 19
l = 2**252 + 27742317777372353535851937790883648493

def H(m):
    return hashlib.sha512(m).digest()

def expmod(b,e,m):
    if e == 0: return 1
    t = expmod(b,e/2,m)**2 % m
    if e & 1: t = (t*b) % m
    return t

def inv(x):
    return expmod(x,q-2,q)

d = -121665 * inv(121666)
I = expmod(2,(q-1)/4,q)

def xrecover(y):
    xx = (y*y-1) * inv(d*y*y+1)
    x = expmod(xx,(q+3)/8,q)
    if (x*x - xx) % q != 0: x = (x*I) % q
    if x % 2 != 0: x = q-x
    return x

By = 4 * inv(5)
Bx = xrecover(By)
B = [Bx % q,By % q]

def edwards(P,Q):
    x1 = P[0]
    y1 = P[1]
    x2 = Q[0]
    y2 = Q[1]
    x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2)
    y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2)
    return [x3 % q,y3 % q]

def scalarmult(P,e):
    if e == 0: return [0,1]
    Q = scalarmult(P,e/2)
    Q = edwards(Q,Q)
    if e & 1: Q = edwards(Q,P)
    return Q

def encodeint(y):
    bits = [(y >> i) & 1 for i in range(b)]
    return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b/8)])

def encodepoint(P):
    x = P[0]
    y = P[1]
    bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
    return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b/8)])

def bit(h,i):
    return (ord(h[i/8]) >> (i%8)) & 1

def publickey(sk):
    h = H(sk)
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    A = scalarmult(B,a)
    return encodepoint(A)

def Hint(m):
    h = H(m)
    return sum(2**i * bit(h,i) for i in range(2*b))

def signature(m,sk,pk):
    h = H(sk)
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    r = Hint(''.join([h[i] for i in range(b/8,b/4)]) + m)
    R = scalarmult(B,r)
    S = (r + Hint(encodepoint(R) + pk + m) * a) % l
    return encodepoint(R) + encodeint(S)

def isoncurve(P):
    x = P[0]
    y = P[1]
    return (-x*x + y*y - 1 - d*x*x*y*y) % q == 0

def decodeint(s):
    return sum(2**i * bit(s,i) for i in range(0,b))

def decodepoint(s):
    y = sum(2**i * bit(s,i) for i in range(0,b-1))
    x = xrecover(y)
    if x & 1 != bit(s,b-1): x = q-x
    P = [x,y]
    if not isoncurve(P): raise Exception("decoding point that is not on curve")
    return P

def checkvalid(s,m,pk):
    if len(s) != b/4: raise Exception("signature length is wrong")
    if len(pk) != b/8: raise Exception("public-key length is wrong")
    R = decodepoint(s[0:b/8])
    A = decodepoint(pk)
    S = decodeint(s[b/8:b/4])
    h = Hint(encodepoint(R) + pk + m)
    if scalarmult(B,S) != edwards(R,scalarmult(A,h)):
        raise Exception("signature does not pass verification")
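This is the plain reference implementation: keys and messages are byte strings, and several expressions rely on Python 2 integer division, so the round-trip sketch below assumes a Python 2 interpreter.

import os
import ed25519   # the module above

sk = os.urandom(32)                    # 32-byte secret key
pk = ed25519.publickey(sk)             # 32-byte compressed public key
msg = 'attack at dawn'
sig = ed25519.signature(msg, sk, pk)   # 64 bytes: encodepoint(R) || encodeint(S)
ed25519.checkvalid(sig, msg, pk)       # raises if the signature is invalid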
51
bin/python/ripple/ledger/SField.py
Normal file
@@ -0,0 +1,51 @@
# Constants from ripple/protocol/SField.h

# special types
STI_UNKNOWN = -2
STI_DONE = -1
STI_NOTPRESENT = 0

# types (common)
STI_UINT16 = 1
STI_UINT32 = 2
STI_UINT64 = 3
STI_HASH128 = 4
STI_HASH256 = 5
STI_AMOUNT = 6
STI_VL = 7
STI_ACCOUNT = 8
# 9-13 are reserved
STI_OBJECT = 14
STI_ARRAY = 15

# types (uncommon)
STI_UINT8 = 16
STI_HASH160 = 17
STI_PATHSET = 18
STI_VECTOR256 = 19

# high level types
# cannot be serialized inside other types
STI_TRANSACTION = 10001
STI_LEDGERENTRY = 10002
STI_VALIDATION = 10003
STI_METADATA = 10004

def field_code(sti, name):
    if sti < 16:
        if name < 16:
            bytes = [(sti << 4) + name]
        else:
            bytes = [sti << 4, name]
    elif name < 16:
        bytes = [name, sti]
    else:
        bytes = [0, sti, name]
    return ''.join(chr(i) for i in bytes)

# Selected constants from SField.cpp

sfSequence = field_code(STI_UINT32, 4)
sfPublicKey = field_code(STI_VL, 1)
sfSigningPubKey = field_code(STI_VL, 3)
sfSignature = field_code(STI_VL, 6)
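field_code packs a serialized-type id and a field name into the one-, two-, or three-byte prefix used when building the manifest blob; all four selected constants fall into the compact one-byte form because type and name are each below 16. A quick check, as Python 2 byte strings and using the same import path Sign.py uses below:

from ripple.ledger import SField

assert SField.sfSequence == chr((SField.STI_UINT32 << 4) + 4)    # type 2, name 4 -> one byte
assert SField.sfSigningPubKey == chr((SField.STI_VL << 4) + 3)   # type 7, name 3 -> one byte
# a wide field name spills into the two-byte form
assert SField.field_code(SField.STI_UINT16, 20) == chr(SField.STI_UINT16 << 4) + chr(20)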
94
bin/python/ripple/util/Base58.py
Normal file
@@ -0,0 +1,94 @@
#!/usr/bin/env python

from hashlib import sha256

#
# Human strings are base-58 with a
# version prefix and a checksum suffix.
#
# Copied from ripple/protocol/RippleAddress.h
#

VER_NONE = 1
VER_NODE_PUBLIC = 28
VER_NODE_PRIVATE = 32
VER_ACCOUNT_ID = 0
VER_ACCOUNT_PUBLIC = 35
VER_ACCOUNT_PRIVATE = 34
VER_FAMILY_GENERATOR = 41
VER_FAMILY_SEED = 33

ALPHABET = 'rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz'

VERSION_NAME = {
    VER_NONE: 'VER_NONE',
    VER_NODE_PUBLIC: 'VER_NODE_PUBLIC',
    VER_NODE_PRIVATE: 'VER_NODE_PRIVATE',
    VER_ACCOUNT_ID: 'VER_ACCOUNT_ID',
    VER_ACCOUNT_PUBLIC: 'VER_ACCOUNT_PUBLIC',
    VER_ACCOUNT_PRIVATE: 'VER_ACCOUNT_PRIVATE',
    VER_FAMILY_GENERATOR: 'VER_FAMILY_GENERATOR',
    VER_FAMILY_SEED: 'VER_FAMILY_SEED'
}

class Alphabet(object):
    def __init__(self, radix, digit_to_char, char_to_digit):
        self.radix = radix
        self.digit_to_char = digit_to_char
        self.char_to_digit = char_to_digit

    def transcode_from(self, s, source_alphabet):
        n, zero_count = source_alphabet._digits_to_number(s)
        digits = []
        while n > 0:
            n, digit = divmod(n, self.radix)
            digits.append(self.digit_to_char(digit))

        s = ''.join(digits)
        return self.digit_to_char(0) * zero_count + s[::-1]

    def _digits_to_number(self, digits):
        stripped = digits.lstrip(self.digit_to_char(0))
        n = 0
        for d in stripped:
            n *= self.radix
            n += self.char_to_digit(d)
        return n, len(digits) - len(stripped)

_INVERSE_INDEX = dict((c, i) for (i, c) in enumerate(ALPHABET))

# In base 58 encoding, the digits come from the ALPHABET string.
BASE58 = Alphabet(len(ALPHABET), ALPHABET.__getitem__, _INVERSE_INDEX.get)

# In base 256 encoding, each digit is just a character between 0 and 255.
BASE256 = Alphabet(256, chr, ord)

def encode(b):
    return BASE58.transcode_from(b, BASE256)

def decode(b):
    return BASE256.transcode_from(b, BASE58)

def checksum(b):
    """Returns a 4-byte checksum of a binary."""
    return sha256(sha256(b).digest()).digest()[:4]

def encode_version(ver, b):
    """Encodes a version encoding and a binary as a human string."""
    b = chr(ver) + b
    return encode(b + checksum(b))

def decode_version(s):
    """Decodes a human base-58 string into its version encoding and binary."""
    b = decode(s)
    body, check = b[:-4], b[-4:]
    assert check == checksum(body), ('Bad checksum for', s)
    return ord(body[0]), body[1:]

def version_name(ver):
    return VERSION_NAME.get(ver) or ('(unknown version %s)' % ver)

def check_version(version, expected):
    if version != expected:
        raise ValueError('Expected version %s but got %s' % (
            version_name(expected), version_name(version)))
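encode_version and decode_version are inverses: the version byte is prepended, a 4-byte double-SHA-256 checksum is appended, and the result is transcoded into the Ripple base-58 alphabet. A round-trip sketch in Python 2 byte strings; the 33-byte payload is a made-up stand-in, not a real key:

from ripple.util import Base58

payload = chr(2) + 'x' * 32    # fake 33-byte key body, for illustration only
human = Base58.encode_version(Base58.VER_NODE_PUBLIC, payload)
ver, body = Base58.decode_version(human)
assert (ver, body) == (Base58.VER_NODE_PUBLIC, payload)
Base58.check_version(ver, Base58.VER_NODE_PUBLIC)   # no-op when the version matches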
164
bin/python/ripple/util/Sign.py
Normal file
@@ -0,0 +1,164 @@
#!/usr/bin/env python

from __future__ import print_function

import base64, hashlib, os, random, struct, sys  # hashlib: needed by make_seed_from_passphrase
import ed25519
import ecdsa
from ripple.util import Base58
from ripple.ledger import SField

ED25519_BYTE = chr(0xed)
WRAP_COLUMNS = 60

USAGE = """\
Usage:
    create
        Create a new master public/secret key pair.

    check <key>
        Check an existing key for validity.

    sign <sequence> <validator-public> <master-secret>
        Create a new signed manifest with the given sequence
        number, validator public key, and master secret key.
"""

def prepend_length_byte(b):
    assert len(b) <= 192, 'Too long'
    return chr(len(b)) + b

def to_int32(i):
    return struct.pack('>I', i)

#-----------------------------------------------------------

def make_seed(urandom=os.urandom):
    # This is not used.
    return urandom(16)

def make_ed25519_keypair(urandom=os.urandom):
    private_key = urandom(32)
    return private_key, ed25519.publickey(private_key)

def make_ecdsa_keypair():
    # This is not used.
    private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
    # Can't be unit tested easily - need a mock for ecdsa.
    vk = private_key.get_verifying_key()
    sig = private_key.sign('message')
    assert vk.verify(sig, 'message')
    return private_key, vk

def make_seed_from_passphrase(passphrase):
    # For convenience, like say testing against rippled, we can hash a passphrase
    # to get the seed. validation_create (Josh may have killed it by now) takes
    # an optional arg, which can be a base58 encoded seed, or a passphrase.
    return hashlib.sha512(passphrase).digest()[:16]

def make_manifest(public_key, validator_public_key, seq):
    return ''.join([
        SField.sfSequence,
        to_int32(seq),
        SField.sfPublicKey,      # Master public key.
        prepend_length_byte(public_key),
        SField.sfSigningPubKey,  # Ephemeral public key.
        prepend_length_byte(validator_public_key)])

def sign_manifest(manifest, private_key, public_key):
    sig = ed25519.signature('MAN\0' + manifest, private_key, public_key)
    return manifest + SField.sfSignature + prepend_length_byte(sig)

def wrap(s, cols=WRAP_COLUMNS):
    if s:
        size = max((len(s) + cols - 1) / cols, 1)
        w = len(s) / size
        s = '\n'.join(s[i:i + w] for i in range(0, len(s), w))
    return s

def create_ed_keys(urandom=os.urandom):
    private_key, public_key = make_ed25519_keypair(urandom)
    public_key_human = Base58.encode_version(
        Base58.VER_NODE_PUBLIC, ED25519_BYTE + public_key)
    private_key_human = Base58.encode_version(
        Base58.VER_NODE_PRIVATE, private_key)
    return public_key_human, private_key_human

def check_validator_public(v, validator_public_key):
    Base58.check_version(v, Base58.VER_NODE_PUBLIC)
    if len(validator_public_key) != 33:
        raise ValueError('Validator key should be length 33, is %s' %
                         len(validator_public_key))
    b = ord(validator_public_key[0])
    if b not in (2, 3):
        raise ValueError('First validator key byte must be 2 or 3, is %d' % b)

def check_master_secret(v, private_key):
    Base58.check_version(v, Base58.VER_NODE_PRIVATE)
    if len(private_key) != 32:
        raise ValueError('Length of master secret should be 32, is %s' %
                         len(private_key))


def get_signature(seq, validator_public_key_human, private_key_human):
    v, validator_public_key = Base58.decode_version(validator_public_key_human)
    check_validator_public(v, validator_public_key)

    v, private_key = Base58.decode_version(private_key_human)
    check_master_secret(v, private_key)

    pk = ed25519.publickey(private_key)
    apk = ED25519_BYTE + pk
    m = make_manifest(apk, validator_public_key, seq)
    m1 = sign_manifest(m, private_key, pk)
    return base64.b64encode(m1)


# Testable versions of functions.
def perform_create(urandom=os.urandom, print=print):
    public, private = create_ed_keys(urandom)
    print('[validator_keys]', public, '', '[master_secret]', private, sep='\n')

def perform_check(s, print=print):
    version, b = Base58.decode_version(s)
    print('version = ' + Base58.version_name(version))
    print('decoded length = ' + str(len(b)))
    assert Base58.encode_version(version, b) == s

def perform_sign(
        seq, validator_public_key_human, private_key_human, print=print):
    print('[validation_manifest]')
    print(wrap(get_signature(
        int(seq), validator_public_key_human, private_key_human)))

# Externally visible versions of functions.
def create():
    perform_create()

def check(s):
    perform_check(s)

def sign(seq, validator_public_key_human, private_key_human):
    perform_sign(seq, validator_public_key_human, private_key_human)


def usage(*errors):
    if errors:
        print(*errors)
    print(USAGE)
    return not errors

_COMMANDS = dict((f.__name__, f) for f in (create, check, sign))

def run_command(args):
    if not args:
        return usage()
    name = args[0]
    command = _COMMANDS.get(name)
    if not command:
        return usage('No such command:', name)
    try:
        command(*args[1:])
    except TypeError:
        return usage('Wrong number of arguments for:', name)
    return True
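The create/check/sign entry points mirror the USAGE text, and the testable perform_* variants take the randomness source and print function as parameters, which is what the sketch below leans on. The fixed byte source is only there to make the example repeatable; a real run keeps the os.urandom default.

from ripple.util import Sign

fixed = lambda n: '\x11' * n             # deterministic stand-in for os.urandom
public, private = Sign.create_ed_keys(urandom=fixed)
Sign.perform_check(public)               # prints version = VER_NODE_PUBLIC and the decoded length
Sign.perform_check(private)              # prints version = VER_NODE_PRIVATE
Sign.run_command(['create'])             # same output shape the USAGE text describes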
682
bin/python/ripple/util/ValidatorManifestTest.py
Executable file
@@ -0,0 +1,682 @@
#!/usr/bin/env python
"""
Test for setting ephemeral keys for the validator manifest.
"""

from __future__ import (
    absolute_import, division, print_function, unicode_literals
)

import argparse
import contextlib
from contextlib import contextmanager
import json
import os
import platform
import shutil
import subprocess
import time

DELAY_WHILE_PROCESS_STARTS_UP = 1.5
ARGS = None

NOT_FOUND = -1        # not in log
ACCEPTED_NEW = 0      # added new manifest
ACCEPTED_UPDATE = 1   # replaced old manifest with new
UNTRUSTED = 2         # don't trust master key
STALE = 3             # seq is too old
REVOKED = 4           # revoked validator key
INVALID = 5           # invalid signature

MANIFEST_ACTION_STR_TO_ID = {
    'NotFound': NOT_FOUND,  # not found in log
    'AcceptedNew': ACCEPTED_NEW,
    'AcceptedUpdate': ACCEPTED_UPDATE,
    'Untrusted': UNTRUSTED,
    'Stale': STALE,
    'Revoked': REVOKED,
    'Invalid': INVALID,
}

MANIFEST_ACTION_ID_TO_STR = {
    v: k for k, v in MANIFEST_ACTION_STR_TO_ID.items()
}

CONF_TEMPLATE = """
[server]
port_rpc
port_peer
port_wss_admin

[port_rpc]
port = {rpc_port}
ip = 127.0.0.1
admin = 127.0.0.1
protocol = https

[port_peer]
port = {peer_port}
ip = 0.0.0.0
protocol = peer

[port_wss_admin]
port = {wss_port}
ip = 127.0.0.1
admin = 127.0.0.1
protocol = wss

[node_size]
medium

[node_db]
type={node_db_type}
path={node_db_path}
open_files=2000
filter_bits=12
cache_mb=256
file_size_mb=8
file_size_mult=2
online_delete=256
advisory_delete=0

[database_path]
{db_path}

[debug_logfile]
{debug_logfile}

[sntp_servers]
time.windows.com
time.apple.com
time.nist.gov
pool.ntp.org

[ips]
r.ripple.com 51235

[ips_fixed]
{sibling_ip} {sibling_port}

[validators]
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5

[validation_quorum]
3

[validation_seed]
{validation_seed}
#validation_public_key: {validation_public_key}

# Other rippled's trusting this validator need this key
[validator_keys]
{all_validator_keys}

[peer_private]
1

[overlay]
expire = 1
auto_connect = 1

[validation_manifest]
{validation_manifest}

[rpc_startup]
{{ "command": "log_level", "severity": "debug" }}

[ssl_verify]
0
"""
# End config template


def static_vars(**kwargs):
    def decorate(func):
        for k in kwargs:
            setattr(func, k, kwargs[k])
        return func
    return decorate


@static_vars(rpc=5005, peer=51235, wss=6006)
def checkout_port_nums():
    """Returns a tuple of port nums for rpc, peer, and wss_admin"""
    checkout_port_nums.rpc += 1
    checkout_port_nums.peer += 1
    checkout_port_nums.wss += 1
    return (
        checkout_port_nums.rpc,
        checkout_port_nums.peer,
        checkout_port_nums.wss
    )


def is_windows():
    return platform.system() == 'Windows'


def manifest_create():
    """returns dict with keys: 'validator_keys', 'master_secret'"""
    to_run = ['python', ARGS.ripple_home + '/bin/python/Manifest.py', 'create']
    r = subprocess.check_output(to_run)
    result = {}
    k = None
    for l in r.splitlines():
        l = l.strip()
        if not l:
            continue
        elif l == '[validator_keys]':
            k = l[1:-1]
        elif l == '[master_secret]':
            k = l[1:-1]
        elif l.startswith('['):
            raise ValueError(
                'Unexpected key: {} from `manifest create`'.format(l))
        else:
            if not k:
                raise ValueError('Value with no key')
            result[k] = l
            k = None

    if k in result:
        raise ValueError('Repeat key from `manifest create`: ' + k)
    if len(result) != 2:
        raise ValueError(
            'Expected 2 keys from `manifest create` but got {} keys instead ({})'.
            format(len(result), result))

    return result


def sign_manifest(seq, validation_pk, master_secret):
    """returns the signed manifest as a string"""
    to_run = ['python', ARGS.ripple_home + '/bin/python/Manifest.py', 'sign',
              str(seq), validation_pk, master_secret]
    try:
        r = subprocess.check_output(to_run)
    except subprocess.CalledProcessError as e:
        print('Error in sign_manifest: ', e.output)
        raise e
    result = []
    for l in r.splitlines():
        l = l.strip()
        if not l or l == '[validation_manifest]':
            continue
        result.append(l)
    return '\n'.join(result)


def get_ripple_exe():
    """Find the rippled executable"""
    prefix = ARGS.ripple_home + '/build/'
    exe = ['rippled', 'RippleD.exe']
    to_test = [prefix + t + '.debug/' + e
               for t in ['clang', 'gcc', 'msvc'] for e in exe]
    for e in exe:
        to_test.append(prefix + '/' + e)
    for t in to_test:
        if os.path.isfile(t):
            return t


class RippledServer(object):
    def __init__(self, exe, config_file, server_out):
        self.config_file = config_file
        self.exe = exe
        self.process = None
        self.server_out = server_out
        self.reinit(config_file)

    def reinit(self, config_file):
        self.config_file = config_file
        self.to_run = [self.exe, '--verbose', '--conf', self.config_file]

    @property
    def config_root(self):
        return os.path.dirname(self.config_file)

    @property
    def master_secret_file(self):
        return self.config_root + '/master_secret.txt'

    def startup(self):
        if ARGS.verbose:
            print('starting rippled:' + self.config_file)
        fout = open(self.server_out, 'w')
        self.process = subprocess.Popen(
            self.to_run, stdout=fout, stderr=subprocess.STDOUT)

    def shutdown(self):
        if not self.process:
            return
        fout = open(os.devnull, 'w')
        subprocess.Popen(
            self.to_run + ['stop'], stdout=fout, stderr=subprocess.STDOUT)
        self.process.wait()
        self.process = None

    def rotate_logfile(self):
        if self.server_out == os.devnull:
            return
        for i in range(100):
            backup_name = '{}.{}'.format(self.server_out, i)
            if not os.path.exists(backup_name):
                os.rename(self.server_out, backup_name)
                return
        raise ValueError('Could not rotate logfile: {}'.
                         format(self.server_out))

    def validation_create(self):
        """returns dict with keys:
        'validation_key', 'validation_public_key', 'validation_seed'
        """
        to_run = [self.exe, '-q', '--conf', self.config_file,
                  '--', 'validation_create']
        try:
            return json.loads(subprocess.check_output(to_run))['result']
        except subprocess.CalledProcessError as e:
            print('Error in validation_create: ', e.output)
            raise e


@contextmanager
def rippled_server(config_file, server_out=os.devnull):
    """Start a ripple server"""
    try:
        server = None
        server = RippledServer(ARGS.ripple_exe, config_file, server_out)
        server.startup()
        yield server
    finally:
        if server:
            server.shutdown()


@contextmanager
def pause_server(server, config_file):
    """Shutdown and then restart a ripple server"""
    try:
        server.shutdown()
        server.rotate_logfile()
        yield server
    finally:
        server.reinit(config_file)
        server.startup()


def parse_date(d, t):
    """Return the timestamp of a line, or None if the line has no timestamp"""
    try:
        return time.strptime(d+' '+t, '%Y-%B-%d %H:%M:%S')
    except:
        return None


def to_dict(l):
    """Given a line of the form Key0: Value0;Key2: Value2; return a dict"""
    fields = l.split(';')
    result = {}
    for f in fields:
        if f:
            v = f.split(':')
            assert len(v) == 2
            result[v[0].strip()] = v[1].strip()
    return result
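to_dict is the piece check_ephemeral_key (below) relies on to digest the "Key: value;" tail of a Manifest log line; the sample string here is made up purely for illustration.

info = to_dict('Manifest: AcceptedUpdate;Pk: n9Kabc;Seq: 3;OldSeq: 1;')
assert info['Manifest'] == 'AcceptedUpdate'
assert int(info['Seq']) == 3 and int(info['OldSeq']) == 1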
|
||||||
|
|
||||||
|
def check_ephemeral_key(validator_key,
|
||||||
|
log_file,
|
||||||
|
seq,
|
||||||
|
change_time):
|
||||||
|
"""
|
||||||
|
Detect when a server is informed of a validator's ephemeral key change.
|
||||||
|
`change_time` and `seq` may be None, in which case they are ignored.
|
||||||
|
"""
|
||||||
|
manifest_prefix = 'Manifest:'
|
||||||
|
# a manifest line has the form Manifest: action; Key: value;
|
||||||
|
# Key can be Pk (public key), Seq, OldSeq,
|
||||||
|
for l in open(log_file):
|
||||||
|
sa = l.split()
|
||||||
|
if len(sa) < 5 or sa[4] != manifest_prefix:
|
||||||
|
continue
|
||||||
|
|
||||||
|
d = to_dict(' '.join(sa[4:]))
|
||||||
|
# check the seq number and validator_key
|
||||||
|
if d['Pk'] != validator_key:
|
||||||
|
continue
|
||||||
|
if seq is not None and int(d['Seq']) != seq:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if change_time:
|
||||||
|
t = parse_date(sa[0], sa[1])
|
||||||
|
if not t or t < change_time:
|
||||||
|
continue
|
||||||
|
action = d['Manifest']
|
||||||
|
return MANIFEST_ACTION_STR_TO_ID[action]
|
||||||
|
return NOT_FOUND
|
||||||
|
|
||||||
|
|
||||||
|
def check_ephemeral_keys(validator_key,
|
||||||
|
log_files,
|
||||||
|
seq,
|
||||||
|
change_time=None,
|
||||||
|
timeout_s=60):
|
||||||
|
result = [NOT_FOUND for i in range(len(log_files))]
|
||||||
|
if timeout_s < 10:
|
||||||
|
sleep_time = 1
|
||||||
|
elif timeout_s < 60:
|
||||||
|
sleep_time = 5
|
||||||
|
else:
|
||||||
|
sleep_time = 10
|
||||||
|
n = timeout_s//sleep_time
|
||||||
|
if n == 0:
|
||||||
|
n = 1
|
||||||
|
start_time = time.time()
|
||||||
|
for _ in range(n):
|
||||||
|
for i, lf in enumerate(log_files):
|
||||||
|
if result[i] != NOT_FOUND:
|
||||||
|
continue
|
||||||
|
result[i] = check_ephemeral_key(validator_key,
|
||||||
|
lf,
|
||||||
|
seq,
|
||||||
|
change_time)
|
||||||
|
if result[i] != NOT_FOUND:
|
||||||
|
if all(r != NOT_FOUND for r in result):
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
server_dir = os.path.basename(os.path.dirname(log_files[i]))
|
||||||
|
if ARGS.verbose:
|
||||||
|
print('Check for {}: {}'.format(
|
||||||
|
server_dir, MANIFEST_ACTION_ID_TO_STR[result[i]]))
|
||||||
|
tsf = time.time() - start_time
|
||||||
|
if tsf > 20:
|
||||||
|
if ARGS.verbose:
|
||||||
|
print('Waiting for key to propigate: ', tsf)
|
||||||
|
time.sleep(sleep_time)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_validator_key(config_file):
|
||||||
|
in_validator_keys = False
|
||||||
|
for l in open(config_file):
|
||||||
|
sl = l.strip()
|
||||||
|
if not in_validator_keys and sl == '[validator_keys]':
|
||||||
|
in_validator_keys = True
|
||||||
|
continue
|
||||||
|
if in_validator_keys:
|
||||||
|
if sl.startswith('['):
|
||||||
|
raise ValueError('ThisServer validator key not found')
|
||||||
|
if sl.startswith('#'):
|
||||||
|
continue
|
||||||
|
s = sl.split()
|
||||||
|
if len(s) == 2 and s[1] == 'ThisServer':
|
||||||
|
return s[0]
|
||||||
|
|
||||||
|
|
||||||
|
def new_config_ephemeral_key(
|
||||||
|
server, seq, rm_dbs=False, master_secret_file=None):
|
||||||
|
"""Generate a new ephemeral key, add to config, restart server"""
|
||||||
|
config_root = server.config_root
|
||||||
|
config_file = config_root + '/rippled.cfg'
|
||||||
|
db_dir = config_root + '/db'
|
||||||
|
if not master_secret_file:
|
||||||
|
master_secret_file = server.master_secret_file
|
||||||
|
with open(master_secret_file) as f:
|
||||||
|
master_secret = f.read()
|
||||||
|
v = server.validation_create()
|
||||||
|
signed = sign_manifest(seq, v['validation_public_key'], master_secret)
|
||||||
|
with pause_server(server, config_file):
|
||||||
|
if rm_dbs and os.path.exists(db_dir):
|
||||||
|
shutil.rmtree(db_dir)
|
||||||
|
os.makedirs(db_dir)
|
||||||
|
# replace the validation_manifest section with `signed`
|
||||||
|
bak = config_file + '.bak'
|
||||||
|
if is_windows() and os.path.isfile(bak):
|
||||||
|
os.remove(bak)
|
||||||
|
os.rename(config_file, bak)
|
||||||
|
in_manifest = False
|
||||||
|
with open(bak, 'r') as src:
|
||||||
|
with open(config_file, 'w') as out:
|
||||||
|
for l in src:
|
||||||
|
sl = l.strip()
|
||||||
|
if not in_manifest and sl == '[validation_manifest]':
|
||||||
|
in_manifest = True
|
||||||
|
elif in_manifest:
|
||||||
|
if sl.startswith('[') or sl.startswith('#'):
|
||||||
|
in_manifest = False
|
||||||
|
out.write(signed)
|
||||||
|
out.write('\n\n')
|
||||||
|
else:
|
||||||
|
continue
|
||||||
|
out.write(l)
|
||||||
|
return (bak, config_file)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description=('Create config files for n validators')
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
'--ripple_home', '-r',
|
||||||
|
default=os.sep.join(os.path.realpath(__file__).split(os.sep)[:-5]),
|
||||||
|
help=('Root directory of the ripple repo'), )
|
||||||
|
parser.add_argument('--num_validators', '-n',
|
||||||
|
default=2,
|
||||||
|
help=('Number of validators'), )
|
||||||
|
parser.add_argument('--conf', '-c', help=('rippled config file'), )
|
||||||
|
parser.add_argument('--out', '-o',
|
||||||
|
default='test_output',
|
||||||
|
help=('config root directory'), )
|
||||||
|
parser.add_argument(
|
||||||
|
'--existing', '-e',
|
||||||
|
action='store_true',
|
||||||
|
help=('use existing config files'), )
|
||||||
|
parser.add_argument(
|
||||||
|
'--generate', '-g',
|
||||||
|
action='store_true',
|
||||||
|
help=('generate conf files only'), )
|
||||||
|
parser.add_argument(
|
||||||
|
'--verbose', '-v',
|
||||||
|
action='store_true',
|
||||||
|
help=('verbose status reporting'), )
|
||||||
|
parser.add_argument(
|
||||||
|
'--quiet', '-q',
|
||||||
|
action='store_true',
|
||||||
|
help=('quiet status reporting'), )
|
||||||
|
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|


def get_configs(manifest_seq):
    global ARGS
    ARGS.ripple_home = os.path.expanduser(ARGS.ripple_home)

    n = int(ARGS.num_validators)
    if n < 2:
        raise ValueError(
            'Need at least 2 rippled servers. Specified: {}'.format(n))
    config_root = ARGS.out
    ARGS.ripple_exe = get_ripple_exe()
    if not ARGS.ripple_exe:
        raise ValueError('No Exe Found')

    if ARGS.existing:
        return [
            os.path.abspath('{}/validator_{}/rippled.cfg'.format(config_root, i))
            for i in range(n)
        ]

    initial_config = ARGS.conf

    manifests = [manifest_create() for i in range(n)]
    port_nums = [checkout_port_nums() for i in range(n)]
    # A temporary server run from the bootstrap config is only needed to
    # create one validation keypair per validator.
    with rippled_server(initial_config) as server:
        time.sleep(DELAY_WHILE_PROCESS_STARTS_UP)
        validations = [server.validation_create() for i in range(n)]

    signed_manifests = [sign_manifest(manifest_seq,
                                      v['validation_public_key'],
                                      m['master_secret'])
                        for m, v in zip(manifests, validations)]
    node_db_type = 'RocksDB' if not is_windows() else 'NuDB'
    node_db_filename = node_db_type.lower()

    config_files = []
    for i, (m, v, s) in enumerate(zip(manifests, validations, signed_manifests)):
        # The validators form a ring: each server lists its own key and the
        # key of the previous validator in the list.
        sibling_index = (i - 1) % len(manifests)
        all_validator_keys = '\n'.join([
            m['validator_keys'] + ' ThisServer',
            manifests[sibling_index]['validator_keys'] + ' NextInRing'])
        this_validator_dir = os.path.abspath(
            '{}/validator_{}'.format(config_root, i))
        db_path = this_validator_dir + '/db'
        node_db_path = db_path + '/' + node_db_filename
        log_path = this_validator_dir + '/log'
        debug_logfile = log_path + '/debug.log'
        rpc_port, peer_port, wss_port = port_nums[i]
        sibling_ip = '127.0.0.1'
        sibling_port = port_nums[sibling_index][1]
        d = {
            'validation_manifest': s,
            'all_validator_keys': all_validator_keys,
            'node_db_type': node_db_type,
            'node_db_path': node_db_path,
            'db_path': db_path,
            'debug_logfile': debug_logfile,
            'rpc_port': rpc_port,
            'peer_port': peer_port,
            'wss_port': wss_port,
            'sibling_ip': sibling_ip,
            'sibling_port': sibling_port,
        }
        d.update(m)
        d.update(v)

        for p in [this_validator_dir, db_path, log_path]:
            if not os.path.exists(p):
                os.makedirs(p)

        config_files.append('{}/rippled.cfg'.format(this_validator_dir))
        with open(config_files[-1], 'w') as f:
            f.write(CONF_TEMPLATE.format(**d))

        with open('{}/master_secret.txt'.format(this_validator_dir), 'w') as f:
            f.write(m['master_secret'])

    return config_files
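
# For example, with --num_validators 3 the validator key list written into
# each generated rippled.cfg pairs every node with the previous one in the
# ring (an illustration of the sibling_index arithmetic above):
#
#   validator_0 lists its own key ('ThisServer') and validator_2's ('NextInRing')
#   validator_1 lists its own key and validator_0's
#   validator_2 lists its own key and validator_1's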


def update_ephemeral_key(
        server, new_seq, log_files,
        expected=None, rm_dbs=False, master_secret_file=None,
        restore_origional_conf=False, timeout_s=300):
    if not expected:
        expected = {}

    change_time = time.gmtime()
    back_conf, new_conf = new_config_ephemeral_key(
        server,
        new_seq,
        rm_dbs,
        master_secret_file
    )
    validator_key = get_validator_key(server.config_file)
    start_time = time.time()
    ck = check_ephemeral_keys(validator_key,
                              log_files,
                              seq=new_seq,
                              change_time=change_time,
                              timeout_s=timeout_s)
    if ARGS.verbose:
        print('Check finished: {} secs.'.format(int(time.time() - start_time)))
    all_success = True
    # Compare what each peer logged against the expected manifest action.
    for i, r in enumerate(ck):
        e = expected.get(i, UNTRUSTED)
        server_dir = os.path.basename(os.path.dirname(log_files[i]))
        status = 'OK' if e == r else 'FAIL'
        print('{}: Server: {} Expected: {} Got: {}'.
              format(status, server_dir,
                     MANIFEST_ACTION_ID_TO_STR[e], MANIFEST_ACTION_ID_TO_STR[r]))
        all_success = all_success and (e == r)
    if restore_origional_conf:
        if is_windows() and os.path.isfile(new_conf):
            os.remove(new_conf)
        os.rename(back_conf, new_conf)
    return all_success
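
# Illustrative call (the sequence number is made up): rotate server 0's
# ephemeral key to sequence 3 and expect only the first peer's log to report
# the update as accepted, while every other peer still reports the manifest
# as untrusted. This mirrors how run_main() below drives the function:
#
#   expected = {i: UNTRUSTED for i in range(len(log_files))}
#   expected[0] = ACCEPTED_UPDATE
#   update_ephemeral_key(servers[0], 3, log_files, expected)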


def run_main():
    global ARGS
    ARGS = parse_args()
    manifest_seq = 1
    config_files = get_configs(manifest_seq)
    if ARGS.generate:
        return
    if len(config_files) <= 1:
        print('Script requires at least 2 servers. Actual #: {}'.
              format(len(config_files)))
        return
    # contextlib.nested (Python 2 only) runs all the validator servers under
    # one with-statement.
    with contextlib.nested(*(rippled_server(c, os.path.dirname(c)+'/log.txt')
                             for c in config_files)) as servers:
        log_files = [os.path.dirname(cf)+'/log.txt' for cf in config_files[1:]]
        validator_key = get_validator_key(config_files[0])
        start_time = time.time()
        ck = check_ephemeral_keys(validator_key,
                                  [log_files[0]],
                                  seq=None,
                                  timeout_s=60)
        if ARGS.verbose:
            print('Check finished: {} secs.'.format(
                int(time.time() - start_time)))
        if any(r == NOT_FOUND for r in ck):
            print('FAIL: Initial key did not propagate to all servers')
            return

        manifest_seq += 2
        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = ACCEPTED_UPDATE
        if not ARGS.quiet:
            print('Testing key update')
        kr = update_ephemeral_key(servers[0], manifest_seq, log_files, expected)
        if not kr:
            print('\nFail: Key Update Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = STALE
        if not ARGS.quiet:
            print('Testing stale key')
        kr = update_ephemeral_key(
            servers[0], manifest_seq-1, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Stale Key Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = STALE
        if not ARGS.quiet:
            print('Testing stale key 2')
        kr = update_ephemeral_key(
            servers[0], manifest_seq, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Stale Key Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = REVOKED
        if not ARGS.quiet:
            print('Testing revoked key')
        kr = update_ephemeral_key(
            servers[0], 0xffffffff, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Revoked Key Test. Exiting')
            return
        print('\nOK: All tests passed')


if __name__ == '__main__':
    run_main()
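
# Note: contextlib.nested() exists only in Python 2 and was removed in
# Python 3. A minimal sketch of the equivalent on Python 3, assuming the same
# rippled_server() context manager, would use contextlib.ExitStack:
#
#   with contextlib.ExitStack() as stack:
#       servers = [stack.enter_context(
#           rippled_server(c, os.path.dirname(c) + '/log.txt'))
#           for c in config_files]
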
bin/python/ripple/util/test_Base58.py (new file, 47 lines)
@@ -0,0 +1,47 @@
from __future__ import absolute_import, division, print_function

from ripple.util import Base58

from unittest import TestCase

BINARY = 'nN9kfUnKTf7PpgLG'

class test_Base58(TestCase):
    def run_test(self, before, after):
        self.assertEquals(Base58.decode(before), after)
        self.assertEquals(Base58.encode(after), before)

    def test_trivial(self):
        self.run_test('', '')

    def test_zeroes(self):
        for before, after in (('', ''), ('abc', 'I\x8b')):
            for i in range(1, 257):
                self.run_test('r' * i + before, '\0' * i + after)

    def test_single_digits(self):
        for i, c in enumerate(Base58.ALPHABET):
            self.run_test(c, chr(i))

    def test_various(self):
        # Test three random numbers.
        self.run_test('88Mw', '\x88L\xed')
        self.run_test(
            'nN9kfUnKTf7PpgLG', '\x03\xdc\x9co\xdea\xefn\xd3\xb8\xe2\xc1')
        self.run_test(
            'zzWWb4C5p6kNrVa4fEBoZpZKd3XQLXch7QJbLCuLdoS1CWr8qdAZHEmwMiJy8Hwp',
            'xN\x82\xfcQ\x1f\xb3~\xdf\xc7\xb37#\xc6~A\xe9\xf6-\x1f\xcb"\xfab'
            '(\'\xccv\x9e\x85\xc3\xd1\x19\x941{\x8et\xfbS}\x86.k\x07\xb5\xb3')

    def test_check(self):
        self.assertEquals(Base58.checksum(BINARY), '\xaa\xaar\x9d')

    def test_encode(self):
        self.assertEquals(
            Base58.encode_version(Base58.VER_ACCOUNT_PUBLIC, BINARY),
            'sB49XwJgmdEZDo8LmYwki7FYkiaN7')

    def test_decode(self):
        ver, b = Base58.decode_version('sB49XwJgmdEZDo8LmYwki7FYkiaN7')
        self.assertEquals(ver, Base58.VER_ACCOUNT_PUBLIC)
        self.assertEquals(b, BINARY)
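
# These tests are written for Python 2 (str is a byte string and assertEquals
# is the old unittest spelling). One way to run them, assuming bin/python is
# on PYTHONPATH, is:
#
#   python -m unittest ripple.util.test_Base58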